[GRASS-SVN] r42916 - in grass/branches/develbranch_6: include/Make lib/python lib/python/ctypes lib/python/ctypes/ctypesgencore lib/python/ctypes/ctypesgencore/parser lib/python/ctypes/ctypesgencore/printer lib/python/ctypes/ctypesgencore/processor

svn_grass at osgeo.org svn_grass at osgeo.org
Wed Jul 28 04:43:53 EDT 2010


Author: martinl
Date: 2010-07-28 08:43:53 +0000 (Wed, 28 Jul 2010)
New Revision: 42916

Added:
   grass/branches/develbranch_6/lib/python/ctypes/
   grass/branches/develbranch_6/lib/python/ctypes/Makefile
   grass/branches/develbranch_6/lib/python/ctypes/__init__.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgen.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/LICENSE
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/__init__.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/ctypedescs.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/descriptions.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/expressions.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/libraryloader.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/messages.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/old libraryloader.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/options.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/__init__.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/cdeclarations.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/cgrammar.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/cparser.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/ctypesparser.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/datacollectingparser.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/lex.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/lextab.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/parsetab.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/pplexer.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/preprocessor.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/yacc.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/__init__.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/defaultheader.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/preamble.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/printer.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/test.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/__init__.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/dependencies.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/operations.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/pipeline.py
   grass/branches/develbranch_6/lib/python/ctypes/ctypesheader.py
   grass/branches/develbranch_6/lib/python/ctypes/fix.sed
   grass/branches/develbranch_6/lib/python/ctypes/loader.py
   grass/branches/develbranch_6/lib/python/ctypes/preamble.py
Modified:
   grass/branches/develbranch_6/include/Make/Rules.make
   grass/branches/develbranch_6/lib/python/Makefile
Log:
ctypes backported from trunk


Modified: grass/branches/develbranch_6/include/Make/Rules.make
===================================================================
--- grass/branches/develbranch_6/include/Make/Rules.make	2010-07-28 07:49:16 UTC (rev 42915)
+++ grass/branches/develbranch_6/include/Make/Rules.make	2010-07-28 08:43:53 UTC (rev 42916)
@@ -66,6 +66,14 @@
 %.tab.h %.tab.c: %.y
 	$(YACC) -b$* -p$* $(YACCFLAGS) $<
 
+run_grass = \
+	GISRC=$(RUN_GISRC) \
+	GISBASE=$(RUN_GISBASE) \
+	PATH="$(GISBASE)/bin:$$PATH" \
+	PYTHONPATH="$(call mkpath,$(GISBASE)/etc/python,$$PYTHONPATH)" \
+	$(LD_LIBRARY_PATH_VAR)="$(BIN):$(ARCH_LIBDIR):$(BASE_LIBDIR):$($(LD_LIBRARY_PATH_VAR))" \
+	LC_ALL=C \
+	$(1)
 
 # default clean rules
 clean:

Modified: grass/branches/develbranch_6/lib/python/Makefile
===================================================================
--- grass/branches/develbranch_6/lib/python/Makefile	2010-07-28 07:49:16 UTC (rev 42915)
+++ grass/branches/develbranch_6/lib/python/Makefile	2010-07-28 08:43:53 UTC (rev 42916)
@@ -3,31 +3,37 @@
 include $(MODULE_TOPDIR)/include/Make/Platform.make
 include $(MODULE_TOPDIR)/include/Make/Grass.make
 include $(MODULE_TOPDIR)/include/Make/Rules.make
+include $(MODULE_TOPDIR)/include/Make/Python.make
 include $(MODULE_TOPDIR)/include/Make/Doxygen.make
 
 PYDIR = $(ETC)/python
 GDIR = $(PYDIR)/grass
 DSTDIR = $(GDIR)/script
 
-MODULES = core db raster vector
+MODULES = core db raster vector array
 
 PYFILES := $(patsubst %,$(DSTDIR)/%.py,$(MODULES) __init__)
+PYCFILES := $(patsubst %,$(DSTDIR)/%.pyc,$(MODULES) __init__)
 
-default: $(DSTDIR)
-	$(MAKE) $(PYFILES)
+CLEAN_SUBDIRS = ctypes
 
+default: $(PYFILES) $(PYCFILES) $(GDIR)/__init__.py $(GDIR)/__init__.pyc
+	-$(MAKE) -C ctypes || echo $(CURDIR)/ctypes >> $(ERRORLOG)
+
 $(PYDIR):
-	test -d $@ || $(MKDIR) -p $@
+	$(MKDIR) $@
 
-$(GDIR): $(PYDIR)
-	test -d $@ || $(MKDIR) -p $@
+$(GDIR): | $(PYDIR)
+	$(MKDIR) $@
 
-$(DSTDIR): $(GDIR)
-	test -d $@ || $(MKDIR) -p $@
-	@cat grass__init__.py > $(GDIR)/__init__.py
+$(DSTDIR): | $(GDIR)
+	$(MKDIR) $@
 
-$(DSTDIR)/%: %
+$(GDIR)/__init__.py: grass__init__.py | $(GDIR)
 	$(INSTALL_DATA) $< $@
 
+$(DSTDIR)/%: % | $(DSTDIR)
+	$(INSTALL_DATA) $< $@
+
 #doxygen:
 DOXNAME=grasspython


Property changes on: grass/branches/develbranch_6/lib/python/ctypes
___________________________________________________________________
Added: svn:ignore
   + imagery.py
arraystats.py
ogsf.py
stats.py
gmath.py
vector.py
display.py
proj.py
dbmi.py
vedit.py
date.py
g3d.py
grass.py
cluster.py
nviz.py
trans.py


Added: grass/branches/develbranch_6/lib/python/ctypes/Makefile
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/Makefile	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/Makefile	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,101 @@
+MODULE_TOPDIR = ../../..
+
+PACKAGE = "grasslibs"
+
+include $(MODULE_TOPDIR)/include/Make/Grass.make
+include $(MODULE_TOPDIR)/include/Make/Rules.make
+include $(MODULE_TOPDIR)/include/Make/Doxygen.make
+
+# doxygen:
+DOXNAME=
+DOXINPUT=grasspython.dox
+
+MODULES = date grass gmath proj imagery vector display stats \
+	dbmi g3d arraystats cluster trans vedit ogsf nviz
+
+date_LIBS       = $(DATETIMELIB)
+grass_LIBS      = $(GISLIB)
+gmath_LIBS      = $(GMATHLIB)
+proj_LIBS       = $(GPROJLIB)
+imagery_LIBS    = $(IMAGERYLIB)
+vector_LIBS     = $(VECTLIB)
+display_LIBS    = $(DISPLAYLIB)
+stats_LIBS      = $(STATSLIB)
+dbmi_LIBS       = $(DBMILIB)
+g3d_LIBS        = $(G3DLIB)
+arraystats_LIBS = $(ARRAYSTATSLIB)
+cluster_LIBS    = $(CLUSTERLIB)
+trans_LIBS      = $(TRANSLIB)
+vedit_LIBS      = $(VEDITLIB)
+ogsf_LIBS       = $(OGSFLIB)
+nviz_LIBS       = $(NVIZLIB)
+
+date_HDRS       = datetime.h P_datetime.h
+grass_HDRS      = gis.h gisdefs.h
+gmath_HDRS      = gmath.h
+proj_HDRS       = gprojects.h
+imagery_HDRS    = imagery.h imagedefs.h
+vector_HDRS     = Vect.h vect/dig_structs.h vect/dig_defines.h
+display_HDRS    = display.h
+stats_HDRS      = stats.h
+dbmi_HDRS       = dbmi.h proto_dbmi.h
+g3d_HDRS        = G3d.h
+arraystats_HDRS = arraystats.h
+cluster_HDRS    = cluster.h
+trans_HDRS      = transform.h
+vedit_HDRS      = vedit.h
+ogsf_HDRS       = ogsf_proto.h gstypes.h gsurf.h kftypes.h keyframe.h
+nviz_HDRS       = nviz.h
+
+proj_INC        = $(PROJINC)
+vector_INC      = $(VECT_INC) $(VECT_CFLAGS)
+vedit_INC       = $(VECT_INC) $(VECT_CFLAGS)
+
+SED = sed
+CTYPESGEN = ./ctypesgen.py
+CTYPESFLAGS = --cpp "$(CC) -E $(LFS_CFLAGS) $(EXTRA_CFLAGS) $(NLS_CFLAGS) $(DEFS) $(EXTRA_INC) $(INC)"
+EXTRA_CLEAN_FILES := $(foreach M,$(MODULES),$(M).py) $(wildcard ctypesgencore/*/*.pyc)
+
+ifneq ($(MINGW),)
+EXTRA_LIBS = $(INTLLIB)
+endif
+
+include $(MODULE_TOPDIR)/include/Make/Python.make
+
+PYDIR = $(ETC)/python
+GDIR = $(PYDIR)/grass
+DSTDIR = $(GDIR)/lib
+
+PYFILES  := $(patsubst %,$(DSTDIR)/%.py,$(MODULES) __init__ ctypes_preamble ctypes_loader)
+PYCFILES  := $(patsubst %,$(DSTDIR)/%.pyc,$(MODULES) __init__ ctypes_preamble ctypes_loader)
+LPYFILES := $(patsubst %,%.py,$(MODULES))
+
+ifneq ($(strip $(CTYPESGEN)),)
+default:
+	$(MAKE) $(DSTDIR)
+	$(MAKE) $(LPYFILES) $(PYFILES) $(PYCFILES)
+endif
+
+$(DSTDIR)/%.py: %.py | $(DSTDIR)
+	$(SED) -f fix.sed $< > $@
+
+$(DSTDIR)/ctypes_%.py: %.py | $(DSTDIR)
+	$(INSTALL_DATA) $< $@
+
+define module_rule
+$(1)_DEPS = $$(patsubst %.h,$(ARCH_INCDIR)/%.h,$$($(1)_HDRS))
+$(1).py: $$($(1)_DEPS)
+	$$(call run_grass,$(CTYPESGEN) $(CTYPESFLAGS) $$($(1)_LIBS) $$($(1)_INC) $(EXTRA_LIBS) $$($(1)_DEPS) -o $$@)
+endef
+$(foreach module,$(MODULES),$(eval $(call module_rule,$(module))))
+
+$(PYDIR):
+	$(MKDIR) $@
+
+$(GDIR): | $(PYDIR)
+	$(MKDIR) $@
+
+$(DSTDIR): | $(GDIR)
+	$(MKDIR) $@
+
+.SECONDARY: $(patsubst %,%.py,$(MODULES))

Added: grass/branches/develbranch_6/lib/python/ctypes/__init__.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/__init__.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/__init__.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,16 @@
+import date
+import grass
+import raster
+import gmath
+import proj
+import imagery
+import vector
+import display
+import stats
+import dbmi
+import g3d
+import arraystats
+import cluster
+import trans
+import vedit
+

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgen.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgen.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgen.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,150 @@
+#!/usr/bin/env python
+
+def find_names_in_modules(modules):
+    names = set()
+    for module in modules:
+        try:
+            mod = __import__(module)
+        except:
+            pass
+        else:
+            names.union(dir(module))
+    return names
+
+import optparse, sys
+
+def option_callback_W(option, opt, value, parser):
+    # Options preceded by a "-Wl," are simply treated as though the "-Wl,"
+    # is not there? I don't understand the purpose of this code...
+    if len(value) < 4 or value[0:3] != 'l,-':
+        raise optparse.BadOptionError("not in '-Wl,<opt>' form: %s%s"
+                                      % (opt, value))
+    opt = value[2:]
+    if opt not in ['-L', '-R', '--rpath']:
+        raise optparse.BadOptionError("-Wl option must be -L, -R"
+                                      " or --rpath, not " + value[2:])
+    # Push the linker option onto the list for further parsing.
+    parser.rargs.insert(0, value)
+
+def option_callback_libdir(option, opt, value, parser):
+    # There are two sets of linker search paths: those for use at compile time
+    # and those for use at runtime. Search paths specified with -L, -R, or
+    # --rpath are added to both sets.
+    parser.values.compile_libdirs.append(value)
+    parser.values.runtime_libdirs.append(value)
+
+import ctypesgencore
+import ctypesgencore.messages as msgs
+
+if __name__=="__main__":
+    usage = 'usage: %prog [options] /path/to/header.h ...'
+    op = optparse.OptionParser(usage=usage)
+    
+    # Parameters
+    op.add_option('-o', '--output', dest='output', metavar='FILE',
+        help='write wrapper to FILE')
+    op.add_option('-l', '--library', dest='libraries', action='append',
+        default=[], metavar='LIBRARY', help='link to LIBRARY')
+    op.add_option('', '--include', dest='other_headers', action='append',
+        default=[], metavar='HEADER',
+        help='include system header HEADER (e.g. stdio.h or stdlib.h)')
+    op.add_option('-m', '--module', '--link-module', action='append',
+        dest='modules', metavar='MODULE', default=[],
+        help='use symbols from Python module MODULE')
+    op.add_option('-I', '--includedir', dest='include_search_paths',
+        action='append', default=[], metavar='INCLUDEDIR',
+        help='add INCLUDEDIR as a directory to search for headers')
+    op.add_option('-W', action="callback", callback=option_callback_W,
+        metavar="l,OPTION", type="str",
+        help="where OPTION is -L, -R, or --rpath")
+    op.add_option("-L", "-R", "--rpath", "--libdir", action="callback",
+        callback=option_callback_libdir, metavar="LIBDIR", type="str",
+        help="Add LIBDIR to the search path (both compile-time and run-time)")
+    op.add_option('', "--compile-libdir", action="append",
+        dest="compile_libdirs", metavar="LIBDIR", default=[],
+        help="Add LIBDIR to the compile-time library search path.")
+    op.add_option('', "--runtime-libdir", action="append",
+        dest="runtime_libdirs", metavar="LIBDIR", default=[],
+        help="Add LIBDIR to the run-time library search path.")
+    
+    # Parser options
+    op.add_option('', '--cpp', dest='cpp', default='gcc -E',
+        help='The command to invoke the c preprocessor, including any ' \
+             'necessary options (default: gcc -E)')
+    op.add_option('', '--save-preprocessed-headers', metavar='FILENAME',
+        dest='save_preprocessed_headers', default=None,
+        help='Save the preprocessed headers to the specified FILENAME')
+    
+    # Processor options
+    op.add_option('-a', '--all-headers', action='store_true',
+        dest='all_headers', default=False,
+        help='include symbols from all headers, including system headers')
+    op.add_option('', '--builtin-symbols', action='store_true',
+        dest='builtin_symbols', default=False,
+        help='include symbols automatically generated by the preprocessor')
+    op.add_option('', '--no-macros', action='store_false', dest='include_macros',
+        default=True, help="Don't output macros.")
+    op.add_option('-i', '--include-symbols', dest='include_symbols',
+        default=None, help='regular expression for symbols to always include')
+    op.add_option('-x', '--exclude-symbols', dest='exclude_symbols',
+        default=None, help='regular expression for symbols to exclude')
+    
+    # Printer options
+    op.add_option('', '--header-template', dest='header_template', default=None,
+        metavar='TEMPLATE',
+        help='Use TEMPLATE as the header template in the output file.')
+    op.add_option('', '--strip-build-path', dest='strip_build_path',
+        default=None, metavar='BUILD_PATH',
+        help='Strip build path from header paths in the wrapper file.')
+    op.add_option('', '--insert-file', dest='inserted_files', default=[],
+        action='append', metavar='FILENAME',
+        help='Add the contents of FILENAME to the end of the wrapper file.')
+    
+    # Error options
+    op.add_option('', "--all-errors", action="store_true", default=False,
+        dest="show_all_errors", help="Display all warnings and errors even " \
+             "if they would not affect output.")
+    op.add_option('', "--show-long-errors", action="store_true", default=False,
+        dest="show_long_errors", help="Display long error messages " \
+            "instead of abbreviating error messages.")
+    op.add_option('', "--no-macro-warnings", action="store_false", default=True,
+        dest="show_macro_warnings", help="Do not print macro warnings.")
+
+    op.set_defaults(**ctypesgencore.options.default_values)
+    
+    (options, args) = op.parse_args(list(sys.argv[1:]))
+    options.headers = args
+
+    # Figure out what names will be defined by imported Python modules
+    options.other_known_names = find_names_in_modules(options.modules)
+    
+    # Required parameters
+    if len(args) < 1:
+        msgs.error_message('No header files specified', cls='usage')
+        sys.exit(1)
+
+    if options.output is None:
+        msgs.error_message('No output file specified', cls='usage')
+        sys.exit(1)
+
+    if len(options.libraries) == 0:
+        msgs.warning_message('No libraries specified', cls='usage')
+    
+    # Step 1: Parse
+    descriptions=ctypesgencore.parser.parse(options.headers,options)
+    
+    # Step 2: Process
+    ctypesgencore.processor.process(descriptions,options)
+    
+    # Step 3: Print
+    ctypesgencore.printer.WrapperPrinter(options.output,options,descriptions)
+    
+    msgs.status_message("Wrapping complete.")
+    
+    # Correct what may be a common mistake
+    if descriptions.all == []:
+        if not options.all_headers:
+            msgs.warning_message("There wasn't anything of use in the " \
+                "specified header file(s). Perhaps you meant to run with " \
+                "--all-headers to include objects from included sub-headers? ",
+                cls = 'usage')


Property changes on: grass/branches/develbranch_6/lib/python/ctypes/ctypesgen.py
___________________________________________________________________
Added: svn:executable
   + *

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/LICENSE
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/LICENSE	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/LICENSE	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,26 @@
+Copyright (c) 2007-2008, Ctypesgen Developers
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice,
+   this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+3. Neither the name of the <ORGANIZATION> nor the names of its
+   contributors may be used to endorse or promote products derived from
+   this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/__init__.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/__init__.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/__init__.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+
+"""
+Ctypesgencore is the module that contains the main body of ctypesgen - in fact,
+it contains everything but the command-line interface.
+
+ctypesgen's job is divided into three steps:
+
+Step 1: Parse
+
+Ctypesgen reads the input header files and parses them. It generates a list of
+function, variable, struct, union, enum, constant, typedef, and macro
+descriptions from the input files. These descriptions are encapsulated as
+ctypesgen.descriptions.Description objects.
+
+The package ctypesgen.parser is responsible for the parsing stage.
+
+Step 2: Process
+
+Ctypesgen processes the list of descriptions from the parsing stage. This is
+the stage where ctypesgen resolves name conflicts and filters descriptions using
+the regexes specified on the command line. Other processing steps take place
+at this stage, too. When processing is done, ctypesgen finalizes which
+descriptions will be included in the output file.
+
+The package ctypesgen.processor is responsible for the processing stage.
+
+Step 3: Print
+
+Ctypesgen writes the descriptions to the output file, along with a header.
+
+The package ctypesgen.printer is responsible for the printing stage.
+
+There are three modules in ctypesgencore that describe the format that the
+parser, processor, and printer modules use to pass information. They are:
+
+* descriptions: Classes to represent the descriptions.
+
+* ctypedecls: Classes to represent C types.
+
+* expressions: Classes to represent an expression in a language-independent
+format.
+"""
+
+
+__all__ = ["parser","processor","printer",
+           "descriptions","ctypedescs","expressions",
+           "messages","options"]
+
+# Workhorse modules
+import parser
+import processor
+import printer
+
+# Modules describing internal format
+import descriptions
+import ctypedescs
+import expressions
+
+# Helper modules
+import messages
+import options

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/ctypedescs.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/ctypedescs.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/ctypedescs.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,314 @@
+#!/usr/bin/env python
+
+'''
+ctypesgencore.ctypedescs contains classes to represent a C type. All of these
+classes are subclasses of CtypesType.
+
+Unlike in previous versions of ctypesgen, CtypesType and its subclasses are
+completely independent of the parser module.
+
+The most important method of CtypesType and its subclasses is the py_string
+method. str(ctype) returns a string which, when evaluated in the wrapper
+at runtime, results in a ctypes type object.
+
+For example, a CtypesType
+representing an array of four integers could be created using:
+
+>>> ctype = CtypesArray(CtypesSimple("int",True,0),4)
+
+str(ctype) would evaluate to "c_int * 4".
+'''
+
+import warnings
+
+__docformat__ = 'restructuredtext'
+
+ctypes_type_map = {
+   # typename   signed  longs
+    ('void',    True,   0): 'None',
+    ('int',     True,   0): 'c_int',
+    ('int',     False,  0): 'c_uint',
+    ('int',     True,   1): 'c_long',
+    ('int',     False,  1): 'c_ulong',
+    ('int',     True,   2): 'c_longlong',
+    ('int',     False,  2): 'c_ulonglong',
+    ('char',    True,   0): 'c_char',
+    ('char',    False,  0): 'c_ubyte',
+    ('short',   True,   0): 'c_short',
+    ('short',   False,  0): 'c_ushort',
+    ('float',   True,   0): 'c_float',
+    ('double',  True,   0): 'c_double',
+    ('size_t',  True,   0): 'c_size_t',
+    ('int8_t',  True,   0): 'c_int8',
+    ('int16_t', True,   0): 'c_int16',
+    ('int32_t', True,   0): 'c_int32',
+    ('int64_t', True,   0): 'c_int64',
+    ('apr_int64_t',True,0): 'c_int64',
+    ('off64_t', True,   0): 'c_int64',
+    ('uint8_t', True,   0): 'c_uint8',
+    ('uint16_t',True,   0): 'c_uint16',
+    ('uint32_t',True,   0): 'c_uint32',
+    ('uint64_t',True,   0): 'c_uint64',
+    ('apr_uint64_t',True,0): 'c_uint64',
+    ('wchar_t', True,   0): 'c_wchar',
+    ('ptrdiff_t',True,  0): 'c_ptrdiff_t',  # Requires definition in preamble
+    ('ssize_t', True,   0): 'c_ptrdiff_t',  # Requires definition in preamble
+    ('va_list', True,   0): 'c_void_p',
+}
+
+# This protocol is used for walking type trees.
+class CtypesTypeVisitor(object):
+    def visit_struct(self, struct):
+        pass
+
+    def visit_enum(self, enum):
+        pass
+    
+    def visit_typedef(self, name):
+        pass
+    
+    def visit_error(self, error, cls):
+        pass
+    
+    def visit_identifier(self, identifier):
+        # This one comes from inside ExpressionNodes. There may be
+        # ExpressionNode objects in array count expressions.
+        pass
+
+def visit_type_and_collect_info(ctype):
+    class Visitor(CtypesTypeVisitor):
+        def visit_struct(self,struct):
+            structs.append(struct)
+        def visit_enum(self,enum):
+            enums.append(enum)
+        def visit_typedef(self,typedef):
+            typedefs.append(typedef)
+        def visit_error(self,error,cls):
+            errors.append((error,cls))
+        def visit_identifier(self,identifier):
+            identifiers.append(identifier)
+    structs = []
+    enums = []
+    typedefs = []
+    errors = []
+    identifiers = []
+    v = Visitor()
+    ctype.visit(v)
+    return structs,enums,typedefs,errors,identifiers
+
+# Remove one level of indirection from function pointer; needed for typedefs
+# and function parameters.
+def remove_function_pointer(t):
+    if type(t) == CtypesPointer and type(t.destination) == CtypesFunction:
+        return t.destination
+    elif type(t) == CtypesPointer:
+        t.destination = remove_function_pointer(t.destination)
+        return t
+    else:
+        return t
+
+class CtypesType(object):
+    def __init__(self):
+        self.errors=[]
+    
+    def __repr__(self):
+        return "<Ctype \"%s\">" % self.py_string()
+    
+    def error(self,message,cls=None):
+        self.errors.append((message,cls))
+    
+    def visit(self,visitor):
+        for error,cls in self.errors:
+            visitor.visit_error(error,cls)
+
+class CtypesSimple(CtypesType):
+    """Represents a builtin type, like "char" or "int"."""
+    def __init__(self, name, signed, longs):
+        CtypesType.__init__(self)
+        self.name = name
+        self.signed = signed
+        self.longs = longs
+
+    def py_string(self):
+        return ctypes_type_map[(self.name,self.signed,self.longs)]
+
+class CtypesSpecial(CtypesType):
+    def __init__(self,name):
+        CtypesType.__init__(self)
+        self.name = name
+    
+    def py_string(self):
+        return self.name
+
+class CtypesTypedef(CtypesType):
+    """Represents a type defined by a typedef."""
+    def __init__(self, name):
+        CtypesType.__init__(self)
+        self.name = name
+    
+    def visit(self,visitor):
+        if not self.errors:
+            visitor.visit_typedef(self.name)
+        CtypesType.visit(self,visitor)
+    
+    def py_string(self):
+        return self.name
+
+class CtypesBitfield(CtypesType):
+    def __init__(self, base, bitfield):
+        CtypesType.__init__(self)
+        self.base = base
+        self.bitfield = bitfield
+    
+    def visit(self,visitor):
+        self.base.visit(visitor)
+        CtypesType.visit(self,visitor)
+    
+    def py_string(self):
+        return self.base.py_string()
+
+class CtypesPointer(CtypesType):
+    def __init__(self, destination, qualifiers):
+        CtypesType.__init__(self)
+        self.destination = destination
+        self.qualifiers = qualifiers
+    
+    def visit(self,visitor):
+        if self.destination:
+            self.destination.visit(visitor)
+        CtypesType.visit(self,visitor)
+
+    def py_string(self):
+        return 'POINTER(%s)' % self.destination.py_string()
+
+class CtypesArray(CtypesType):
+    def __init__(self, base, count):
+        CtypesType.__init__(self)
+        self.base = base
+        self.count = count
+    
+    def visit(self,visitor):
+        self.base.visit(visitor)
+        if self.count:
+            self.count.visit(visitor)
+        CtypesType.visit(self,visitor)
+    
+    def py_string(self):
+        if self.count is None:
+            return 'POINTER(%s)' % self.base.py_string()
+        if type(self.base) == CtypesArray:
+            return '(%s) * %s' % (self.base.py_string(),
+                                  self.count.py_string(False))
+        else:
+            return '%s * %s' % (self.base.py_string(),
+                                self.count.py_string(False))
+
+class CtypesFunction(CtypesType):
+    def __init__(self, restype, parameters, variadic=False):
+        CtypesType.__init__(self)
+        self.restype = restype
+
+        # Don't allow POINTER(None) (c_void_p) as a restype... causes errors
+        # when ctypes automagically returns it as an int.
+        # Instead, convert to POINTER(c_void).  c_void is not a ctypes type,
+        # you can make it any arbitrary type.
+        if type(self.restype) == CtypesPointer and \
+           type(self.restype.destination) == CtypesSimple and \
+           self.restype.destination.name == 'None':
+            self.restype = CtypesPointer(CtypesSpecial('c_void'), ())
+
+        # Return 'ReturnString' instead of simply 'String'
+        if self.restype.py_string() == 'POINTER(c_char)':
+            self.restype = CtypesSpecial('ReturnString')
+
+        self.argtypes = [remove_function_pointer(p) for p in parameters]
+        self.variadic = variadic
+    
+    def visit(self,visitor):
+        self.restype.visit(visitor)
+        for a in self.argtypes:
+            a.visit(visitor)
+        CtypesType.visit(self,visitor)
+
+    def py_string(self):
+        return 'CFUNCTYPE(UNCHECKED(%s), %s)' % (self.restype.py_string(),
+            ', '.join([a.py_string() for a in self.argtypes]))
+
+last_tagnum = 0
+def anonymous_struct_tag():
+    global last_tagnum
+    last_tagnum += 1
+    return 'anon_%d' % last_tagnum
+
+class CtypesStruct(CtypesType):
+    def __init__(self, tag, variety, members, src=None):
+        CtypesType.__init__(self)
+        self.tag = tag
+        self.variety = variety # "struct" or "union"
+        self.members = members
+        
+        if not self.tag:
+            self.tag = anonymous_struct_tag()
+            self.anonymous = True
+        else:
+            self.anonymous = False
+        
+        if self.members==None:
+            self.opaque = True
+        else:
+            self.opaque = False
+        
+        self.src = src        
+    
+    def get_required_types(self):
+        types = CtypesType.get_required_types(self)
+        types.add((self.variety,self.tag))
+        return types
+    
+    def visit(self,visitor):
+        visitor.visit_struct(self)
+        if not self.opaque:
+            for name,ctype in self.members:
+                ctype.visit(visitor)
+        CtypesType.visit(self,visitor)
+    
+    def get_subtypes(self):
+        if self.opaque:
+            return set()
+        else:
+            return set([m[1] for m in self.members])
+
+    def py_string(self):
+        return "%s_%s" % (self.variety,self.tag)
+
+last_tagnum = 0
+def anonymous_enum_tag():
+    global last_tagnum
+    last_tagnum += 1
+    return 'anon_%d' % last_tagnum
+
+class CtypesEnum(CtypesType):
+    def __init__(self, tag, enumerators, src=None):
+        CtypesType.__init__(self)
+        self.tag = tag
+        self.enumerators = enumerators
+        
+        if not self.tag:
+            self.tag = anonymous_enum_tag()
+            self.anonymous = True
+        else:
+            self.anonymous = False
+        
+        if self.enumerators==None:
+            self.opaque = True
+        else:
+            self.opaque = False
+        
+        self.src = src
+        
+    def visit(self,visitor):
+        visitor.visit_enum(self)
+        CtypesType.visit(self,visitor)
+
+    def py_string(self):
+        return 'enum_%s' % self.tag

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/descriptions.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/descriptions.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/descriptions.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,189 @@
+#!/usr/bin/env python
+
+"""
+ctypesgencore.descriptions contains classes to represent a description of a
+struct, union, enum, function, constant, variable, or macro. All the
+description classes are subclassed from an abstract base class, Description.
+The descriptions module also contains a class, DescriptionCollection, to hold
+lists of Description objects.
+"""
+
+class DescriptionCollection(object):
+    """Represents a collection of Descriptions."""
+    def __init__(self,constants,typedefs,structs,enums,functions,variables,
+                 macros,all,output_order):
+        # Each category attribute is a list of the matching Description
+        # subclass instances.
+        self.constants=constants
+        self.typedefs=typedefs
+        self.structs=structs
+        self.enums=enums
+        self.functions=functions
+        self.variables=variables
+        self.macros=macros
+        # Every description regardless of category. (The parameter name
+        # shadows the builtin all() inside __init__ only.)
+        self.all=all
+        # Order in which descriptions should be written to the output.
+        self.output_order=output_order
+
+class Description(object):
+    """Represents a constant, typedef, struct, function, variable, enum,
+    or macro description. Description is an abstract base class."""
+    def __init__(self,src=None):
+        self.src=src # A tuple of (filename, lineno)
+        
+        # If object will be included in output file. Values are "yes", "never",
+        # and "if_needed".
+        self.include_rule="yes" 
+        
+        # A word about requirements, and dependents:
+        # If X requires Y, Y is in X.requirements.
+        # If X is in Y.requirements, then Y is in X.dependents.
+        self.requirements=set()
+        self.dependents=set()
+        
+        # If the processor module finds a fatal error that prevents a
+        # a description from being output, then it appends a string describing
+        # the problem to 'errors'. If it finds a nonfatal error, it appends a
+        # string to 'warnings'. If the description would have been output, then
+        # the errors and warnings are printed.
+        
+        # If there is anything in 'errors' after processing is complete, the
+        # description is not output.
+        
+        self.errors=[] 
+        self.warnings=[]
+    
+    def add_requirements(self,reqs):
+        # Record reqs as requirements of self and, symmetrically, self
+        # as a dependent of each requirement.
+        self.requirements = self.requirements.union(reqs)
+        for req in reqs:
+            req.dependents.add(self)
+    
+    def error(self,msg,cls = None):
+        # cls is a message-class string (see ctypesgencore.messages).
+        self.errors.append((msg,cls))
+    def warning(self,msg,cls = None):
+        self.warnings.append((msg,cls))
+    
+    def __repr__(self):
+        return "<Description: %s>" % self.casual_name()
+    
+    # The three methods below are abstract stubs (they implicitly return
+    # None); concrete subclasses override all of them.
+    def casual_name(self):
+        """Return a name to show the user."""
+    def py_name(self):
+        """Return the name associated with this description in Python code."""
+    def c_name(self):
+        """Return the name associated with this description in C code."""
+
+class ConstantDescription(Description):
+    """Simple class to contain information about a constant."""
+    def __init__(self,name,value,src=None):
+        Description.__init__(self,src)
+        # Name of constant, a string
+        self.name=name 
+        # Value of constant, as an ExpressionNode object
+        self.value=value 
+    def casual_name(self):
+        return "Constant \"%s\""%self.name
+    def py_name(self):
+        # Constants keep their C name on the Python side.
+        return self.name
+    def c_name(self):
+        return self.name
+
+class TypedefDescription(Description):
+    """Simple container class for a type definition."""
+    def __init__(self,name,ctype,src=None):
+        Description.__init__(self,src)
+        self.name=name # Name, a string
+        self.ctype=ctype # The base type as a ctypedescs.CtypeType object
+    def casual_name(self):
+        return "Typedef \"%s\""%self.name
+    def py_name(self):
+        # Typedefs keep their C name on the Python side.
+        return self.name
+    def c_name(self):
+        return self.name
+
+class StructDescription(Description):
+    """Simple container class for a structure or union definition."""
+    def __init__(self,tag,variety,members,opaque,ctype,src=None):
+        Description.__init__(self,src)
+        # The name of the structure minus the "struct" or "union"
+        self.tag=tag 
+        # A string "struct" or "union"
+        self.variety=variety 
+        # A list of pairs of (name,ctype)
+        self.members=members 
+        # True if struct body was not specified in header file
+        self.opaque=opaque 
+        # The original CtypeStruct that created the struct
+        self.ctype=ctype 
+    def casual_name(self):
+        return "%s \"%s\""%(self.variety.capitalize(),self.tag)
+    def py_name(self):
+        # e.g. "struct_foo" - matches the ctypedescs py_string() naming.
+        return "%s_%s"%(self.variety,self.tag)
+    def c_name(self):
+        return "%s %s"%(self.variety,self.tag)
+
+class EnumDescription(Description):
+    """Simple container class for an enum definition."""
+    def __init__(self,tag,members,ctype,src=None):
+        Description.__init__(self,src)
+        # The name of the enum, minus the "enum"
+        self.tag=tag 
+        # A list of (name,value) pairs where value is a number
+        self.members=members 
+        # The original CtypeEnum that created the enum
+        self.ctype=ctype 
+    def casual_name(self):
+        return "Enum \"%s\""%self.tag
+    def py_name(self):
+        # e.g. "enum_foo" - matches CtypesEnum.py_string() naming.
+        return "enum_%s"%self.tag
+    def c_name(self):
+        return "enum %s"%self.tag
+
+class FunctionDescription(Description):
+    """Simple container class for a C function."""
+    def __init__(self,name,restype,argtypes,variadic=False,src=None):
+        Description.__init__(self,src)
+        # Name, a string
+        self.name=name 
+        # Name according to C - stored in case description is renamed
+        self.cname=name 
+        # A ctype representing return type
+        self.restype=restype 
+        # A list of ctypes representing the argument types
+        self.argtypes=argtypes 
+        # Does this function accept a variable number of arguments?
+        self.variadic=variadic 
+    def casual_name(self):
+        return "Function \"%s\""%self.name
+    def py_name(self):
+        # py_name may diverge from c_name if the processor renames the
+        # description; cname preserves the original C symbol.
+        return self.name
+    def c_name(self):
+        return self.cname
+
+class VariableDescription(Description):
+    """Simple container class for a C variable declaration."""
+    def __init__(self,name,ctype,src=None):
+        Description.__init__(self,src)
+        # Name, a string
+        self.name=name 
+        # Name according to C - stored in case description is renamed
+        self.cname=name 
+        # The type of the variable
+        self.ctype=ctype 
+    def casual_name(self):
+        return "Variable \"%s\""%self.name
+    def py_name(self):
+        # May diverge from c_name after a rename; cname keeps the symbol.
+        return self.name
+    def c_name(self):
+        return self.cname
+
+class MacroDescription(Description):
+    """Simple container class for a C macro."""
+    def __init__(self,name,params,expr,src=None):
+        Description.__init__(self,src)
+        self.name = name
+        # Parameter name list for function-like macros (presumably None
+        # for object-like macros - TODO confirm against the parser).
+        self.params = params
+        self.expr = expr # ExpressionNode for the macro's body
+    def casual_name(self):
+        return "Macro \"%s\""%self.name
+    def py_name(self):
+        return self.name
+    def c_name(self):
+        return self.name
\ No newline at end of file

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/expressions.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/expressions.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/expressions.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,305 @@
+#!/usr/bin/env python
+
+'''
+The expressions module contains classes to represent an expression. The main
+class is ExpressionNode. ExpressionNode's most useful method is py_string(),
+which returns a Python string representing that expression.
+'''
+
+from ctypedescs import *
+import keyword
+import warnings
+
+# Right now, the objects in this module are all oriented toward evaluation.
+# However, they don't have to be, since ctypes objects are mutable. For example,
+# shouldn't it be possible to translate the macro:
+#
+#   #define INCREMENT(x) ++x
+#
+# into Python? The resulting code should be:
+#
+#   def INCREMENT(x):
+#       x.value+=1
+#       return x.value
+#
+# On the other hand, this would be a challenge to write.
+
+class EvaluationContext(object):
+    '''Interface for evaluating expression nodes.
+    '''
+    def evaluate_identifier(self, name):
+        warnings.warn('Attempt to evaluate identifier "%s" failed' % name)
+        return 0
+
+    def evaluate_sizeof(self, type):
+        warnings.warn('Attempt to evaluate sizeof "%s" failed' % str(type))
+        return 0
+    
+    def evaluate_sizeof(self, object):
+        warnings.warn('Attempt to evaluate sizeof object "%s" failed' % str(object))
+        return 0
+    
+    def evaluate_parameter(self, name):
+        warnings.warn('Attempt to evaluate parameter "%s" failed' % name)
+        return 0
+
+class ExpressionNode(object):
+    # Abstract base for all expression nodes; tracks translation errors
+    # that the visitor machinery later reports.
+    def __init__(self):
+        self.errors = []
+    
+    def error(self,message,cls = None):
+        # cls is a message-class string (see ctypesgencore.messages).
+        self.errors.append((message,cls))
+    
+    def __repr__(self):
+        try:
+            string = repr(self.py_string(True))
+        except ValueError:
+            # py_string() raises ValueError on unsupported nodes.
+            string = "<error in expression node>"
+        return "<ExpressionNode: %s>" % string
+    
+    def visit(self,visitor):
+        # Subclasses call this after visiting their own children.
+        for error,cls in self.errors:
+            visitor.visit_error(error,cls)
+
+class ConstantExpressionNode(ExpressionNode):
+    # A literal constant (number, string, ...).
+    def __init__(self, value):
+        ExpressionNode.__init__(self)
+        self.value = value
+    
+    def evaluate(self, context):
+        return self.value
+
+    def py_string(self, can_be_ctype):
+        # repr() of an infinity is not a valid Python literal, so emit
+        # the float('inf') constructor form instead.
+        if self.value == float('inf'):
+            return "float('inf')"
+        elif self.value == float('-inf'):
+            return "float('-inf')"
+        return repr(self.value)
+
+class IdentifierExpressionNode(ExpressionNode):
+    # A reference to a named C identifier (constant, function, ...).
+    def __init__(self, name):
+        ExpressionNode.__init__(self)
+        self.name = name
+
+    def evaluate(self, context):
+        return context.evaluate_identifier(self.name)
+    
+    def visit(self, visitor):
+        visitor.visit_identifier(self.name)
+        ExpressionNode.visit(self,visitor)
+    
+    def py_string(self, can_be_ctype):
+        # Errors will be thrown in generated code if identifier evaluates
+        # to a ctypes object, and can_be_ctype is False.
+        return self.name
+
+class ParameterExpressionNode(ExpressionNode):
+    # A reference to a macro parameter by name.
+    def __init__(self, name):
+        ExpressionNode.__init__(self)
+        self.name = name
+    
+    def evaluate(self, context):
+        return context.evaluate_parameter(self.name)
+    
+    def visit(self, visitor):
+        ExpressionNode.visit(self,visitor)
+    
+    def py_string(self, can_be_ctype):
+        # Errors will be thrown in generated code if parameter is
+        # a ctypes object, and can_be_ctype is False.
+        return self.name
+
+class UnaryExpressionNode(ExpressionNode):
+    # A unary C operator applied to one child node. "op" is the Python
+    # function implementing the operator (None if not evaluatable),
+    # "format" is a %-template with one slot for the child's py_string.
+    def __init__(self, name, op, format, child_can_be_ctype, child):
+        ExpressionNode.__init__(self)
+        self.name = name
+        self.op = op
+        self.format = format
+        self.child_can_be_ctype = child_can_be_ctype
+        self.child = child
+    
+    def visit(self, visitor):
+        self.child.visit(visitor)
+        ExpressionNode.visit(self,visitor)
+    
+    def evaluate(self, context):
+        if self.op:
+            return self.op(self.child.evaluate(context))
+        else:
+            raise ValueError,"The C operator \"%s\" can't be evaluated right " \
+                "now" % self.name
+
+    def py_string(self, can_be_ctype):
+        # The child may be a ctype only if both this operator allows it
+        # and the surrounding context allows it.
+        return self.format % \
+            self.child.py_string(self.child_can_be_ctype and can_be_ctype)
+
+class SizeOfExpressionNode(ExpressionNode):
+    # sizeof(child), where child is either a CtypesType or an expression.
+    def __init__(self, child):
+        ExpressionNode.__init__(self)
+        self.child = child
+    
+    def visit(self, visitor):
+        self.child.visit(visitor)
+        ExpressionNode.visit(self,visitor)
+    
+    def evaluate(self, context):
+        # NOTE(review): evaluate_sizeof_object is not defined on the base
+        # EvaluationContext as committed (it defines evaluate_sizeof
+        # twice) - verify the context classes used at runtime provide it.
+        if isinstance(self.child, CtypesType):
+            return context.evaluate_sizeof(self.child)
+        else:
+            return context.evaluate_sizeof_object(self.child)
+
+    def py_string(self, can_be_ctype):
+        # CtypesType.py_string takes no argument; expression nodes take
+        # the can_be_ctype flag.
+        if isinstance(self.child, CtypesType):
+            return 'sizeof(%s)' % self.child.py_string()
+        else:
+            return 'sizeof(%s)' % self.child.py_string(True)
+
+class BinaryExpressionNode(ExpressionNode):
+    def __init__(self, name, op, format, can_be_ctype, left, right):
+        ExpressionNode.__init__(self)
+        self.name = name
+        self.op = op
+        self.format = format
+        self.can_be_ctype = can_be_ctype
+        self.left = left
+        self.right = right
+    
+    def visit(self, visitor):
+        self.left.visit(visitor)
+        self.right.visit(visitor)
+        ExpressionNode.visit(self,visitor)
+    
+    def evaluate(self, context):
+        if self.op:
+           return self.op(self.left.evaluate(context), 
+                          self.right.evaluate(context))
+        else:
+            raise ValueError,"The C operator \"%s\" can't be evaluated right " \
+                "now" % self.name
+
+    def py_string(self, can_be_ctype):
+        return self.format % \
+            (self.left.py_string(self.can_be_ctype[0] and can_be_ctype),
+             self.right.py_string(self.can_be_ctype[0] and can_be_ctype))
+
+class ConditionalExpressionNode(ExpressionNode):
+    # The C ternary operator: cond ? yes : no.
+    def __init__(self, cond, yes, no):
+        ExpressionNode.__init__(self)
+        self.cond = cond
+        self.yes = yes
+        self.no = no
+    
+    def visit(self, visitor):
+        self.cond.visit(visitor)
+        self.yes.visit(visitor)
+        self.no.visit(visitor)
+        ExpressionNode.visit(self,visitor)
+    
+    def evaluate(self, context):
+        if self.cond.evaluate(context):
+            return self.yes.evaluate(context)
+        else:
+            return self.no.evaluate(context)
+
+    def py_string(self, can_be_ctype):
+        # NOTE(review): the "a and b or c" idiom yields the wrong value
+        # when the "yes" branch evaluates to something falsy (0, '', None)
+        # - consider emitting a real conditional expression instead.
+        return "%s and %s or %s" % \
+            (self.cond.py_string(True),
+             self.yes.py_string(can_be_ctype),
+             self.no.py_string(can_be_ctype))
+
+class AttributeExpressionNode(ExpressionNode):
+    def __init__(self, op, format, base, attribute):
+        ExpressionNode.__init__(self)
+        self.op = op
+        self.format = format
+        self.base = base
+        self.attribute = attribute
+        
+        # Attribute access will raise parse errors if you don't do this. 
+        # Fortunately, the processor module does the same thing to 
+        # the struct member name.
+        if self.attribute in keyword.kwlist:
+            self.attribute = "_"+self.attribute
+    
+    def visit(self,visitor):
+        self.base.visit(visitor)
+        ExpressionNode.visit(self,visitor)
+    
+    def evaluate(self, context):
+        return self.op(self.base.evalute(context),self.attribute)
+    
+    def py_string(self, can_be_ctype):
+        if can_be_ctype:
+            return self.format % (self.base.py_string(can_be_ctype),
+                                  self.attribute)
+        else:
+            return "(%s.value)" % (self.format % \
+                    (self.base.py_string(can_be_ctype), self.attribute))
+
+class CallExpressionNode(ExpressionNode):
+    # A function call: function(arg1, arg2, ...).
+    def __init__(self,function,arguments):
+        ExpressionNode.__init__(self)
+        self.function = function
+        self.arguments = arguments
+    
+    def visit(self,visitor):
+        self.function.visit(visitor)
+        for arg in self.arguments:
+            arg.visit(visitor)
+        ExpressionNode.visit(self,visitor)
+    
+    def evaluate(self,context):
+        # Evaluate the arguments, then apply the evaluated function.
+        arguments = [arg.evaluate(context) for arg in self.arguments]
+        return self.function.evaluate(context)(*arguments)
+    
+    def py_string(self, can_be_ctype):
+        # When a plain value is required, unwrap the result via .value.
+        function = self.function.py_string(can_be_ctype)
+        arguments = [x.py_string(can_be_ctype) for x in self.arguments]
+        if can_be_ctype:
+            return '(%s (%s))' % (function,", ".join(arguments))
+        else:
+            return '((%s (%s)).value)' % (function,", ".join(arguments))
+
+# There seems not to be any reasonable way to translate C typecasts
+# into Python. Ctypesgen doesn't try, except for the special case of NULL.
+class TypeCastExpressionNode(ExpressionNode):
+    # A C typecast. Only the (pointer)0 == NULL case is translated;
+    # all other casts pass the base expression through unchanged.
+    def __init__(self, base, ctype):
+        ExpressionNode.__init__(self)
+        self.base = base
+        self.ctype = ctype
+        # True iff this is a cast of the constant 0 to a pointer type.
+        self.isnull = isinstance(ctype, CtypesPointer) and \
+                      isinstance(base, ConstantExpressionNode) and \
+                      base.value == 0
+    
+    def visit(self,visitor):
+        # No need to visit ctype because it isn't actually used
+        self.base.visit(visitor)
+        ExpressionNode.visit(self,visitor)
+    
+    def evaluate(self,context):
+        if self.isnull:
+            return None
+        else:
+            return self.base.evaluate(context)
+    
+    def py_string(self, can_be_ctype):
+        if self.isnull:
+            return "None"
+        else:
+            return self.base.py_string(can_be_ctype)
+
+class UnsupportedExpressionNode(ExpressionNode):
+    def __init__(self,message):
+        ExpressionNode.__init__(self)
+        self.message = message
+        self.error(message,'unsupported-type')
+    
+    def evaluate(self,context):
+        raise ValueError, "Tried to evaluate an unsupported expression " \
+            "node: %s" % self.message
+    
+    def __repr__(self):
+        return "<UnsupportedExpressionNode>"
+    
+    def py_string(self, can_be_ctype):
+        raise ValueError, "Called py_string() an unsupported expression " \
+            "node: %s" % self.message
\ No newline at end of file

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/libraryloader.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/libraryloader.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/libraryloader.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,248 @@
+# ----------------------------------------------------------------------------
+# Copyright (c) 2008 David James
+# Copyright (c) 2006-2008 Alex Holkner
+# All rights reserved.
+# 
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions 
+# are met:
+#
+#  * Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#  * Redistributions in binary form must reproduce the above copyright 
+#    notice, this list of conditions and the following disclaimer in
+#    the documentation and/or other materials provided with the
+#    distribution.
+#  * Neither the name of pyglet nor the names of its
+#    contributors may be used to endorse or promote products
+#    derived from this software without specific prior written
+#    permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+# ----------------------------------------------------------------------------
+
+import os.path, re, sys, glob
+import ctypes
+import ctypes.util
+
+def _environ_path(name):
+    """Split the environment variable 'name' on ':' into a path list;
+    return [] if it is unset. (Uses a literal ':', not os.pathsep -
+    only the Darwin/Posix loaders call this.)"""
+    if name in os.environ:
+        return os.environ[name].split(":")
+    else:
+        return []
+
+class LibraryLoader(object):
+    # Base class: generates candidate paths for a library name and loads
+    # the first one that exists. Platform subclasses override load() and
+    # getplatformpaths().
+    def __init__(self):
+        # Extra directories to search, set via add_library_search_dirs().
+        self.other_dirs=[]
+    
+    def load_library(self,libname):
+        """Given the name of a library, load it."""
+        paths = self.getpaths(libname)
+        
+        for path in paths:
+            if os.path.exists(path):
+                return self.load(path)
+        
+        # No candidate existed on disk.
+        raise ImportError,"%s not found." % libname
+    
+    def load(self,path):
+        """Given a path to a library, load it."""
+        try:
+            # Darwin requires dlopen to be called with mode RTLD_GLOBAL instead
+            # of the default RTLD_LOCAL.  Without this, you end up with
+            # libraries not being loadable, resulting in "Symbol not found"
+            # errors
+            if sys.platform == 'darwin':
+                return ctypes.CDLL(path, ctypes.RTLD_GLOBAL)
+            else:
+                return ctypes.cdll.LoadLibrary(path)
+        except OSError,e:
+            # Surface dlopen failures as ImportError for callers.
+            raise ImportError,e
+    
+    def getpaths(self,libname):
+        """Return a list of paths where the library might be found."""
+        if os.path.isabs(libname):
+            # Absolute paths are used as-is.
+            yield libname
+        
+        else:
+            for path in self.getplatformpaths(libname):
+                yield path
+            
+            # Fall back on ctypes' own search.
+            path = ctypes.util.find_library(libname)
+            if path: yield path
+    
+    def getplatformpaths(self, libname):
+        # Overridden by platform subclasses; base class adds nothing.
+        return []
+
+# Darwin (Mac OS X)
+
+class DarwinLibraryLoader(LibraryLoader):
+    # Candidate filename templates tried for a bare library name.
+    name_formats = ["lib%s.dylib", "lib%s.so", "lib%s.bundle", "%s.dylib",
+                "%s.so", "%s.bundle", "%s"]
+    
+    def getplatformpaths(self,libname):
+        # NOTE(review): os.path.pathsep is ':'; getdirs() below tests
+        # '/' in libname - confirm whether os.path.sep was intended here.
+        if os.path.pathsep in libname:
+            names = [libname]
+        else:
+            names = [format % libname for format in self.name_formats]
+        
+        for dir in self.getdirs(libname):
+            for name in names:
+                yield os.path.join(dir,name)
+    
+    def getdirs(self,libname):
+        '''Implements the dylib search as specified in Apple documentation:
+        
+        http://developer.apple.com/documentation/DeveloperTools/Conceptual/
+            DynamicLibraries/Articles/DynamicLibraryUsageGuidelines.html
+
+        Before commencing the standard search, the method first checks
+        the bundle's ``Frameworks`` directory if the application is running
+        within a bundle (OS X .app).
+        '''
+
+        dyld_fallback_library_path = _environ_path("DYLD_FALLBACK_LIBRARY_PATH")
+        if not dyld_fallback_library_path:
+            # Default fallback per the dyld search rules.
+            dyld_fallback_library_path = [os.path.expanduser('~/lib'),
+                                          '/usr/local/lib', '/usr/lib']
+        
+        dirs = []
+        
+        if '/' in libname:
+            dirs.extend(_environ_path("DYLD_LIBRARY_PATH"))
+        else:
+            dirs.extend(_environ_path("LD_LIBRARY_PATH"))
+            dirs.extend(_environ_path("DYLD_LIBRARY_PATH"))
+
+        dirs.extend(self.other_dirs)
+        dirs.append(".")
+        
+        # py2app/py2exe-style frozen .app bundles keep dylibs under
+        # Contents/Frameworks, next to Resources.
+        if hasattr(sys, 'frozen') and sys.frozen == 'macosx_app':
+            dirs.append(os.path.join(
+                os.environ['RESOURCEPATH'],
+                '..',
+                'Frameworks'))
+
+        dirs.extend(dyld_fallback_library_path)
+        
+        return dirs
+
+# Posix
+
+class PosixLibraryLoader(LibraryLoader):
+    # Lazily-built mapping of library names/filenames to full paths;
+    # shared across instances (class attribute).
+    _ld_so_cache = None
+    
+    def _create_ld_so_cache(self):
+        # Recreate search path followed by ld.so.  This is going to be
+        # slow to build, and incorrect (ld.so uses ld.so.cache, which may
+        # not be up-to-date).  Used only as fallback for distros without
+        # /sbin/ldconfig.
+        #
+        # We assume the DT_RPATH and DT_RUNPATH binary sections are omitted.
+
+        directories = []
+        for name in ("LD_LIBRARY_PATH",
+                     "SHLIB_PATH", # HPUX
+                     "LIBPATH", # OS/2, AIX
+                     "LIBRARY_PATH", # BE/OS
+                    ):
+            if name in os.environ:
+                directories.extend(os.environ[name].split(os.pathsep))
+        directories.extend(self.other_dirs)
+        directories.append(".")
+
+        # NOTE(review): this reads /etc/ld.so.conf line-by-line and does
+        # not follow "include" directives used by modern distros.
+        try: directories.extend([dir.strip() for dir in open('/etc/ld.so.conf')])
+        except IOError: pass
+
+        directories.extend(['/lib', '/usr/lib', '/lib64', '/usr/lib64'])
+
+        cache = {}
+        lib_re = re.compile(r'lib(.*)\.s[ol]')
+        ext_re = re.compile(r'\.s[ol]$')
+        for dir in directories:
+            try:
+                for path in glob.glob("%s/*.s[ol]*" % dir):
+                    file = os.path.basename(path)
+
+                    # Index by filename
+                    if file not in cache:
+                        cache[file] = path
+                    
+                    # Index by library name
+                    match = lib_re.match(file)
+                    if match:
+                        library = match.group(1)
+                        if library not in cache:
+                            cache[library] = path
+            except OSError:
+                pass
+
+        self._ld_so_cache = cache
+    
+    def getplatformpaths(self, libname):
+        if self._ld_so_cache is None:
+            self._create_ld_so_cache()
+
+        result = self._ld_so_cache.get(libname)
+        if result: yield result
+
+        # NOTE(review): find_library may return a bare filename; joining
+        # it under /lib assumes that is where it lives - verify.
+        path = ctypes.util.find_library(libname)
+        if path: yield os.path.join("/lib",path)
+
+# Windows
+
+class _WindowsLibrary(object):
+    # Wraps a DLL loaded under both calling conventions (cdecl and
+    # stdcall); attribute lookup tries cdecl first, then stdcall.
+    def __init__(self, path):
+        self.cdll = ctypes.cdll.LoadLibrary(path)
+        self.windll = ctypes.windll.LoadLibrary(path)
+
+    def __getattr__(self, name):
+        try: return getattr(self.cdll,name)
+        except AttributeError:
+            try: return getattr(self.windll,name)
+            except AttributeError:
+                # Not exported under either convention.
+                raise
+
+class WindowsLibraryLoader(LibraryLoader):
+    # Candidate filename templates for a bare library name on Windows.
+    name_formats = ["%s.dll", "lib%s.dll"]
+    
+    def load(self, path):
+        # Load under both calling conventions (see _WindowsLibrary).
+        return _WindowsLibrary(path)
+    
+    def getplatformpaths(self, libname):
+        if os.path.sep not in libname:
+            for name in self.name_formats:
+                path = ctypes.util.find_library(name % libname)
+                if path:
+                    yield path
+
+# Platform switching
+
+# If your value of sys.platform does not appear in this dict, please contact
+# the Ctypesgen maintainers.
+
+loaderclass = {
+    "darwin":   DarwinLibraryLoader,
+    "cygwin":   WindowsLibraryLoader,
+    "win32":    WindowsLibraryLoader
+}
+
+# Module-level singleton; everything else defaults to the Posix loader.
+loader = loaderclass.get(sys.platform, PosixLibraryLoader)()
+
+def add_library_search_dirs(other_dirs):
+    # Note: replaces (does not extend) the loader's extra search dirs.
+    loader.other_dirs = other_dirs
+
+# Public entry point bound to the singleton's method.
+load_library = loader.load_library
+
+del loaderclass

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/messages.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/messages.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/messages.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+"""
+ctypesgencore.messages contains functions to display status, error, or warning
+messages to the user. Warning and error messages are also associated
+with a "message class", which is a string, which currently has no effect.
+
+Error classes are:
+'usage' - there was something funny about the command-line parameters
+'cparser' - there was a syntax error in the header file
+'missing-library' - a library could not be loaded
+'macro' - a macro could not be translated to Python
+'unsupported-type' - there was a type in the header that ctypes cannot use, like
+    "long double".
+'other' - catchall.
+
+Warning classes are:
+'usage' - there was something funny about the command-line parameters
+'rename' - a description has been renamed to avoid a name conflict
+'other' - catchall.
+"""
+
+import sys
+
+__all__ = ["error_message","warning_message","status_message"]
+
+def error_message(msg,cls=None):
+    # cls is a message-class string (see module docstring); it is
+    # accepted but currently unused.
+    print "Error: %s" % msg
+
+def warning_message(msg,cls=None):
+    print "Warning: %s" % msg
+
+def status_message(msg):
+    print "Status: %s" % msg

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/old libraryloader.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/old libraryloader.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/old libraryloader.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,312 @@
+# ----------------------------------------------------------------------------
+# Copyright (c) 2008 David James
+# Copyright (c) 2006-2008 Alex Holkner
+# All rights reserved.
+# 
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions 
+# are met:
+#
+#  * Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#  * Redistributions in binary form must reproduce the above copyright 
+#    notice, this list of conditions and the following disclaimer in
+#    the documentation and/or other materials provided with the
+#    distribution.
+#  * Neither the name of pyglet nor the names of its
+#    contributors may be used to endorse or promote products
+#    derived from this software without specific prior written
+#    permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+# ----------------------------------------------------------------------------
+
+import os
+import re
+import sys
+
+import ctypes
+import ctypes.util
+
+
+_debug_lib = False
+_debug_trace = False
+
+class _TraceFunction(object):
+    def __init__(self, func):
+        self.__dict__['_func'] = func
+
+    def __str__(self):
+        return self._func.__name__
+
+    def __call__(self, *args, **kwargs):
+        return self._func(*args, **kwargs)
+
+    def __getattr__(self, name):
+        return getattr(self._func, name)
+
+    def __setattr__(self, name, value):
+        setattr(self._func, name, value)
+
+class _TraceLibrary(object):
+    def __init__(self, library):
+        self._library = library
+        print library
+
+    def __getattr__(self, name):
+        func = getattr(self._library, name)
+        f = _TraceFunction(func)
+        return f
+
+class _WindowsLibrary(object):
+    def __init__(self, path):
+        self._libraries = [
+          ctypes.cdll.LoadLibrary(path),
+          ctypes.windll.LoadLibrary(path)
+        ]
+
+    def __getattr__(self, name):
+        for i in range(len(self._libraries)): 
+            try:
+                func = getattr(self._libraries[i], name)
+                f = _TraceFunction(func)
+                return f
+            except AttributeError:
+                if i > 0:
+                    raise
+
+
+
+class LibraryLoader(object):
+    def load_library(self, *names, **kwargs):
+        '''Find and load a library.  
+        
+        More than one name can be specified, they will be tried in order.
+        Platform-specific library names (given as kwargs) are tried first.
+
+        Raises ImportError if library is not found.
+        '''
+                
+        if 'framework' in kwargs and self.platform == 'darwin':
+            return self.load_framework(kwargs['framework'])
+        
+        platform_names = kwargs.get(self.platform, [])
+        if type(platform_names) in (str, unicode):
+            platform_names = [platform_names]
+        elif type(platform_names) is tuple:
+            platform_names = list(platform_names)
+
+        if self.platform == 'linux2':
+            platform_names.extend(['lib%s.so' % n for n in names])
+        elif self.platform == 'win32':
+            platform_names.extend(['%s.dll' % n for n in names])
+            platform_names.extend(['lib%s.dll' % n for n in names])
+        elif self.platform == 'darwin':
+            platform_names.extend(['%s.dylib' % n for n in names])
+            platform_names.extend(['lib%s.dylib' % n for n in names])
+        
+        platform_names.extend(names)
+        for name in platform_names:
+            path = self.find_library(name)
+            if path:
+                try:
+                    if self.platform == 'win32':
+                        lib = _WindowsLibrary(path)
+                    else:
+                        lib = ctypes.cdll.LoadLibrary(path)
+                    if _debug_lib:
+                        print path
+                    if _debug_trace:
+                        lib = _TraceLibrary(lib)
+                    return lib
+                except OSError,e:
+                    pass
+        raise ImportError('Library "%s" not found.' % names[0])
+
+    find_library = lambda self, name: ctypes.util.find_library(name)
+
+    platform = sys.platform
+    if platform == 'cygwin':
+        platform = 'win32'
+
+    def load_framework(self, path):
+        raise RuntimeError("Can't load framework on this platform.")
+
+class MachOLibraryLoader(LibraryLoader):
+    def __init__(self):
+        if 'LD_LIBRARY_PATH' in os.environ:
+            self.ld_library_path = os.environ['LD_LIBRARY_PATH'].split(':')
+        else:
+            self.ld_library_path = []
+
+        if 'DYLD_LIBRARY_PATH' in os.environ:
+            self.dyld_library_path = os.environ['DYLD_LIBRARY_PATH'].split(':')
+        else:
+            self.dyld_library_path = []
+
+        if 'DYLD_FALLBACK_LIBRARY_PATH' in os.environ:
+            self.dyld_fallback_library_path = \
+                os.environ['DYLD_FALLBACK_LIBRARY_PATH'].split(':')
+        else:
+            self.dyld_fallback_library_path = [
+                os.path.expanduser('~/lib'),
+                '/usr/local/lib',
+                '/usr/lib']
+        
+    def find_library(self, path):
+        '''Implements the dylib search as specified in Apple documentation:
+        
+        http://developer.apple.com/documentation/DeveloperTools/Conceptual/DynamicLibraries/Articles/DynamicLibraryUsageGuidelines.html
+
+        Before commencing the standard search, the method first checks
+        the bundle's ``Frameworks`` directory if the application is running
+        within a bundle (OS X .app).
+        '''
+
+        libname = os.path.basename(path)
+        search_path = []
+
+        if hasattr(sys, 'frozen') and sys.frozen == 'macosx_app':
+            search_path.append(os.path.join(
+                os.environ['RESOURCEPATH'],
+                '..',
+                'Frameworks',
+                libname))
+                
+        if '/' in path:
+            search_path.extend(
+                [os.path.join(p, libname) \
+                    for p in self.dyld_library_path])
+            search_path.append(path)
+            search_path.extend(
+                [os.path.join(p, libname) \
+                    for p in self.dyld_fallback_library_path])
+        else:
+            search_path.extend(
+                [os.path.join(p, libname) \
+                    for p in self.ld_library_path])
+            search_path.extend(
+                [os.path.join(p, libname) \
+                    for p in self.dyld_library_path])
+            search_path.append(path)
+            search_path.extend(
+                [os.path.join(p, libname) \
+                    for p in self.dyld_fallback_library_path])
+                
+        for path in search_path:
+            if os.path.exists(path):
+                return path
+
+        return None
+
+    def find_framework(self, path):
+        '''Implement runtime framework search as described by:
+
+        http://developer.apple.com/documentation/MacOSX/Conceptual/BPFrameworks/Concepts/FrameworkBinding.html
+        '''
+
+        # e.g. path == '/System/Library/Frameworks/OpenGL.framework'
+        #      name == 'OpenGL'
+        # return '/System/Library/Frameworks/OpenGL.framework/OpenGL'
+        name = os.path.splitext(os.path.split(path)[1])[0]
+
+        realpath = os.path.join(path, name) 
+        if os.path.exists(realpath):
+            return realpath
+
+        for dir in ('/Library/Frameworks',
+                    '/System/Library/Frameworks'):
+            realpath = os.path.join(dir, '%s.framework' % name, name)
+            if os.path.exists(realpath):
+                return realpath
+
+        return None
+
+    def load_framework(self, path):
+        realpath = self.find_framework(path)
+        if realpath:
+            lib = ctypes.cdll.LoadLibrary(realpath)
+            if _debug_lib:
+                print realpath
+            if _debug_trace:
+                lib = _TraceLibrary(lib)
+            return lib
+
+        raise ImportError("Can't find framework %s." % path)
+
+class LinuxLibraryLoader(LibraryLoader):
+    _ld_so_cache = None
+
+    def _create_ld_so_cache(self):
+        # Recreate search path followed by ld.so.  This is going to be
+        # slow to build, and incorrect (ld.so uses ld.so.cache, which may
+        # not be up-to-date).  Used only as fallback for distros without
+        # /sbin/ldconfig.
+        #
+        # We assume the DT_RPATH and DT_RUNPATH binary sections are omitted.
+
+        directories = []
+        try:
+            directories.extend(os.environ['LD_LIBRARY_PATH'].split(':'))
+        except KeyError:
+            pass
+
+        try:
+            directories.extend([dir.strip() for dir in open('/etc/ld.so.conf')])
+        except IOError:
+            pass
+
+        directories.extend(['/lib', '/usr/lib'])
+
+        cache = {}
+        lib_re = re.compile(r'lib(.*)\.so$')
+        for dir in directories:
+            try:
+                for file in os.listdir(dir):
+                    if '.so' not in file:
+                        continue
+
+                    # Index by filename
+                    path = os.path.join(dir, file)
+                    if file not in cache:
+                        cache[file] = path
+
+                    # Index by library name
+                    match = lib_re.match(file)
+                    if match:
+                        library = match.group(1)
+                        if library not in cache:
+                            cache[library] = path
+            except OSError:
+                pass
+
+        self._ld_so_cache = cache
+
+    def find_library(self, path):
+        # Implement the ld-linux.so search path as described in
+        # the man page.
+
+        if self._ld_so_cache is None:
+            self._create_ld_so_cache()
+
+        return self._ld_so_cache.get(path)
+
+if sys.platform == 'darwin':
+    loader = MachOLibraryLoader()
+elif sys.platform == 'linux2':
+    loader = LinuxLibraryLoader()
+else:
+    loader = LibraryLoader()
+load_library = loader.load_library
+

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/options.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/options.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/options.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+"""
+All of the components of ctypesgencore require an argument called "options".
+In command-line usage, this would be an optparse.Values object. However, if
+ctypesgencore is used as a standard Python module, constructing this object
+would be a pain. So this module exists to provide a "default" options object
+for convenience.
+"""
+
+import optparse, copy
+
+default_values={
+    "other_headers": [],
+    "modules": [],
+    "include_search_paths": [],
+    "compile_libdirs": [],
+    "runtime_libdirs": [],
+    "cpp": "gcc -E",
+    "save_preprocessed_headers": None,
+    "all_headers": False,
+    "builtin_symbols": False,
+    "include_symbols": None,
+    "exclude_symbols": None,
+    "show_all_errors": False,
+    "show_long_errors": False,
+    "show_macro_warnings": True,
+    "header_template": None,
+    "inserted_files": [],
+    "other_known_names": [],
+    "include_macros": True,
+    "libraries": [],
+    "strip_build_path": None
+}
+
+def get_default_options():
+    return optparse.Values(copy.deepcopy(default_values))

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/__init__.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/__init__.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/__init__.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+"""
+This package parses C header files and generates lists of functions, typedefs,
+variables, structs, unions, enums, macros, and constants. This package knows
+nothing about the libraries themselves.
+
+The public interface for this package is the function "parse". Use as follows:
+>>> descriptions = parse(["inputfile1.h","inputfile2.h"], options)
+where "options" is an optparse.Values object.
+
+parse() returns a DescriptionCollection object. See ctypesgencore.descriptions
+for more information.
+
+"""
+
+from datacollectingparser import DataCollectingParser
+
+def parse(headers, options):
+    parser=DataCollectingParser(headers, options)
+    parser.parse()
+    return parser.data()
+
+__all__ = ["parse"]
\ No newline at end of file

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/cdeclarations.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/cdeclarations.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/cdeclarations.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,174 @@
+#!/usr/bin/env python
+
+'''
+This file contains classes that represent C declarations. cparser produces
+declarations in this format, and ctypesparser reformats them into a format that
+is not C-specific. The other modules don't need to touch these.
+'''
+
+__docformat__ = 'restructuredtext'
+
+# --------------------------------------------------------------------------
+# C Object Model
+# --------------------------------------------------------------------------
+
+class Declaration(object):
+    def __init__(self):
+        self.declarator = None
+        self.type = Type()
+        self.storage = None
+
+    def __repr__(self):
+        d = {
+            'declarator': self.declarator,
+            'type': self.type,
+        }
+        if self.storage:
+            d['storage'] = self.storage
+        l = ['%s=%r' % (k, v) for k, v in d.items()]
+        return 'Declaration(%s)' % ', '.join(l)
+
+class Declarator(object):
+    pointer = None
+    def __init__(self):
+        self.identifier = None
+        self.initializer = None
+        self.array = None
+        self.parameters = None
+        self.bitfield = None
+
+    # make pointer read-only to catch mistakes early
+    pointer = property(lambda self: None)
+
+    def __repr__(self):
+        s = self.identifier or ''
+        if self.bitfield:
+            s += ":%d" % self.bitfield
+        if self.array:
+            s += repr(self.array)
+        if self.initializer:
+            s += ' = %r' % self.initializer
+        if self.parameters is not None:
+            s += '(' + ', '.join([repr(p) for p in self.parameters]) + ')'
+        return s
+
+class Pointer(Declarator):
+    pointer = None
+    def __init__(self):
+        super(Pointer, self).__init__()
+        self.qualifiers = []
+
+    def __repr__(self):
+        q = ''
+        if self.qualifiers:
+            q = '<%s>' % ' '.join(self.qualifiers)
+        return 'POINTER%s(%r)' % (q, self.pointer) + \
+            super(Pointer, self).__repr__()
+
+class Array(object):
+    def __init__(self):
+        self.size = None
+        self.array = None
+
+    def __repr__(self):
+        if self.size:
+            a =  '[%r]' % self.size
+        else:
+            a = '[]'
+        if self.array:
+            return repr(self.array) + a
+        else:
+            return a
+
+class Parameter(object):
+    def __init__(self):
+        self.type = Type()
+        self.storage = None
+        self.declarator = None
+
+    def __repr__(self):
+        d = {
+            'type': self.type,
+        }
+        if self.declarator:
+            d['declarator'] = self.declarator
+        if self.storage:
+            d['storage'] = self.storage
+        l = ['%s=%r' % (k, v) for k, v in d.items()]
+        return 'Parameter(%s)' % ', '.join(l)
+
+
+class Type(object):
+    def __init__(self):
+        self.qualifiers = []
+        self.specifiers = []
+
+    def __repr__(self):
+        return ' '.join(self.qualifiers + [str(s) for s in self.specifiers])
+
+# These are used only internally.
+
+class StorageClassSpecifier(str):
+    pass
+
+class TypeSpecifier(str):
+    pass
+
+class StructTypeSpecifier(object):
+    def __init__(self, is_union, tag, declarations):
+        self.is_union = is_union
+        self.tag = tag
+        self.declarations = declarations
+
+    def __repr__(self):
+        if self.is_union:
+            s = 'union'
+        else:
+            s = 'struct'
+        if self.tag:
+            s += ' %s' % self.tag
+        if self.declarations:
+            s += ' {%s}' % '; '.join([repr(d) for d in self.declarations])
+        return s
+
+class EnumSpecifier(object):
+    def __init__(self, tag, enumerators, src=None):
+        self.tag = tag
+        self.enumerators = enumerators
+        self.src=src
+
+    def __repr__(self):
+        s = 'enum'
+        if self.tag:
+            s += ' %s' % self.tag
+        if self.enumerators:
+            s += ' {%s}' % ', '.join([repr(e) for e in self.enumerators])
+        return s
+
+class Enumerator(object):
+    def __init__(self, name, expression):
+        self.name = name
+        self.expression = expression
+
+    def __repr__(self):
+        s = self.name
+        if self.expression:
+            s += ' = %r' % self.expression
+        return s
+
+class TypeQualifier(str):
+    pass
+
+def apply_specifiers(specifiers, declaration):
+    '''Apply specifiers to the declaration (declaration may be
+    a Parameter instead).'''
+    for s in specifiers:
+        if type(s) == StorageClassSpecifier:
+            if declaration.storage:
+                # Multiple storage classes, technically an error... ignore it
+                pass
+            declaration.storage = s
+        elif type(s) in (TypeSpecifier, StructTypeSpecifier, EnumSpecifier):
+            declaration.type.specifiers.append(s)
+        elif type(s) == TypeQualifier:
+            declaration.type.qualifiers.append(s)

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/cgrammar.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/cgrammar.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/cgrammar.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,1093 @@
+#!/usr/bin/env python
+
+'''This is a yacc grammar for C.
+
+Derived from ANSI C grammar:
+  * Lexicon: http://www.lysator.liu.se/c/ANSI-C-grammar-l.html
+  * Grammar: http://www.lysator.liu.se/c/ANSI-C-grammar-y.html
+
+Reference is C99:
+  * http://www.open-std.org/JTC1/SC22/WG14/www/docs/n1124.pdf
+
+'''
+
+__docformat__ = 'restructuredtext'
+
+import operator
+import os.path
+import re
+import sys
+import time
+import warnings
+
+import preprocessor
+import yacc
+import ctypesparser
+import ctypesgencore.expressions as expressions
+import cdeclarations
+
+tokens = (
+    'PP_DEFINE', 'PP_DEFINE_NAME', 'PP_DEFINE_MACRO_NAME', 'PP_MACRO_PARAM',
+    'PP_STRINGIFY', 'PP_IDENTIFIER_PASTE', 'PP_END_DEFINE',
+
+    'IDENTIFIER', 'CONSTANT', 'CHARACTER_CONSTANT', 'STRING_LITERAL', 'SIZEOF',
+    'PTR_OP', 'INC_OP', 'DEC_OP', 'LEFT_OP', 'RIGHT_OP', 'LE_OP', 'GE_OP',
+    'EQ_OP', 'NE_OP', 'AND_OP', 'OR_OP', 'MUL_ASSIGN', 'DIV_ASSIGN',
+    'MOD_ASSIGN', 'ADD_ASSIGN', 'SUB_ASSIGN', 'LEFT_ASSIGN', 'RIGHT_ASSIGN',
+    'AND_ASSIGN', 'XOR_ASSIGN', 'OR_ASSIGN',  'PERIOD', 'TYPE_NAME', 
+    
+    'TYPEDEF', 'EXTERN', 'STATIC', 'AUTO', 'REGISTER', 
+    'CHAR', 'SHORT', 'INT', 'LONG', 'SIGNED', 'UNSIGNED', 'FLOAT', 'DOUBLE',
+    'CONST', 'VOLATILE', 'VOID',
+    'STRUCT', 'UNION', 'ENUM', 'ELLIPSIS',
+
+    'CASE', 'DEFAULT', 'IF', 'ELSE', 'SWITCH', 'WHILE', 'DO', 'FOR', 'GOTO',
+    'CONTINUE', 'BREAK', 'RETURN', '__ASM__'
+)
+
+keywords = [
+    'auto', 'break', 'case', 'char', 'const', 'continue', 'default', 'do',
+    'double', 'else', 'enum', 'extern', 'float', 'for', 'goto', 'if', 'int',
+    'long', 'register', 'return', 'short', 'signed', 'sizeof', 'static',
+    'struct', 'switch', 'typedef', 'union', 'unsigned', 'void', 'volatile',
+    'while', '__asm__'
+]
+
+def p_translation_unit(p):
+    '''translation_unit : 
+                        | translation_unit external_declaration
+                        | translation_unit define
+    '''
+    # Starting production.
+    # Allow empty production so that files with no declarations are still
+    #    valid.
+    # Intentionally empty
+
+def p_identifier(p):
+    '''identifier : IDENTIFIER
+                  | IDENTIFIER PP_IDENTIFIER_PASTE identifier
+                  | PP_MACRO_PARAM PP_IDENTIFIER_PASTE identifier
+                  | IDENTIFIER PP_IDENTIFIER_PASTE PP_MACRO_PARAM
+                  | PP_MACRO_PARAM PP_IDENTIFIER_PASTE PP_MACRO_PARAM
+    '''
+    if len(p)==2:
+        p[0] = expressions.IdentifierExpressionNode(p[1])
+    else:
+        # Should it be supported? It wouldn't be very hard to add support.
+        # Basically, it would involve a new ExpressionNode called
+        # an IdentifierPasteExpressionNode that took a list of strings and
+        # ParameterExpressionNodes. Then it would generate code like
+        # "locals()['%s' + '%s' + ...]" where %s was substituted with the
+        # elements of the list. I haven't supported it yet because I think
+        # it's unnecessary and a little too powerful.
+        p[0] = expressions.UnsupportedExpressionNode("Identifier pasting is " \
+            "not supported by ctypesgen.")
+
+def p_constant(p):
+    '''constant : CONSTANT
+                | CHARACTER_CONSTANT
+    '''
+    constant = p[1]
+    
+    if constant[0]=="'":
+        # Character constant
+        value = constant[1:-1]
+    else:
+        # This is a value formatted the way that the preprocessor formats
+        # numeric constants. It puts a prefix "l", "i", or "f" to indicate
+        # if it should be converted into an integer, long or float.
+        prefix = constant[0]
+        constant = constant[1:]
+        if prefix=="i":
+            value = int(constant)
+        elif prefix=="l":
+            value = long(constant)
+        else:
+            value = float(constant)
+    
+    p[0] = expressions.ConstantExpressionNode(value)
+
+def p_string_literal(p):
+    '''string_literal : STRING_LITERAL'''
+    p[0] = expressions.ConstantExpressionNode(p[1])
+
+def p_multi_string_literal(p):
+    '''multi_string_literal : string_literal
+                            | macro_param
+                            | multi_string_literal string_literal
+                            | multi_string_literal macro_param
+    '''
+    if len(p)==2:
+        p[0] = p[1]
+    else:
+        p[0] = expressions.BinaryExpressionNode("string concatenation",
+            (lambda x,y: x+y), "(%s + %s)", (False,False), p[1], p[2])
+
+def p_macro_param(p):
+    '''macro_param : PP_MACRO_PARAM
+                   | PP_STRINGIFY PP_MACRO_PARAM
+    '''
+    if len(p)==2:
+        p[0] = expressions.ParameterExpressionNode(p[1])
+    else:
+        p[0] = expressions.ParameterExpressionNode(p[2])
+
+def p_primary_expression(p):
+    '''primary_expression : identifier
+                          | constant
+                          | multi_string_literal
+                          | '(' expression ')'
+    '''
+    if p[1] == '(':
+        p[0] = p[2]
+    else:
+        p[0] = p[1]
+
+def p_postfix_expression(p):
+    '''postfix_expression : primary_expression
+                  | postfix_expression '[' expression ']'
+                  | postfix_expression '(' ')'
+                  | postfix_expression '(' argument_expression_list ')'
+                  | postfix_expression PERIOD IDENTIFIER
+                  | postfix_expression PTR_OP IDENTIFIER
+                  | postfix_expression INC_OP
+                  | postfix_expression DEC_OP
+    '''
+    
+    if len(p)==2:
+        p[0] = p[1]
+    
+    elif p[2]=='[':
+        p[0] = expressions.BinaryExpressionNode("array access",
+            (lambda a,b: a[b]), "(%s [%s])", (True,False), p[1], p[3])
+    
+    elif p[2]=='(':
+        if p[3]==')':
+            p[0] = expressions.CallExpressionNode(p[1],[])
+        else:
+            p[0] = expressions.CallExpressionNode(p[1],p[3])
+    
+    elif p[2]=='.':
+        p[0] = expressions.AttributeExpressionNode( \
+            (lambda x,a: getattr(x,a)), "(%s.%s)", p[1],p[3])
+    
+    elif p[2]=='->':
+        p[0] = expressions.AttributeExpressionNode( \
+            (lambda x,a: getattr(x.contents,a)), "(%s.contents.%s)", p[1],p[3])
+    
+    elif p[2]=='++':
+        p[0] = expressions.UnaryExpressionNode("increment",(lambda x: x+1),
+                                               "(%s + 1)", False,p[1])
+    
+    elif p[2]=='--':
+        p[0] = expressions.UnaryExpressionNode("decrement",(lambda x: x-1),
+                                               "(%s - 1)", False,p[1])
+
+def p_argument_expression_list(p):
+    '''argument_expression_list : assignment_expression
+                        | argument_expression_list ',' assignment_expression
+    '''
+    if len(p) == 4:
+        p[1].append(p[3])
+        p[0] = p[1]
+    else:
+        p[0] = [p[1]]
+
+def p_asm_expression(p):
+    '''asm_expression : __ASM__ volatile_opt '(' string_literal ')'
+                      | __ASM__ volatile_opt '(' string_literal ':' str_opt_expr_pair_list ')'
+                      | __ASM__ volatile_opt '(' string_literal ':' str_opt_expr_pair_list ':' str_opt_expr_pair_list ')'
+                      | __ASM__ volatile_opt '(' string_literal ':' str_opt_expr_pair_list ':' str_opt_expr_pair_list ':' str_opt_expr_pair_list ')'
+    '''
+
+    # Definitely not ISO C, adapted from example ANTLR GCC parser at
+    #  http://www.antlr.org/grammar/cgram//grammars/GnuCParser.g
+    # but more lenient (expressions permitted in optional final part, when
+    # they shouldn't be -- avoids shift/reduce conflict with
+    # str_opt_expr_pair_list).
+
+    p[0] = expressions.UnsupportedExpressionNode("This node is ASM assembler.")
+
+def p_str_opt_expr_pair_list(p):
+    '''str_opt_expr_pair_list : 
+                              | str_opt_expr_pair
+                              | str_opt_expr_pair_list ',' str_opt_expr_pair
+    '''
+
+def p_str_opt_expr_pair(p):
+   '''str_opt_expr_pair : string_literal
+                        | string_literal '(' expression ')'
+    '''
+
+def p_volatile_opt(p):
+    '''volatile_opt : 
+                    | VOLATILE
+    '''
+
+prefix_ops_dict = {
+    "++": ("increment",(lambda x: x+1),"(%s + 1)",False),
+    "--": ("decrement",(lambda x: x-1),"(%s - 1)",False),
+    '&': ("reference ('&')",None,"pointer(%s)",True),
+    '*': ("dereference ('*')",None,"(%s[0])",True),
+    '+': ("unary '+'",(lambda x: x),"%s",True),
+    '-': ("negation",(lambda x: -x),"(-%s)",False),
+    '~': ("inversion",(lambda x: ~x),"(~%s)",False),
+    '!': ("logical not",(lambda x: not x),"(not %s)",True)
+}
+
+def p_unary_expression(p):
+    '''unary_expression : postfix_expression
+                        | INC_OP unary_expression
+                        | DEC_OP unary_expression
+                        | unary_operator cast_expression
+                        | SIZEOF unary_expression
+                        | SIZEOF '(' type_name ')'
+                        | asm_expression
+    '''
+    if len(p) == 2:
+        p[0] = p[1]
+    
+    elif p[1] == 'sizeof':
+        if len(p)==5:
+            p[0] = expressions.SizeOfExpressionNode(p[3])
+        else:
+            p[0] = expressions.SizeOfExpressionNode(p[2])
+    
+    else:
+        name,op,format,can_be_ctype = prefix_ops_dict[p[1]]
+        p[0] = expressions.UnaryExpressionNode(name, op, format, can_be_ctype,
+                                               p[2])
+
+def p_unary_operator(p):
+    '''unary_operator : '&'
+                      | '*'
+                      | '+'
+                      | '-'
+                      | '~'
+                      | '!'
+    '''
+    p[0] = p[1]
+
+def p_cast_expression(p):
+    '''cast_expression : unary_expression
+                       | '(' type_name ')' cast_expression
+    '''
+    if len(p) == 2:
+        p[0] = p[1]
+    else:
+        p[0] = expressions.TypeCastExpressionNode(p[4],p[2])
+
+mult_ops_dict = {
+    '*': ("multiplication", (lambda x,y: x*y), "(%s * %s)"),
+    '/': ("division", (lambda x,y: x/y), "(%s / %s)"),
+    '%': ("modulo", (lambda x,y: x%y), "(%s %% %s)")
+}
+
+def p_multiplicative_expression(p):
+    '''multiplicative_expression : cast_expression
+                                 | multiplicative_expression '*' cast_expression
+                                 | multiplicative_expression '/' cast_expression
+                                 | multiplicative_expression '%' cast_expression
+    '''
+    if len(p) == 2:
+        p[0] = p[1]
+    else:        
+        name,op,format = mult_ops_dict[p[2]]
+        p[0] = expressions.BinaryExpressionNode(name, op, format, (False,False),
+            p[1], p[3])
+
+add_ops_dict = {
+    '+': ("addition", (lambda x,y: x+y), "(%s + %s)"),
+    '-': ("subtraction", (lambda x,y: x-y), "(%s - %s)")
+}
+
+def p_additive_expression(p):
+    '''additive_expression : multiplicative_expression
+                           | additive_expression '+' multiplicative_expression
+                           | additive_expression '-' multiplicative_expression
+    '''
+    if len(p) == 2:
+        p[0] = p[1]
+    else:
+        name,op,format = add_ops_dict[p[2]]
+        p[0] = expressions.BinaryExpressionNode(name, op, format, (False,False),
+            p[1], p[3])
+
+shift_ops_dict = {
+    '>>': ("right shift", (lambda x,y: x>>y), "(%s >> %s)"),
+    '<<': ("left shift", (lambda x,y: x<<y), "(%s << %s)")
+}
+
+def p_shift_expression(p):
+    '''shift_expression : additive_expression
+                        | shift_expression LEFT_OP additive_expression
+                        | shift_expression RIGHT_OP additive_expression
+    '''
+    if len(p) == 2:
+        p[0] = p[1]
+    else:
+        name,op,format = shift_ops_dict[p[2]]
+        p[0] = expressions.BinaryExpressionNode(name, op, format, (False,False),
+            p[1], p[3])
+
+rel_ops_dict = {
+    '>': ("greater-than", (lambda x,y: x>y), "(%s > %s)"),
+    '<': ("less-than", (lambda x,y: x<y), "(%s < %s)"),
+    '>=': ("greater-than-equal", (lambda x,y: x>=y), "(%s >= %s)"),
+    '<=': ("less-than-equal", (lambda x,y: x<=y), "(%s <= %s)")
+}
+
# PLY production for '<', '>', '<=', '>='; same shape as the other binary
# expression productions.
def p_relational_expression(p):
    '''relational_expression : shift_expression 
                             | relational_expression '<' shift_expression
                             | relational_expression '>' shift_expression
                             | relational_expression LE_OP shift_expression
                             | relational_expression GE_OP shift_expression
    '''
    if len(p) == 2:
        p[0] = p[1]
    else:
        name,op,format = rel_ops_dict[p[2]]
        p[0] = expressions.BinaryExpressionNode(name, op, format, (False,False),
            p[1], p[3])
+
# Equality operators for the equality_expression productions:
# token -> (node name, evaluator, printf-style template).
equality_ops_dict = {
    '==': ("equals", (lambda left, right: left == right), "(%s == %s)"),
    '!=': ("not equals", (lambda left, right: left != right), "(%s != %s)"),
}
+
# PLY production for '==' / '!='.
def p_equality_expression(p):
    '''equality_expression : relational_expression
                           | equality_expression EQ_OP relational_expression
                           | equality_expression NE_OP relational_expression
    '''
    if len(p) == 2:
        p[0] = p[1]
    else:
        name,op,format = equality_ops_dict[p[2]]
        p[0] = expressions.BinaryExpressionNode(name, op, format, (False,False),
            p[1], p[3])
+
# Bitwise and logical binary productions.  Each builds a
# BinaryExpressionNode directly with an inline (name, evaluator, template)
# triple instead of going through an ops dict.
# NOTE(review): the (False,False)/(True,True) tuple is passed straight to
# BinaryExpressionNode; its meaning is defined in expressions.py -- confirm
# there before relying on it.
def p_and_expression(p):
    '''and_expression : equality_expression
                      | and_expression '&' equality_expression
    '''
    if len(p) == 2:
        p[0] = p[1]
    else:
        p[0] = expressions.BinaryExpressionNode("bitwise and",
            (lambda x,y: x&y), "(%s & %s)", (False,False), p[1], p[3])

def p_exclusive_or_expression(p):
    '''exclusive_or_expression : and_expression
                               | exclusive_or_expression '^' and_expression
    '''
    if len(p) == 2:
        p[0] = p[1]
    else:
        p[0] = expressions.BinaryExpressionNode("bitwise xor",
            (lambda x,y: x^y), "(%s ^ %s)", (False,False), p[1], p[3])

def p_inclusive_or_expression(p):
    '''inclusive_or_expression : exclusive_or_expression
                   | inclusive_or_expression '|' exclusive_or_expression
    '''
    if len(p) == 2:
        p[0] = p[1]
    else:
        p[0] = expressions.BinaryExpressionNode("bitwise or",
            (lambda x,y: x|y), "(%s | %s)", (False,False), p[1], p[3])

def p_logical_and_expression(p):
    '''logical_and_expression : inclusive_or_expression
                  | logical_and_expression AND_OP inclusive_or_expression
    '''
    if len(p) == 2:
        p[0] = p[1]
    else:
        # C's '&&' is rendered as Python's 'and' in the generated source.
        p[0] = expressions.BinaryExpressionNode("logical and",
            (lambda x,y: x and y), "(%s and %s)", (True,True), p[1], p[3])
+
# PLY production for C's '||', rendered as Python's 'or'.
def p_logical_or_expression(p):
    '''logical_or_expression : logical_and_expression
                  | logical_or_expression OR_OP logical_and_expression
    '''
    if len(p) == 2:
        p[0] = p[1]
    else:
        # Fixed copy-paste error: this node was previously labelled
        # "logical and" even though it implements '||'/'or'.
        p[0] = expressions.BinaryExpressionNode("logical or",
            (lambda x,y: x or y), "(%s or %s)", (True,True), p[1], p[3])
+
# PLY production for the ternary operator 'cond ? a : b'.
def p_conditional_expression(p):
    '''conditional_expression : logical_or_expression
          | logical_or_expression '?' expression ':' conditional_expression
    '''
    if len(p) == 2:
        p[0] = p[1]
    else:
        # p[1]=condition, p[3]=then-expression, p[5]=else-expression.
        p[0] = expressions.ConditionalExpressionNode(p[1], p[3], p[5])
+
# Compound-assignment operators: token -> (node name, evaluator,
# printf-style template).  Used by p_assignment_expression to model the
# *value* of 'x op= y' as 'x op y'.  In the templates a literal '%' must be
# written '%%' so the later "template % (left, right)" expansion works.
assign_ops_dict = {
    '*=': ("multiply", (lambda x,y: x*y), "(%s * %s)"),
    '/=': ("divide", (lambda x,y: x/y), "(%s / %s)"),
    # Fixed: "(%s % %s)" is an invalid printf-style template ('%' followed
    # by a space) and raised ValueError when expanded; '%%' is required.
    '%=': ("modulus", (lambda x,y: x%y), "(%s %% %s)"),
    '+=': ("addition", (lambda x,y: x+y), "(%s + %s)"),
    '-=': ("subtraction", (lambda x,y: x-y), "(%s - %s)"),
    '<<=': ("left shift", (lambda x,y: x<<y), "(%s << %s)"),
    '>>=': ("right shift",(lambda x,y: x>>y),"(%s >> %s)"),
    '&=': ("bitwise and", (lambda x,y: x&y), "(%s & %s)"),
    '^=': ("bitwise xor", (lambda x,y: x^y), "(%s ^ %s)"),
    '|=': ("bitwise or", (lambda x,y: x|y), "(%s | %s)")
}
+
# Assignment / comma / constant-expression productions.
def p_assignment_expression(p):
    '''assignment_expression : conditional_expression
                 | unary_expression assignment_operator assignment_expression
    '''
    if len(p) == 2:
        p[0] = p[1]
    else:
        # In C, the value of (x*=3) is the same as (x*3). We support that here.
        # However, we don't support the change in the value of x.
        if p[2]=='=':
            # Plain assignment: the expression's value is the RHS.
            p[0] = p[3]
        else:
            name,op,format = assign_ops_dict[p[2]]
            p[0] = expressions.BinaryExpressionNode(name,op,format,(True,True),
                p[1],p[3])

def p_assignment_operator(p):
    '''assignment_operator : '='
                           | MUL_ASSIGN
                           | DIV_ASSIGN
                           | MOD_ASSIGN
                           | ADD_ASSIGN
                           | SUB_ASSIGN
                           | LEFT_ASSIGN
                           | RIGHT_ASSIGN
                           | AND_ASSIGN
                           | XOR_ASSIGN
                           | OR_ASSIGN
    '''
    # Pass the operator's literal text up for assign_ops_dict lookup.
    p[0] = p[1]

def p_expression(p):
    '''expression : assignment_expression
                  | expression ',' assignment_expression
    '''
    # For 'a, b' this keeps the FIRST operand (C's comma operator yields the
    # last); acceptable because sequence expressions are deliberately
    # unsupported here.
    p[0] = p[1]
    # We don't need to support sequence expressions...

def p_constant_expression(p):
    '''constant_expression : conditional_expression
    '''
    p[0] = p[1]
+
# Declaration productions.  Handling is split so typedefs are registered
# with the lexer before the lookahead past ';' is read.
def p_declaration(p):
    '''declaration : declaration_impl ';'
    '''
    # The ';' must be here, not in 'declaration', as declaration needs to
    # be executed before the ';' is shifted (otherwise the next lookahead will
    # be read, which may be affected by this declaration if its a typedef.

def p_declaration_impl(p):
    '''declaration_impl : declaration_specifiers
                        | declaration_specifiers init_declarator_list
    '''
    declaration = cdeclarations.Declaration()
    cdeclarations.apply_specifiers(p[1], declaration)

    if len(p) == 2:
        # Specifiers only (e.g. 'struct foo;'): emit a single declaration
        # with no declarator.
        filename = p.slice[1].filename
        lineno = p.slice[1].lineno
        p.parser.cparser.impl_handle_declaration(declaration, filename, lineno)
        return

    # One callback per declarator ('int a, b;' yields two declarations
    # sharing the same Declaration object with declarator swapped in).
    filename = p.slice[2].filename
    lineno = p.slice[2].lineno
    for declarator in p[2]:
        declaration.declarator = declarator
        p.parser.cparser.impl_handle_declaration(declaration, filename, lineno)

# shift/reduce conflict with p_statement_error.
#def p_declaration_error(p):
#    '''declaration : error ';'
#    '''
#    # Error resynchronisation catch-all

def p_declaration_specifiers(p):
    '''declaration_specifiers : storage_class_specifier
                              | storage_class_specifier declaration_specifiers
                              | type_specifier
                              | type_specifier declaration_specifiers
                              | type_qualifier
                              | type_qualifier declaration_specifiers
    '''
    # Right-recursive accumulation into a tuple of specifier objects.
    if len(p) > 2:
        p[0] = (p[1],) + p[2]
    else:
        p[0] = (p[1],)

def p_init_declarator_list(p):
    '''init_declarator_list : init_declarator
                            | init_declarator_list ',' init_declarator
    '''
    # Left-recursive accumulation into a tuple of declarators.
    if len(p) > 2:
        p[0] = p[1] + (p[3],)
    else:
        p[0] = (p[1],)
+
# Declarator with optional '= initializer'.
def p_init_declarator(p):
    '''init_declarator : declarator
                       | declarator '=' initializer
    '''
    p[0] = p[1]
    if len(p) > 2:
        # Fixed off-by-one: p[2] is the '=' token itself; the initializer
        # value is p[3].  The previous code stored the literal '=' string.
        p[0].initializer = p[3]
+
# Storage-class and type-specifier productions.
def p_storage_class_specifier(p):
    '''storage_class_specifier : TYPEDEF
                               | EXTERN
                               | STATIC
                               | AUTO
                               | REGISTER
    '''
    # Wrap the keyword text in a StorageClassSpecifier marker object.
    p[0] = cdeclarations.StorageClassSpecifier(p[1])

def p_type_specifier(p):
    '''type_specifier : VOID
                      | CHAR
                      | SHORT
                      | INT
                      | LONG
                      | FLOAT
                      | DOUBLE
                      | SIGNED
                      | UNSIGNED
                      | struct_or_union_specifier
                      | enum_specifier
                      | TYPE_NAME
    '''
    # Struct/union and enum specifiers arrive already wrapped; everything
    # else is keyword/typedef-name text that needs wrapping.
    if type(p[1]) in (cdeclarations.StructTypeSpecifier,
                      cdeclarations.EnumSpecifier):
        p[0] = p[1]
    else:
        p[0] = cdeclarations.TypeSpecifier(p[1])
+
# struct/union specifier productions.
def p_struct_or_union_specifier(p):
    '''struct_or_union_specifier : struct_or_union IDENTIFIER '{' struct_declaration_list '}'
         | struct_or_union TYPE_NAME '{' struct_declaration_list '}'
         | struct_or_union '{' struct_declaration_list '}'
         | struct_or_union IDENTIFIER
         | struct_or_union TYPE_NAME
    '''
    # The TYPE_NAME ones are dodgy, needed for Apple headers
    # CoreServices.framework/Frameworks/CarbonCore.framework/Headers/Files.h.
    # CoreServices.framework/Frameworks/OSServices.framework/Headers/Power.h
    # StructTypeSpecifier(is_union, tag, members): p[1] is the bool from
    # p_struct_or_union; members is None for a forward/tagged reference.
    if len(p) == 3:
        p[0] = cdeclarations.StructTypeSpecifier(p[1], p[2], None)
    elif p[2] == '{':
        # Anonymous struct/union: empty tag.
        p[0] = cdeclarations.StructTypeSpecifier(p[1], '', p[3])
    else:
        p[0] = cdeclarations.StructTypeSpecifier(p[1], p[2], p[4])
    
    # NOTE(review): relies on the bundled, modified yacc setting
    # .filename/.lineno on slice entries; stock PLY does not.
    p[0].filename = p.slice[0].filename
    p[0].lineno = p.slice[0].lineno

def p_struct_or_union(p):
    '''struct_or_union : STRUCT
                       | UNION
    '''
    # True for 'union', False for 'struct'.
    p[0] = p[1] == 'union'

def p_struct_declaration_list(p):
    '''struct_declaration_list : struct_declaration
                               | struct_declaration_list struct_declaration
    '''
    # Each struct_declaration is already a tuple, so concatenation flattens
    # multi-declarator members into one member tuple.
    if len(p) == 2:
        p[0] = p[1]
    else:
        p[0] = p[1] + p[2]
+
# Struct member declaration productions.
def p_struct_declaration(p):
    '''struct_declaration : specifier_qualifier_list struct_declarator_list ';'
                          | specifier_qualifier_list ';'
    '''
    # p[0] returned is a tuple, to handle multiple declarators in one
    # declaration.
    r = ()
    if len(p) >= 4:
        # One Declaration per declarator; the specifier-only form
        # (e.g. an anonymous struct member) yields an empty tuple.
        for declarator in p[2]:
            declaration = cdeclarations.Declaration()
            cdeclarations.apply_specifiers(p[1], declaration)
            declaration.declarator = declarator
            r += (declaration,)
    p[0] = r

def p_specifier_qualifier_list(p):
    '''specifier_qualifier_list : type_specifier specifier_qualifier_list
                                | type_specifier
                                | type_qualifier specifier_qualifier_list
                                | type_qualifier
    '''
    # Interesting.. why is this one right-recursion?
    if len(p) == 3:
        p[0] = (p[1],) + p[2]
    else:
        p[0] = (p[1],)

def p_struct_declarator_list(p):
    '''struct_declarator_list : struct_declarator
                              | struct_declarator_list ',' struct_declarator
    '''
    # Left-recursive accumulation into a tuple.
    if len(p) == 2:
        p[0] = (p[1],)
    else:
        p[0] = p[1] + (p[3],)

def p_struct_declarator(p):
    '''struct_declarator : declarator
                         | ':' constant_expression
                         | declarator ':' constant_expression
    '''
    if p[1]==':':
        # Anonymous bitfield (': width'): synthesize an empty declarator.
        p[0] = cdeclarations.Declarator()
    else:
        p[0] = p[1]
        # Bitfield support
        if len(p)==4:
            p[0].bitfield = p[3]
+
# Enum and type-qualifier productions.
def p_enum_specifier(p):
    '''enum_specifier : ENUM '{' enumerator_list '}'
                      | ENUM IDENTIFIER '{' enumerator_list '}'
                      | ENUM IDENTIFIER
    '''
    # EnumSpecifier(tag, enumerators); a bare 'enum NAME' reference gets an
    # empty enumerator tuple.
    if len(p) == 5:
        p[0] = cdeclarations.EnumSpecifier(None, p[3])
    elif len(p) == 6:
        p[0] = cdeclarations.EnumSpecifier(p[2], p[4])
    else:
        p[0] = cdeclarations.EnumSpecifier(p[2], ())
    
    # NOTE(review): relies on the bundled, modified yacc setting
    # .filename/.lineno on slice entries; stock PLY does not.
    p[0].filename = p.slice[0].filename
    p[0].lineno = p.slice[0].lineno

def p_enumerator_list(p):
    '''enumerator_list : enumerator_list_iso
                       | enumerator_list_iso ','
    '''
    # Apple headers sometimes have trailing ',' after enumerants, which is
    # not ISO C.
    p[0] = p[1]

def p_enumerator_list_iso(p):
    '''enumerator_list_iso : enumerator
                           | enumerator_list_iso ',' enumerator
    '''
    # Left-recursive accumulation into a tuple.
    if len(p) == 2:
        p[0] = (p[1],)
    else:
        p[0] = p[1] + (p[3],)

def p_enumerator(p):
    '''enumerator : IDENTIFIER
                  | IDENTIFIER '=' constant_expression
    '''
    # Enumerator(name, value); value is None when not given explicitly.
    if len(p) == 2:
        p[0] = cdeclarations.Enumerator(p[1], None)
    else:
        p[0] = cdeclarations.Enumerator(p[1], p[3])

def p_type_qualifier(p):
    '''type_qualifier : CONST
                      | VOLATILE
    '''
    p[0] = cdeclarations.TypeQualifier(p[1])
+
# Declarator productions.  Pointers form a linked list via .pointer, with
# the declarator proper attached at the end of the chain.
def p_declarator(p):
    '''declarator : pointer direct_declarator
                  | direct_declarator
    '''
    if len(p) > 2:
        # Walk to the innermost pointer and hang the direct declarator off
        # its .pointer slot, preserving pointer-to-pointer chains.
        p[0] = p[1]
        ptr = p[1]
        while ptr.pointer:
            ptr = ptr.pointer
        ptr.pointer = p[2]
    else:
        p[0] = p[1]

def p_direct_declarator(p):
    '''direct_declarator : IDENTIFIER
                         | '(' declarator ')'
                         | direct_declarator '[' constant_expression ']'
                         | direct_declarator '[' ']'
                         | direct_declarator '(' parameter_type_list ')'
                         | direct_declarator '(' identifier_list ')'
                         | direct_declarator '(' ')'
    '''
    if isinstance(p[1], cdeclarations.Declarator):
        # Array or function suffix applied to an existing declarator.
        p[0] = p[1] 
        if p[2] == '[':
            # Prepend a new Array node; '[ ]' leaves size unset.
            a = cdeclarations.Array()
            a.array = p[0].array
            p[0].array = a
            if p[3] != ']':
                a.size = p[3]
        else:
            # Function declarator; '()' means an empty parameter tuple.
            if p[3] == ')':
                p[0].parameters = ()
            else:
                p[0].parameters = p[3]
    elif p[1] == '(':
        # Parenthesized declarator: pass the inner declarator through.
        p[0] = p[2]
    else:
        # Plain identifier: start a fresh declarator.
        p[0] = cdeclarations.Declarator()
        p[0].identifier = p[1]

    # Check parameters for (void) and simplify to empty tuple.
    if p[0].parameters and len(p[0].parameters) == 1:
        param = p[0].parameters[0]
        if param.type.specifiers == ['void'] and not param.declarator:
            p[0].parameters = ()
+
+
# Pointer and parameter-list productions.
def p_pointer(p):
    '''pointer : '*'
               | '*' type_qualifier_list
               | '*' pointer
               | '*' type_qualifier_list pointer
    '''
    # Each '*' becomes one Pointer node; qualifiers attach to the node and
    # nested pointers chain via .pointer.
    if len(p) == 2:
        p[0] = cdeclarations.Pointer()
    elif len(p) == 3:
        if type(p[2]) == cdeclarations.Pointer:
            p[0] = cdeclarations.Pointer()
            p[0].pointer = p[2]
        else:
            p[0] = cdeclarations.Pointer()
            p[0].qualifiers = p[2]
    else:
        p[0] = cdeclarations.Pointer()
        p[0].qualifiers = p[2]
        p[0].pointer = p[3]

def p_type_qualifier_list(p):
    '''type_qualifier_list : type_qualifier
                           | type_qualifier_list type_qualifier
    '''
    # Left-recursive accumulation into a tuple.
    if len(p) > 2:
        p[0] = p[1] + (p[2],)
    else:
        p[0] = (p[1],)

def p_parameter_type_list(p):
    '''parameter_type_list : parameter_list
                           | parameter_list ',' ELLIPSIS
    '''
    # Varargs: the literal ELLIPSIS token text is appended to the
    # parameter tuple as a marker.
    if len(p) > 2:
        p[0] = p[1] + (p[3],)
    else:
        p[0] = p[1]


def p_parameter_list(p):
    '''parameter_list : parameter_declaration
                      | parameter_list ',' parameter_declaration
    '''
    # Left-recursive accumulation into a tuple of Parameter objects.
    if len(p) > 2:
        p[0] = p[1] + (p[3],)
    else:
        p[0] = (p[1],)
+
# Parameter and type-name productions.
def p_parameter_declaration(p):
    '''parameter_declaration : declaration_specifiers declarator
                             | declaration_specifiers abstract_declarator
                             | declaration_specifiers
    '''
    # Build a Parameter from the specifiers; the declarator (named or
    # abstract) is optional.
    p[0] = cdeclarations.Parameter()
    cdeclarations.apply_specifiers(p[1], p[0])
    if len(p) > 2:
        p[0].declarator = p[2]

def p_identifier_list(p):
    '''identifier_list : IDENTIFIER
                       | identifier_list ',' IDENTIFIER
    '''
    # K&R-style parameter name list: each name becomes a Parameter with a
    # bare declarator and no type information.
    param = cdeclarations.Parameter()
    param.declarator = cdeclarations.Declarator()
    if len(p) > 2:
        param.declarator.identifier = p[3]
        p[0] = p[1] + (param,)
    else:
        param.declarator.identifier = p[1]
        p[0] = (param,)

def p_type_name(p):
    '''type_name : specifier_qualifier_list
                 | specifier_qualifier_list abstract_declarator
    '''
    # Used for casts/sizeof inside #define bodies: resolve straight to a
    # ctypes type via ctypesparser rather than a Declaration callback.
    typ=p[1]
    if len(p)==3:
        declarator = p[2]
    else:
        declarator = None
        
    declaration = cdeclarations.Declaration()
    declaration.declarator = declarator
    cdeclarations.apply_specifiers(typ,declaration)
    ctype = ctypesparser.get_ctypes_type(declaration.type,
                                            declaration.declarator)
    p[0] = ctype
+
# Abstract (unnamed) declarator productions, used in casts and parameter
# types.  Mirrors p_declarator / p_direct_declarator but without an
# identifier.
def p_abstract_declarator(p):
    '''abstract_declarator : pointer
                           | direct_abstract_declarator
                           | pointer direct_abstract_declarator
    '''
    if len(p) == 2:
        p[0] = p[1]
        if type(p[0]) == cdeclarations.Pointer:
            ptr = p[0]
            while ptr.pointer:
                ptr = ptr.pointer
            # Only if doesn't already terminate in a declarator
            if type(ptr) == cdeclarations.Pointer:
                ptr.pointer = cdeclarations.Declarator()
    else:
        # pointer + direct part: hang the direct declarator off the end of
        # the pointer chain, as in p_declarator.
        p[0] = p[1]
        ptr = p[0]
        while ptr.pointer:
            ptr = ptr.pointer
        ptr.pointer = p[2]

def p_direct_abstract_declarator(p):
    '''direct_abstract_declarator : '(' abstract_declarator ')'
                      | '[' ']'
                      | '[' constant_expression ']'
                      | direct_abstract_declarator '[' ']'
                      | direct_abstract_declarator '[' constant_expression ']'
                      | direct_abstract_declarator '(' ')'
                      | direct_abstract_declarator '(' parameter_type_list ')'
    '''
    if p[1] == '(' and isinstance(p[2], cdeclarations.Declarator):
        # Parenthesized abstract declarator.
        p[0] = p[2]
    else:
        if isinstance(p[1], cdeclarations.Declarator):
            # Array or function suffix on an existing declarator.
            p[0] = p[1]
            if p[2] == '[':
                a = cdeclarations.Array()
                a.array = p[0].array
                p[0].array = a
                if p[3] != ']':
                    p[0].array.size = p[3]
            elif p[2] == '(':
                if p[3] == ')':
                    p[0].parameters = ()
                else:
                    p[0].parameters = p[3]
        else:
            # Suffix with no preceding declarator: start a fresh one.
            p[0] = cdeclarations.Declarator()
            if p[1] == '[':
                p[0].array = cdeclarations.Array()
                if p[2] != ']':
                    p[0].array.size = p[2]
            elif p[1] == '(':
                if p[2] == ')':
                    p[0].parameters = ()
                else:
                    p[0].parameters = p[2]
    
    # Check parameters for (void) and simplify to empty tuple.
    if p[0].parameters and len(p[0].parameters) == 1:
        param = p[0].parameters[0]
        if param.type.specifiers == ['void'] and not param.declarator:
            p[0].parameters = ()
+
# Statement-level grammar.  These productions exist only so that function
# bodies and initializers in headers PARSE without errors; their semantic
# values are deliberately discarded (bodies are empty).
def p_initializer(p):
    '''initializer : assignment_expression
                   | '{' initializer_list '}'
                   | '{' initializer_list ',' '}'
    '''

def p_initializer_list(p):
    '''initializer_list : initializer
                        | initializer_list ',' initializer
    '''

def p_statement(p):
    '''statement : labeled_statement
                 | compound_statement
                 | expression_statement
                 | selection_statement
                 | iteration_statement
                 | jump_statement
    '''

def p_labeled_statement(p):
    '''labeled_statement : IDENTIFIER ':' statement
                         | CASE constant_expression ':' statement
                         | DEFAULT ':' statement
    '''

def p_compound_statement(p):
    '''compound_statement : '{' '}'
                          | '{' statement_list '}'
                          | '{' declaration_list '}'
                          | '{' declaration_list statement_list '}'
    '''

def p_compound_statement_error(p):
    '''compound_statement : '{' error '}'
    '''
    # Error resynchronisation catch-all

def p_declaration_list(p):
    '''declaration_list : declaration
                        | declaration_list declaration
    '''

def p_statement_list(p):
    '''statement_list : statement
                      | statement_list statement
    '''

def p_expression_statement(p):
    '''expression_statement : ';'
                            | expression ';'
    '''
def p_expression_statement_error(p):
    '''expression_statement : error ';'
    '''
    # Error resynchronisation catch-all

def p_selection_statement(p):
    '''selection_statement : IF '(' expression ')' statement
                           | IF '(' expression ')' statement ELSE statement
                           | SWITCH '(' expression ')' statement
    '''

def p_iteration_statement(p):
    '''iteration_statement : WHILE '(' expression ')' statement
    | DO statement WHILE '(' expression ')' ';'
    | FOR '(' expression_statement expression_statement ')' statement
    | FOR '(' expression_statement expression_statement expression ')' statement
    '''

def p_jump_statement(p):
    '''jump_statement : GOTO IDENTIFIER ';'
                      | CONTINUE ';'
                      | BREAK ';'
                      | RETURN ';'
                      | RETURN expression ';'
    '''

def p_external_declaration(p):
    '''external_declaration : declaration 
                            | function_definition
    '''

    # Intentionally empty

def p_function_definition(p):
    '''function_definition : declaration_specifiers declarator declaration_list compound_statement
                        | declaration_specifiers declarator compound_statement
                        | declarator declaration_list compound_statement
                        | declarator compound_statement
    '''
+
# #define productions.  The preprocessor wraps each define in
# PP_DEFINE ... PP_END_DEFINE so the C grammar can parse the body.
def p_define(p):
    '''define : PP_DEFINE PP_DEFINE_NAME PP_END_DEFINE
              | PP_DEFINE PP_DEFINE_NAME type_name PP_END_DEFINE
              | PP_DEFINE PP_DEFINE_NAME constant_expression PP_END_DEFINE
              | PP_DEFINE PP_DEFINE_MACRO_NAME '(' ')' PP_END_DEFINE
              | PP_DEFINE PP_DEFINE_MACRO_NAME '(' ')' constant_expression PP_END_DEFINE
              | PP_DEFINE PP_DEFINE_MACRO_NAME '(' macro_parameter_list ')' PP_END_DEFINE
              | PP_DEFINE PP_DEFINE_MACRO_NAME '(' macro_parameter_list ')' constant_expression PP_END_DEFINE
    '''
    
    filename = p.slice[1].filename
    lineno = p.slice[1].lineno
    
    # Object-like macro: value is a type_name/constant_expression or None.
    if p[3] != '(':
        if len(p) == 4:
           p.parser.cparser.handle_define_constant(p[2], None, filename,
                                                   lineno)
        else:
            p.parser.cparser.handle_define_constant(p[2], p[3], filename,
                                                    lineno)
    else:
        # Function-like macro: sort out the parameter list and optional
        # body expression from the production length.
        if p[4] == ')':
            params = []
            if len(p) == 6:
                expr = None
            elif len(p) == 7:
                expr = p[5]
        else:
            params = p[4]
            if len(p) == 7:
                expr = None
            elif len(p) == 8:
                expr = p[6]
        
        filename = p.slice[1].filename
        lineno = p.slice[1].lineno
        
        p.parser.cparser.handle_define_macro(p[2], params, expr, filename, lineno)
+
# Fallback for #define bodies the grammar cannot parse: recover the raw
# token text between PP_DEFINE and PP_END_DEFINE and report it as an
# unparseable define instead of failing.
def p_define_error(p):
    '''define : PP_DEFINE error PP_END_DEFINE'''
    lexer = p[2].lexer
    clexdata = lexer.tokens
    # Scan outward from the error token to the enclosing define markers.
    start = end = p[2].clexpos
    while clexdata[start].type != 'PP_DEFINE':
        start -= 1
    while clexdata[end].type != 'PP_END_DEFINE':
        end += 1
        
    name = clexdata[start+1].value
    if clexdata[start+1].type == 'PP_DEFINE_NAME':
        # Object-like macro: everything after the name is the body.
        params = None
        contents = [t.value for t in clexdata[start+2:end]]
    else:
        # Function-like macro: split at the ')' closing the parameter list;
        # start+3 skips PP_DEFINE, the macro name and '('.
        end_of_param_list = start
        while clexdata[end_of_param_list].value != ')' and \
              end_of_param_list<end:
            end_of_param_list += 1
        params = [t.value for t in clexdata[start+3:end_of_param_list] if \
                    t.value != ',']
        contents = [t.value for t in clexdata[end_of_param_list+1:end]]
    
    filename = p.slice[1].filename
    lineno = p.slice[1].lineno
    
    p[2].lexer.cparser.handle_define_unparseable(name, params, contents, \
                                                 filename, lineno)
+
# Accumulates macro parameter names into a list (mutated in place for the
# recursive case).
def p_macro_parameter_list(p):
    '''macro_parameter_list : PP_MACRO_PARAM
                            | macro_parameter_list ',' PP_MACRO_PARAM
    '''
    if len(p)==2:
        p[0] = [p[1]]
    else:
        p[1].append(p[3])
        p[0] = p[1]
+
# PLY error hook.  Errors inside a #define are suppressed here because
# p_define_error reports them with better context.
def p_error(t):
    if t.lexer.in_define:
        # p_define_error will generate an error message.
        pass
    else:
        if t.type == '$end':
            # NOTE(review): this branch reaches the CParser via t.parser
            # while the branch below uses t.lexer -- confirm the end-of-file
            # pseudo-token actually carries a .parser attribute.
            t.parser.cparser.handle_error('Syntax error at end of file.', 
                 t.filename, 0)
        else:
            t.lexer.cparser.handle_error('Syntax error at %r' % t.value, 
                 t.filename, t.lineno)
    # Don't alter lexer: default behaviour is to pass error production
    # up until it hits the catch-all at declaration, at which point
    # parsing continues (synchronisation).

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/cparser.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/cparser.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/cparser.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,208 @@
+#!/usr/bin/env python
+
+'''
+Parse a C source file.
+
+To use, subclass CParser and override its handle_* methods.  Then instantiate
+the class with a string to parse.
+'''
+
+__docformat__ = 'restructuredtext'
+
+import operator
+import os.path
+import re
+import sys
+import time
+import warnings
+
+import preprocessor
+import yacc
+import cgrammar
+import cdeclarations
+
+# --------------------------------------------------------------------------
+# Lexer
+# --------------------------------------------------------------------------
+
class CLexer(object):
    '''Adapter between the preprocessor's token list and the yacc parser.

    Rewrites preprocessor-specific token types into the C token types the
    grammar expects, and promotes IDENTIFIER tokens to TYPE_NAME when the
    name is a known typedef.
    '''
    def __init__(self, cparser):
        # Owning CParser, reachable from tokens for error reporting.
        self.cparser = cparser
        # Known typedef names; extended by CParser.impl_handle_declaration.
        self.type_names = set()
        # True while between PP_DEFINE and PP_END_DEFINE tokens.
        self.in_define = False

    def input(self, tokens):
        # Takes the fully pre-tokenized stream, not raw text.
        self.tokens = tokens
        self.pos = 0

    def token(self):
        '''Return the next (possibly retyped) token, or None at end.'''
        while self.pos < len(self.tokens):
            t = self.tokens[self.pos]
            
            self.pos += 1

            # A falsy entry ends the stream early.
            if not t:
                break
            
            if t.type == 'PP_DEFINE':
                self.in_define = True
            elif t.type == 'PP_END_DEFINE':
                self.in_define = False
            
            # Transform PP tokens into C tokens
            elif t.type == 'LPAREN':
                t.type = '('
            elif t.type == 'PP_NUMBER':
                t.type = 'CONSTANT'
            elif t.type == 'IDENTIFIER' and t.value in cgrammar.keywords:
                # C keyword: token type becomes the upper-cased keyword.
                t.type = t.value.upper()
            elif t.type == 'IDENTIFIER' and t.value in self.type_names:
                # Typedef name -- but not right after enum/struct/union,
                # where a tag may legally shadow a typedef.
                if (self.pos < 2 or self.tokens[self.pos-2].type not in
                    ('ENUM', 'STRUCT', 'UNION')):
                    t.type = 'TYPE_NAME'
            
            # Backlink and position so grammar error handlers can recover
            # surrounding context (see p_define_error).
            t.lexer = self
            t.clexpos = self.pos - 1
            
            return t
        return None
+        
+# --------------------------------------------------------------------------
+# Parser
+# --------------------------------------------------------------------------
+
class CParser(object):
    '''Parse a C source file.

    Subclass and override the handle_* methods.  Call `parse` with a string
    to parse.
    '''
    def __init__(self, options, stddef_types=True, gnu_types=True):
        # The preprocessor runs first and produces the token stream that
        # CLexer feeds to yacc.
        self.preprocessor_parser = preprocessor.PreprocessorParser(options,self)
        self.parser = yacc.Parser()
        prototype = yacc.yacc(method        = 'LALR',
                              debug         = False,
                              module        = cgrammar,
                              write_tables  = True,
                              outputdir     = os.path.dirname(__file__),
                              optimize      = True)
        
        # If yacc is reading tables from a file, then it won't find the error
        # function... need to set it manually
        prototype.errorfunc = cgrammar.p_error
        prototype.init_parser(self.parser)
        self.parser.cparser = self

        self.lexer = CLexer(self)
        # Pre-seed typedef names that headers assume without defining.
        if stddef_types:
            self.lexer.type_names.add('wchar_t')
            self.lexer.type_names.add('ptrdiff_t')
            self.lexer.type_names.add('size_t')
        if gnu_types:
            self.lexer.type_names.add('__builtin_va_list')
        if sys.platform == 'win32':
            self.lexer.type_names.add('__int64')
        
    def parse(self, filename, debug=False):
        '''Parse a file.

        If `debug` is True, parsing state is dumped to stdout.
        '''

        self.handle_status('Preprocessing %s' % filename)
        self.preprocessor_parser.parse(filename)
        self.lexer.input(self.preprocessor_parser.output)
        self.handle_status('Parsing %s' % filename)
        self.parser.parse(lexer=self.lexer, debug=debug)

    # ----------------------------------------------------------------------
    # Parser interface.  Override these methods in your subclass.
    # ----------------------------------------------------------------------

    def handle_error(self, message, filename, lineno):
        '''A parse error occurred.

        The default implementation prints `lineno` and `message` to stderr.
        The parser will try to recover from errors by synchronising at the
        next semicolon.
        '''
        print >> sys.stderr, '%s:%s %s' % (filename, lineno, message)
    
    def handle_pp_error(self, message):
        '''The C preprocessor emitted an error.

        The default implementation prints the error to stderr. If processing
        can continue, it will.
        '''
        print >> sys.stderr, 'Preprocessor:', message
    
    def handle_status(self, message):
        '''Progress information.

        The default implementation prints message to stderr.
        '''
        print >> sys.stderr, message

    def handle_define(self, name, params, value, filename, lineno):
        '''#define `name` `value` 
        or #define `name`(`params`) `value`

        name is a string
        params is None or a list of strings
        value is a ...?
        '''

    def handle_define_constant(self, name, value, filename, lineno):
        '''#define `name` `value`
        
        name is a string
        value is an ExpressionNode or None
        '''
    
    def handle_define_macro(self, name, params, value, filename, lineno):
        '''#define `name`(`params`) `value`
        
        name is a string
        params is a list of strings
        value is an ExpressionNode or None
        '''
    
    def impl_handle_declaration(self, declaration, filename, lineno):
        '''Internal method that calls `handle_declaration`.  This method
        also adds any new type definitions to the lexer's list of valid type
        names, which affects the parsing of subsequent declarations.
        '''
        if declaration.storage == 'typedef':
            declarator = declaration.declarator
            if not declarator:
                # XXX TEMPORARY while struct etc not filled
                return
            # Walk past any pointer nodes to the identifier-bearing
            # declarator at the end of the chain.
            while declarator.pointer:
                declarator = declarator.pointer
            self.lexer.type_names.add(declarator.identifier)
        self.handle_declaration(declaration, filename, lineno)

    def handle_declaration(self, declaration, filename, lineno):
        '''A declaration was encountered.  
        
        `declaration` is an instance of Declaration.  Where a declaration has
        multiple initialisers, each is returned as a separate declaration.
        '''
        pass
+
+class DebugCParser(CParser):
+    '''A convenience class that prints each invocation of a handle_* method to
+    stdout.
+    '''
+
+    def handle_define(self, name, value, filename, lineno):
+        print '#define name=%r, value=%r' % (name, value)
+
+    def handle_define_constant(self, name, value, filename, lineno):
+        print '#define constant name=%r, value=%r' % (name, value)
+
+    def handle_declaration(self, declaration, filename, lineno):
+        print declaration
+        
if __name__ == '__main__':
    # NOTE(review): CParser.__init__ requires an `options` argument, so
    # DebugCParser() with no arguments looks broken -- confirm before
    # relying on this debug entry point.
    DebugCParser().parse(sys.argv[1], debug=True)

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/ctypesparser.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/ctypesparser.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/ctypesparser.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,198 @@
+#!/usr/bin/env python
+
+'''
+ctypesgencore.parser.ctypesparser contains a class, CtypesParser, which is a
+subclass of ctypesgencore.parser.cparser.CParser. CtypesParser overrides the
+handle_declaration() method of CParser. It turns the low-level type declarations
+produced by CParser into CtypesType instances and breaks the parser's general
+declarations into function, variable, typedef, constant, and type descriptions.
+'''
+
+__docformat__ = 'restructuredtext'
+
+__all__ = ["CtypesParser"]
+
+from cparser import *
+from ctypesgencore.ctypedescs import *
+from cdeclarations import *
+from ctypesgencore.expressions import *
+
def get_ctypes_type(typ, declarator, check_qualifiers=False):
    '''Convert a cparser (type, declarator) pair into a CtypesType tree.

    `typ` supplies the specifier list ("unsigned", "long", struct/enum
    specifiers, typedef names); `declarator` supplies pointers, arrays,
    function parameters and bitfields.  Returns a CtypesType instance.
    '''
    # Defaults mean an empty/partial specifier list reads as "signed int".
    signed = True
    typename = 'int'
    longs = 0
    t = None
    
    for specifier in typ.specifiers:
        if isinstance(specifier, StructTypeSpecifier):
            t = make_struct_from_specifier(specifier)
        elif isinstance(specifier, EnumSpecifier):
            t = make_enum_from_specifier(specifier)
        elif specifier == 'signed':
            signed = True
        elif specifier == 'unsigned':
            signed = False
        elif specifier == 'long':
            longs += 1
        else:
            # Any other specifier is taken as the base type name
            # (builtin like "char"/"double" or a typedef name).
            typename = str(specifier)
    
    if not t:
        # It is a numeric type of some sort
        if (typename,signed,longs) in ctypes_type_map:
            t = CtypesSimple(typename,signed,longs)
        
        elif signed and not longs:
            # Plain unrecognized name: assume it is a typedef.
            t = CtypesTypedef(typename)
        
        else:
            # "unsigned foo_t"-style combination: not representable.
            name = " ".join(typ.specifiers)
            if typename in [x[0] for x in ctypes_type_map.keys()]:
                # It's an unsupported variant of a builtin type
                error = "Ctypes does not support the type \"%s\"." % name
            else:
                error = "Ctypes does not support adding additional " \
                    "specifiers to typedefs, such as \"%s\"" % name
            t = CtypesTypedef(name)
            t.error(error,cls='unsupported-type')
        
        if declarator and declarator.bitfield:
            t = CtypesBitfield(t,declarator.bitfield)

    # NOTE(review): `qualifiers` is accumulated here but never used below
    # (only declarator.qualifiers is passed to CtypesPointer) — presumably
    # related to the unused check_qualifiers parameter; confirm upstream.
    qualifiers = []
    qualifiers.extend(typ.qualifiers)
    # Unwrap the declarator chain outside-in, wrapping t in
    # function/array/pointer layers as we go.
    while declarator and declarator.pointer:
        if declarator.parameters is not None:
            # Parameters present at this level: t becomes a function type.
            variadic = "..." in declarator.parameters

            params = []
            for param in declarator.parameters:
                if param=="...":
                    break
                params.append(get_ctypes_type(param.type, param.declarator))
            t = CtypesFunction(t, params, variadic)
        
        a = declarator.array
        while a:
            t = CtypesArray(t, a.size)
            a = a.array

        qualifiers.extend(declarator.qualifiers)
        
        t = CtypesPointer(t, declarator.qualifiers)
        
        declarator = declarator.pointer
    
    # Handle parameters/arrays on the innermost (non-pointer) declarator.
    if declarator and declarator.parameters is not None:
        variadic = "..." in declarator.parameters

        params = []
        for param in declarator.parameters:
            if param=="...":
                break
            params.append(get_ctypes_type(param.type, param.declarator))
        t = CtypesFunction(t, params, variadic)
    
    if declarator:
        a = declarator.array
        while a:
            t = CtypesArray(t, a.size)
            a = a.array
    
    # Special case: "char *" (signed char pointer) maps to ctypesgen's
    # String helper type rather than a raw pointer.
    if isinstance(t, CtypesPointer) and \
       isinstance(t.destination, CtypesSimple) and \
       t.destination.name=="char" and \
       t.destination.signed:
       t = CtypesSpecial("String")

    return t
+
def make_struct_from_specifier(specifier):
    '''Build a CtypesStruct from a parsed StructTypeSpecifier.

    Handles both structs and unions.  A specifier with no declarations
    produces an opaque struct (members is None).
    '''
    variety = "union" if specifier.is_union else "struct"
    tag = specifier.tag

    if not specifier.declarations:
        members = None
    else:
        members = []
        for declaration in specifier.declarations:
            member_type = get_ctypes_type(declaration.type,
                                          declaration.declarator,
                                          check_qualifiers=True)
            declarator = declaration.declarator
            if declarator is None:
                # XXX TEMPORARY while struct with no typedef not filled in
                break
            # The identifier lives on the innermost declarator.
            while declarator.pointer:
                declarator = declarator.pointer
            members.append((declarator.identifier,
                            remove_function_pointer(member_type)))

    return CtypesStruct(tag, variety, members,
                        src=(specifier.filename, specifier.lineno))
+
def make_enum_from_specifier(specifier):
    '''Build a CtypesEnum from a parsed EnumSpecifier.

    Enumerators without an explicit expression follow C semantics: the
    first defaults to 0, and every later one to <previous name> + 1,
    expressed symbolically so the value tracks the previous enumerator.
    '''
    enumerators = []
    previous = None
    for enumerator in specifier.enumerators:
        if enumerator.expression:
            value = enumerator.expression
        elif previous:
            value = BinaryExpressionNode("addition", (lambda x, y: x + y),
                "(%s + %s)", (False, False),
                IdentifierExpressionNode(previous),
                ConstantExpressionNode(1))
        else:
            value = ConstantExpressionNode(0)

        enumerators.append((enumerator.name, value))
        previous = enumerator.name

    return CtypesEnum(specifier.tag, enumerators,
                      src=(specifier.filename, specifier.lineno))
+
class CtypesParser(CParser):
    '''Parse a C file for declarations that can be used by ctypes.
    
    Subclass and override the handle_ctypes_* methods.
    '''

    def handle_declaration(self, declaration, filename, lineno):
        '''Classify one declaration and forward it to the appropriate
        handle_ctypes_* callback (new type, typedef, function, variable).
        '''
        t = get_ctypes_type(declaration.type, declaration.declarator)
        
        # Announce newly defined structs/enums before the entity
        # (typedef/function/variable) that references them.
        if type(t) in (CtypesStruct, CtypesEnum):
            self.handle_ctypes_new_type(
                remove_function_pointer(t), filename, lineno)
        
        declarator = declaration.declarator
        if declarator is None:
            # XXX TEMPORARY while struct with no typedef not filled in
            return
        # The identifier lives on the innermost declarator.
        while declarator.pointer:
            declarator = declarator.pointer
        name = declarator.identifier
        if declaration.storage == 'typedef':
            self.handle_ctypes_typedef(
                name, remove_function_pointer(t), filename, lineno)
        elif type(t) == CtypesFunction:
            self.handle_ctypes_function(
                name, t.restype, t.argtypes, t.variadic, filename, lineno)
        elif declaration.storage != 'static':
            # static declarations are file-local; do not expose them.
            self.handle_ctypes_variable(name, t, filename, lineno)

    # ctypes parser interface.  Override these methods in your subclass.
    
    def handle_ctypes_new_type(self, ctype, filename, lineno):
        pass
    
    def handle_ctypes_typedef(self, name, ctype, filename, lineno):
        pass

    # BUG FIX: 'variadic' was missing from this stub's signature even
    # though handle_declaration() above always passes it (and subclasses
    # such as DataCollectingParser accept it); reaching the base-class
    # default would have raised a TypeError.
    def handle_ctypes_function(self, name, restype, argtypes, variadic,
                               filename, lineno):
        pass

    def handle_ctypes_variable(self, name, ctype, filename, lineno):
        pass

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/datacollectingparser.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/datacollectingparser.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/datacollectingparser.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,328 @@
+#!/usr/bin/env python
+
+"""
+DataCollectingParser subclasses ctypesparser.CtypesParser and builds Description
+objects from the CtypesType objects and other information from CtypesParser.
+After parsing is complete, a DescriptionCollection object can be retrieved by
+calling DataCollectingParser.data(). 
+"""
+
+import ctypesparser
+from ctypesgencore.descriptions import *
+from ctypesgencore.ctypedescs import *
+from ctypesgencore.expressions import *
+from ctypesgencore.messages import *
+from tempfile import mkstemp
+import os
+
class DataCollectingParser(ctypesparser.CtypesParser,
                           ctypesparser.CtypesTypeVisitor):
    """Main class for the Parser component. Steps for use:
    p=DataCollectingParser(names_of_header_files,options)
    p.parse()
    data=p.data() #A dictionary of constants, enums, structs, functions, etc.
    """
    def __init__(self,headers,options):
        ctypesparser.CtypesParser.__init__(self,options)
        self.headers=headers
        self.options=options
        
        # One list per description category; 'all' holds every description
        # in discovery order, 'output_order' is what the printer emits.
        self.constants=[]
        self.typedefs=[]
        self.structs=[]
        self.enums=[]
        self.functions=[]
        self.variables=[]
        self.macros=[]
        
        self.all=[]
        self.output_order=[]
        
        # NULL is a useful macro to have defined
        null = ConstantExpressionNode(None)
        nullmacro = ConstantDescription("NULL",null,("<built-in>",1))
        self.constants.append(nullmacro)
        self.all.append(nullmacro)
        self.output_order.append(("constant", nullmacro))
        
        # A list of tuples describing macros; saved to be processed after
        # everything else has been parsed
        self.saved_macros = []
        # A set of structs that are already known
        self.already_seen_structs=set() 
        # A dict of structs that have only been seen in opaque form
        self.already_seen_opaque_structs={} 
        # A set of enums that are already known
        self.already_seen_enums=set() 
        # A dict of enums that have only been seen in opaque form
        self.already_seen_opaque_enums={}
            
    def parse(self):
        '''Write a scratch header that #includes every requested header,
        parse it, then process the macros that were saved for last.'''
        fd, fname = mkstemp(suffix=".h")
        f = os.fdopen(fd, 'w+b')
        for header in self.options.other_headers:
            print >>f, '#include <%s>' % header
        for header in self.headers:
            print >>f, '#include "%s"' % os.path.abspath(header)
        f.flush()
        f.close()
        ctypesparser.CtypesParser.parse(self,fname,None)
        os.remove(fname)

        # Macros are evaluated last so that every name they might
        # reference has already been collected.
        for name, params, expr, (filename,lineno) in self.saved_macros:
            self.handle_macro(name, params, expr, filename, lineno)
            
    def handle_define_constant(self, name, expr, filename, lineno):
        # Called by CParser
        # Save to handle later
        self.saved_macros.append((name, None, expr, (filename, lineno)))
    
    def handle_define_unparseable(self, name, params, value, filename, lineno):
        # Called by CParser.  Record the macro anyway, attaching an error
        # so downstream processing can report (and skip) it.
        if params:
            original_string = "#define %s(%s) %s" % \
                (name, ",".join(params), " ".join(value))
        else:
            original_string = "#define %s %s" % \
                (name, " ".join(value))
        macro = MacroDescription(name, params, None,
                                 src = (filename,lineno))
        macro.error("Could not parse macro \"%s\"" % original_string,
                    cls = 'macro')
        macro.original_string = original_string
        self.macros.append(macro)
        self.all.append(macro)
        self.output_order.append(('macro',macro))
    
    def handle_define_macro(self, name, params, expr, filename, lineno):
        # Called by CParser
        # Save to handle later
        self.saved_macros.append((name, params, expr, (filename,lineno)))
    
    def handle_ctypes_typedef(self, name, ctype, filename, lineno):
        # Called by CtypesParser.  Visiting the ctype first ensures any
        # struct/enum it embeds is described before the typedef itself.
        ctype.visit(self)
        
        # NOTE(review): repr(lineno) here (and in the function/variable
        # handlers below) vs str(lineno) in the struct/enum handlers —
        # identical for ints, but the inconsistency looks accidental.
        typedef=TypedefDescription(name,
                                   ctype,
                                   src=(filename,repr(lineno)))
        
        self.typedefs.append(typedef)
        self.all.append(typedef)
        self.output_order.append(('typedef',typedef))
    
    def handle_ctypes_new_type(self, ctype, filename, lineno):
        # Called by CtypesParser
        if isinstance(ctype,ctypesparser.CtypesEnum):
            self.handle_enum(ctype, filename, lineno)
        else:
            self.handle_struct(ctype, filename, lineno)
    
    def handle_ctypes_function(self, name, restype, argtypes, variadic,
                               filename, lineno):
        # Called by CtypesParser.  Visit result/argument types first so
        # their struct/enum dependencies are emitted before the function.
        restype.visit(self)
        for argtype in argtypes:
            argtype.visit(self)
        
        function=FunctionDescription(name,
                                     restype,
                                     argtypes,
                                     variadic = variadic,
                                     src=(filename,repr(lineno)))
        
        self.functions.append(function)
        self.all.append(function)
        self.output_order.append(('function',function))

    def handle_ctypes_variable(self, name, ctype, filename, lineno):
        # Called by CtypesParser
        ctype.visit(self)
        
        variable=VariableDescription(name,
                                     ctype,
                                     src=(filename,repr(lineno)))
        
        self.variables.append(variable)
        self.all.append(variable)
        self.output_order.append(('variable',variable))

    def handle_struct(self, ctypestruct, filename, lineno):
        # Called from within DataCollectingParser

        # When we find an opaque struct, we make a StructDescription for it
        # and record it in self.already_seen_opaque_structs. If we later
        # find a transparent struct with the same tag, we fill in the
        # opaque struct with the information from the transparent struct and
        # move the opaque struct to the end of the struct list.
        
        name = "%s %s"%(ctypestruct.variety,ctypestruct.tag)
        
        if name in self.already_seen_structs:
            return
        
        if ctypestruct.opaque:
            if name not in self.already_seen_opaque_structs:
                struct = StructDescription(ctypestruct.tag,
                                           ctypestruct.variety,
                                           None, # No members
                                           True, # Opaque
                                           ctypestruct,
                                           src=(filename,str(lineno)))
                
                self.already_seen_opaque_structs[name]=struct
                self.structs.append(struct)
                self.all.append(struct)
                self.output_order.append(('struct',struct))
        
        else:
            # Transparent struct: describe member types first.
            for (membername,ctype) in ctypestruct.members:
                ctype.visit(self)
            
            if name in self.already_seen_opaque_structs:
                # Fill in older version
                struct=self.already_seen_opaque_structs[name]
                struct.opaque = False
                struct.members = ctypestruct.members
                struct.ctype = ctypestruct
                struct.src = ctypestruct.src
                
                self.output_order.append(('struct-body',struct))
                
                del self.already_seen_opaque_structs[name]
            
            else:
                struct = StructDescription(ctypestruct.tag,
                                           ctypestruct.variety,
                                           ctypestruct.members,
                                           False, # Not opaque
                                           src=(filename,str(lineno)),
                                           ctype=ctypestruct)                
                self.structs.append(struct)
                self.all.append(struct)
                self.output_order.append(('struct',struct))
                self.output_order.append(('struct-body',struct))
            
            self.already_seen_structs.add(name)
    
    def handle_enum(self, ctypeenum, filename, lineno):
        # Called from within DataCollectingParser.
        
        # Process for handling opaque enums is the same as process for opaque
        # structs. See handle_struct() for more details.
        
        tag = ctypeenum.tag
        if tag in self.already_seen_enums:
            return
            
        if ctypeenum.opaque:
            if tag not in self.already_seen_opaque_enums:
                enum=EnumDescription(ctypeenum.tag,
                             ctypeenum.enumerators,
                             ctypeenum,
                             src = (filename,str(lineno)))
                enum.opaque = True
                
                self.already_seen_opaque_enums[tag]=enum
                self.enums.append(enum)
                self.all.append(enum)
                self.output_order.append(('enum',enum))
                
        else:
            if tag in self.already_seen_opaque_enums:
                # Fill in older opaque version
                enum = self.already_seen_opaque_enums[tag]
                enum.opaque = False
                enum.ctype = ctypeenum
                enum.src = ctypeenum.src
            
                del self.already_seen_opaque_enums[tag]
            
            else:
                enum=EnumDescription(ctypeenum.tag,
                                None,
                                src=(filename,str(lineno)),
                                ctype=ctypeenum)
                enum.opaque = False
                
                self.enums.append(enum)
                self.all.append(enum)
                self.output_order.append(('enum',enum))
            
            self.already_seen_enums.add(tag)
            
            # Each enumerator also becomes a module-level constant.
            for (enumname,expr) in ctypeenum.enumerators:                
                constant=ConstantDescription(enumname, expr,
                                             src=(filename,lineno))
                
                self.constants.append(constant)
                self.all.append(constant)
                self.output_order.append(('constant',constant))
    
    def handle_macro(self, name, params, expr, filename, lineno):
        # Called from within DataCollectingParser
        src = (filename,lineno)
        
        if expr is None:
            # Valueless "#define FOO": represent it as the constant True.
            expr = ConstantExpressionNode(True)
            constant = ConstantDescription(name, expr, src)
            self.constants.append(constant)
            self.all.append(constant)
            # NOTE(review): unlike every other description added in this
            # class, this constant is not appended to output_order —
            # verify whether valueless defines are meant to be printed.
            return
        
        expr.visit(self)
        
        if isinstance(expr,CtypesType):
            if params:
                # BUG FIX: was MacroDescription(name, "", src), which
                # passed src as the 'expr' positional argument (compare
                # the calls in handle_define_unparseable and below) and
                # lost the macro's source location entirely.
                macro = MacroDescription(name, params, None, src)
                macro.error("%s has parameters but evaluates to a type. " \
                    "Ctypesgen does not support it." % macro.casual_name(),
                    cls = 'macro')
                self.macros.append(macro)
                self.all.append(macro)
                self.output_order.append(('macro',macro))
            
            else:
                # Macro that evaluates to a type: treat it as a typedef.
                typedef = TypedefDescription(name, expr, src)
                self.typedefs.append(typedef)
                self.all.append(typedef)
                self.output_order.append(('typedef',typedef))
        
        else:
            macro = MacroDescription(name, params, expr, src)
            self.macros.append(macro)
            self.all.append(macro)
            self.output_order.append(('macro',macro))
        
        # Macros could possibly contain things like __FILE__, __LINE__, etc...
        # This could be supported, but it would be a lot of work. It would
        # probably also bloat the Preamble considerably.
        
    def handle_error(self, message, filename, lineno):
        # Called by CParser
        error_message("%s:%d: %s" % (filename,lineno,message), cls='cparser')
    
    def handle_pp_error(self, message):
        # Called by PreprocessorParser
        error_message("%s: %s" % (self.options.cpp, message), cls = 'cparser')
    
    def handle_status(self, message):
        # Called by CParser
        status_message(message)
    
    def visit_struct(self, struct):
        # CtypesTypeVisitor hook: emit a struct encountered inside a type.
        self.handle_struct(struct, struct.src[0], struct.src[1])
    
    def visit_enum(self,enum):
        # CtypesTypeVisitor hook: emit an enum encountered inside a type.
        self.handle_enum(enum, enum.src[0], enum.src[1])
    
    def data(self):
        '''Return everything collected, bundled as a DescriptionCollection.'''
        return DescriptionCollection(self.constants,
                                     self.typedefs,
                                     self.structs,
                                     self.enums,
                                     self.functions,
                                     self.variables,
                                     self.macros,
                                     self.all,
                                     self.output_order)

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/lex.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/lex.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/lex.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,879 @@
+#-----------------------------------------------------------------------------
+# ply: lex.py
+#
+# Author: David M. Beazley (dave at dabeaz.com)
+# Modification for pyglet by Alex Holkner (alex.holkner at gmail.com)
+# Modification for ctypesgen by Tim Maxwell (timmaxw at gmail.com) <tm>
+#
+# Copyright (C) 2001-2006, David M. Beazley
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+# 
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+# 
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+# 
+# See the file LICENSE for a complete copy of the LGPL.
+#-----------------------------------------------------------------------------
+
+__version__ = "2.2"
+
+import re, sys, types, os.path
+
# Regular expression used to match valid token names
_is_identifier = re.compile(r'^[a-zA-Z0-9_]+$')

# Available instance types.  This is used when lexers are defined by a class.
# It's a little funky because I want to preserve backwards compatibility
# with Python 2.0 where types.ObjectType is undefined.

try:
   _INSTANCETYPE = (types.InstanceType, types.ObjectType)
except AttributeError:
   # Pre-2.2 Python: no new-style classes, so no types.ObjectType.
   _INSTANCETYPE = types.InstanceType
   class object: pass       # Note: needed if no new-style classes present
+
+# Exception thrown when invalid token encountered and no default error
+# handler is defined.
class LexError(Exception):
    '''Raised on an invalid token when no default error handler is defined.

    `text` holds the remaining (unmatched) input at the point of failure.
    '''
    def __init__(self, message, s):
        self.args = (message,)
        self.text = s
+
+# Token class
+class LexToken(object):
+    def __str__(self):
+        return "LexToken(%s,%r,%d,%d)" % (self.type,self.value,self.lineno,self.lexpos)
+    def __repr__(self):
+        return str(self)
+    def skip(self,n):
+        self.lexer.skip(n)
+
+# -----------------------------------------------------------------------------
+# Lexer class
+#
+# This class encapsulates all of the methods and data associated with a lexer.
+#
+#    input()          -  Store a new string in the lexer
+#    token()          -  Get the next token
+# -----------------------------------------------------------------------------
+
+class Lexer:
+    def __init__(self):
+        self.lexre = None             # Master regular expression. This is a list of 
+                                      # tuples (re,findex) where re is a compiled
+                                      # regular expression and findex is a list
+                                      # mapping regex group numbers to rules
+        self.lexretext = None         # Current regular expression strings
+        self.lexstatere = {}          # Dictionary mapping lexer states to master regexs
+        self.lexstateretext = {}      # Dictionary mapping lexer states to regex strings
+        self.lexstate = "INITIAL"     # Current lexer state
+        self.lexstatestack = []       # Stack of lexer states
+        self.lexstateinfo = None      # State information
+        self.lexstateignore = {}      # Dictionary of ignored characters for each state
+        self.lexstateerrorf = {}      # Dictionary of error functions for each state
+        self.lexreflags = 0           # Optional re compile flags
+        self.lexdata = None           # Actual input data (as a string)
+        self.lexpos = 0               # Current position in input text
+        self.lexlen = 0               # Length of the input text
+        self.lexerrorf = None         # Error rule (if any)
+        self.lextokens = None         # List of valid tokens
+        self.lexignore = ""           # Ignored characters
+        self.lexliterals = ""         # Literal characters that can be passed through
+        self.lexmodule = None         # Module
+        self.lineno = 1               # Current line number
+        self.lexdebug = 0             # Debugging mode
+        self.lexoptimize = 0          # Optimized mode
+
+    def clone(self,object=None):
+        c = Lexer()
+        c.lexstatere = self.lexstatere
+        c.lexstateinfo = self.lexstateinfo
+        c.lexstateretext = self.lexstateretext
+        c.lexstate = self.lexstate
+        c.lexstatestack = self.lexstatestack
+        c.lexstateignore = self.lexstateignore
+        c.lexstateerrorf = self.lexstateerrorf
+        c.lexreflags = self.lexreflags
+        c.lexdata = self.lexdata
+        c.lexpos = self.lexpos
+        c.lexlen = self.lexlen
+        c.lextokens = self.lextokens
+        c.lexdebug = self.lexdebug
+        c.lineno = self.lineno
+        c.lexoptimize = self.lexoptimize
+        c.lexliterals = self.lexliterals
+        c.lexmodule   = self.lexmodule
+
+        # If the object parameter has been supplied, it means we are attaching the
+        # lexer to a new object.  In this case, we have to rebind all methods in
+        # the lexstatere and lexstateerrorf tables.
+
+        if object:
+            newtab = { }
+            for key, ritem in self.lexstatere.items():
+                newre = []
+                for cre, findex in ritem:
+                     newfindex = []
+                     for f in findex:
+                         if not f or not f[0]:
+                             newfindex.append(f)
+                             continue
+                         newfindex.append((getattr(object,f[0].__name__),f[1]))
+                newre.append((cre,newfindex))
+                newtab[key] = newre
+            c.lexstatere = newtab
+            c.lexstateerrorf = { }
+            for key, ef in self.lexstateerrorf.items():
+                c.lexstateerrorf[key] = getattr(object,ef.__name__)
+            c.lexmodule = object
+
+        # Set up other attributes
+        c.begin(c.lexstate)
+        return c
+
+    # ------------------------------------------------------------
+    # writetab() - Write lexer information to a table file
+    # ------------------------------------------------------------
+    # <tm> 25 June 2008 added 'outputdir'
+    def writetab(self,tabfile,outputdir=''):
+        tf = open(os.path.join(outputdir,tabfile)+".py","w")
+        tf.write("# %s.py. This file automatically created by PLY (version %s). Don't edit!\n" % (tabfile,__version__))
+        tf.write("_lextokens    = %s\n" % repr(self.lextokens))
+        tf.write("_lexreflags   = %s\n" % repr(self.lexreflags))
+        tf.write("_lexliterals  = %s\n" % repr(self.lexliterals))
+        tf.write("_lexstateinfo = %s\n" % repr(self.lexstateinfo))
+        
+        tabre = { }
+        for key, lre in self.lexstatere.items():
+             titem = []
+             for i in range(len(lre)):
+                  titem.append((self.lexstateretext[key][i],_funcs_to_names(lre[i][1])))
+             tabre[key] = titem
+
+        tf.write("_lexstatere   = %s\n" % repr(tabre))
+        tf.write("_lexstateignore = %s\n" % repr(self.lexstateignore))
+
+        taberr = { }
+        for key, ef in self.lexstateerrorf.items():
+             if ef:
+                  taberr[key] = ef.__name__
+             else:
+                  taberr[key] = None
+        tf.write("_lexstateerrorf = %s\n" % repr(taberr))
+        tf.close()
+
+    # ------------------------------------------------------------
+    # readtab() - Read lexer information from a tab file
+    # ------------------------------------------------------------
    def readtab(self,tabfile,fdict):
        # Reload lexer tables written by writetab().  `tabfile` is a module
        # name; `fdict` maps rule-function names back to callables so the
        # saved function names can be relinked.
        # (Python 2 'exec' statement binds 'lextab' as a local here.)
        exec "import %s as lextab" % tabfile
        self.lextokens      = lextab._lextokens
        self.lexreflags     = lextab._lexreflags
        self.lexliterals    = lextab._lexliterals
        self.lexstateinfo   = lextab._lexstateinfo
        self.lexstateignore = lextab._lexstateignore
        self.lexstatere     = { }
        self.lexstateretext = { }
        # Recompile each saved regex and relink its rule functions.
        for key,lre in lextab._lexstatere.items():
             titem = []
             txtitem = []
             for i in range(len(lre)):
                  titem.append((re.compile(lre[i][0],lextab._lexreflags),_names_to_funcs(lre[i][1],fdict)))
                  txtitem.append(lre[i][0])
             self.lexstatere[key] = titem
             self.lexstateretext[key] = txtitem
        # Error handlers were saved by name; look the callables up in fdict.
        self.lexstateerrorf = { }
        for key,ef in lextab._lexstateerrorf.items():
             self.lexstateerrorf[key] = fdict[ef]
        self.begin('INITIAL')
+         
+    # ------------------------------------------------------------
+    # input() - Push a new string into the lexer
+    # ------------------------------------------------------------
+    def input(self,s):
+        if not (isinstance(s,types.StringType) or isinstance(s,types.UnicodeType)):
+            raise ValueError, "Expected a string"
+        self.lexdata = s
+        self.lexpos = 0
+        self.lexlen = len(s)
+
+    # ------------------------------------------------------------
+    # begin() - Changes the lexing state
+    # ------------------------------------------------------------
+    def begin(self,state):
+        if not self.lexstatere.has_key(state):
+            raise ValueError, "Undefined state"
+        self.lexre = self.lexstatere[state]
+        self.lexretext = self.lexstateretext[state]
+        self.lexignore = self.lexstateignore.get(state,"")
+        self.lexerrorf = self.lexstateerrorf.get(state,None)
+        self.lexstate = state
+
+    # ------------------------------------------------------------
+    # push_state() - Changes the lexing state and saves old on stack
+    # ------------------------------------------------------------
+    def push_state(self,state):
+        self.lexstatestack.append(self.lexstate)
+        self.begin(state)
+
+    # ------------------------------------------------------------
+    # pop_state() - Restores the previous state
+    # ------------------------------------------------------------
+    def pop_state(self):
+        self.begin(self.lexstatestack.pop())
+
+    # ------------------------------------------------------------
+    # current_state() - Returns the current lexing state
+    # ------------------------------------------------------------
+    def current_state(self):
+        return self.lexstate
+
+    # ------------------------------------------------------------
+    # skip() - Skip ahead n characters
+    # ------------------------------------------------------------
+    def skip(self,n):
+        # Advance the scan position without producing a token
+        # (typically called from a t_error rule).
+        self.lexpos += n
+
+    # ------------------------------------------------------------
+    # token() - Return the next token from the Lexer
+    #
+    # Note: This function has been carefully implemented to be as fast
+    # as possible.  Don't make changes unless you really know what
+    # you are doing
+    # ------------------------------------------------------------
+    def token(self):
+        # Returns the next LexToken, or None when the input is exhausted.
+        # Make local copies of frequently referenced attributes
+        lexpos    = self.lexpos
+        lexlen    = self.lexlen
+        lexignore = self.lexignore
+        lexdata   = self.lexdata
+
+        while lexpos < lexlen:
+            # This code provides some short-circuit code for whitespace, tabs, and other ignored characters
+            if lexdata[lexpos] in lexignore:
+                lexpos += 1
+                continue
+
+            # Look for a regular expression match
+            # (lexre is a list because Python's re caps the number of named
+            # groups, so the master regex may have been split into pieces)
+            for lexre,lexindexfunc in self.lexre:
+                m = lexre.match(lexdata,lexpos)
+                if not m: continue
+
+                # Set last match in lexer so that rules can access it if they want
+                self.lexmatch = m
+
+                # Create a token for return
+                tok = LexToken()
+                tok.value = m.group()
+                tok.groups = m.groups()
+                tok.lineno = self.lineno
+                tok.lexpos = lexpos
+                tok.lexer = self
+
+                lexpos = m.end()
+                i = m.lastindex
+                # Map the matched group index back to its (handler, token type)
+                func,tok.type = lexindexfunc[i]
+                self.lexpos = lexpos
+
+                if not func:
+                   # If no token type was set, it's an ignored token
+                   if tok.type: return tok
+                   break
+
+                # if func not callable, it means it's an ignored token
+                if not callable(func):
+                   break
+
+                # If token is processed by a function, call it
+                newtok = func(tok)
+
+                # Every function must return a token, if nothing, we just move to next token
+                if not newtok:
+                    lexpos = self.lexpos        # This is here in case user has updated lexpos.
+
+                    # Added for pyglet/tools/wrapper/cparser.py by Alex
+                    # Holkner on 20/Jan/2007
+                    lexdata = self.lexdata
+                    break
+
+                # Verify type of the token.  If not in the token map, raise an error
+                if not self.lexoptimize:
+                    # Allow any single-character literal also for
+                    # pyglet/tools/wrapper/cparser.py by Alex Holkner on
+                    # 20/Jan/2007
+                    if not self.lextokens.has_key(newtok.type) and len(newtok.type) > 1:
+                        raise LexError, ("%s:%d: Rule '%s' returned an unknown token type '%s'" % (
+                            func.func_code.co_filename, func.func_code.co_firstlineno,
+                            func.__name__, newtok.type),lexdata[lexpos:])
+
+                return newtok
+            else:
+                # (for/else) No regex in self.lexre matched at this position.
+                # No match, see if in literals
+                if lexdata[lexpos] in self.lexliterals:
+                    tok = LexToken()
+                    tok.value = lexdata[lexpos]
+                    tok.lineno = self.lineno
+                    tok.lexer = self
+                    tok.type = tok.value
+                    tok.lexpos = lexpos
+                    self.lexpos = lexpos + 1
+                    return tok
+
+                # No match. Call t_error() if defined.
+                if self.lexerrorf:
+                    tok = LexToken()
+                    tok.value = self.lexdata[lexpos:]
+                    tok.lineno = self.lineno
+                    tok.type = "error"
+                    tok.lexer = self
+                    tok.lexpos = lexpos
+                    self.lexpos = lexpos
+                    newtok = self.lexerrorf(tok)
+                    if lexpos == self.lexpos:
+                        # Error method didn't change text position at all. This is an error.
+                        raise LexError, ("Scanning error. Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:])
+                    lexpos = self.lexpos
+                    if not newtok: continue
+                    return newtok
+
+                self.lexpos = lexpos
+                raise LexError, ("Illegal character '%s' at index %d" % (lexdata[lexpos],lexpos), lexdata[lexpos:])
+
+        # End of input reached.
+        self.lexpos = lexpos + 1
+        # NOTE(review): this check only fires after the loop, so a token()
+        # call with no prior input() raises here rather than up front.
+        if self.lexdata is None:
+             raise RuntimeError, "No input string given with input()"
+        return None
+        
+# -----------------------------------------------------------------------------
+# _validate_file()
+#
+# This checks to see if there are duplicated t_rulename() functions or strings
+# in the parser input file.  This is done using a simple regular expression
+# match on each line in the filename.
+# -----------------------------------------------------------------------------
+
+def _validate_file(filename):
+    # Returns 1 (OK) or 0 (duplicate rules found).  Non-.py files and
+    # unreadable files are assumed valid, since nothing can be checked.
+    import os.path
+    base,ext = os.path.splitext(filename)
+    if ext != '.py': return 1        # No idea what the file is. Return OK
+
+    try:
+        f = open(filename)
+        lines = f.readlines()
+        f.close()
+    except IOError:
+        return 1                       # Oh well
+
+    # fre matches "def t_NAME(" definitions; sre matches "t_NAME =" strings.
+    fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(')
+    sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=')
+    counthash = { }                  # rule name -> line it was first seen on
+    linen = 1
+    noerror = 1
+    for l in lines:
+        m = fre.match(l)
+        if not m:
+            m = sre.match(l)
+        if m:
+            name = m.group(1)
+            prev = counthash.get(name)
+            if not prev:
+                counthash[name] = linen
+            else:
+                print "%s:%d: Rule %s redefined. Previously defined on line %d" % (filename,linen,name,prev)
+                noerror = 0
+        linen += 1
+    return noerror
+
+# -----------------------------------------------------------------------------
+# _funcs_to_names()
+#
+# Given a list of regular expression functions, this converts it to a list
+# suitable for output to a table file
+# -----------------------------------------------------------------------------
+
+def _funcs_to_names(funclist):
+    # Each entry is either (function, tokname) -> (function name, tokname),
+    # or a non-function placeholder, which is passed through unchanged.
+    result = []
+    for f in funclist:
+         if f and f[0]:
+             result.append((f[0].__name__,f[1]))
+         else:
+             result.append(f)
+    return result
+
+# -----------------------------------------------------------------------------
+# _names_to_funcs()
+#
+# Given a list of regular expression function names, this converts it back to
+# functions.
+# -----------------------------------------------------------------------------
+
+def _names_to_funcs(namelist,fdict):
+     # Inverse of _funcs_to_names(): look each stored function name back up
+     # in fdict; placeholders are passed through unchanged.
+     result = []
+     for n in namelist:
+          if n and n[0]:
+              result.append((fdict[n[0]],n[1]))
+          else:
+              result.append(n)
+     return result
+
+# -----------------------------------------------------------------------------
+# _form_master_re()
+#
+# This function takes a list of all of the regex components and attempts to
+# form the master regular expression.  Given limitations in the Python re
+# module, it may be necessary to break the master regex into separate expressions.
+# -----------------------------------------------------------------------------
+
+def _form_master_re(relist,reflags,ldict):
+    # Returns ([(compiled_regex, groupindex->(handler,tokname) map)], [regex text]).
+    # NOTE(review): the empty-input case returns a single [] instead of a
+    # pair of lists — callers that unpack two values would fail on it.
+    if not relist: return []
+    regex = "|".join(relist)
+    try:
+        lexre = re.compile(regex,re.VERBOSE | reflags)
+
+        # Build the index to function map for the matching engine
+        lexindexfunc = [ None ] * (max(lexre.groupindex.values())+1)
+        for f,i in lexre.groupindex.items():
+            handle = ldict.get(f,None)
+            if type(handle) in (types.FunctionType, types.MethodType):
+                # Rule defined as a function: token name is f minus the 't_' prefix.
+                lexindexfunc[i] = (handle,handle.__name__[2:])
+            elif handle is not None:
+                # If rule was specified as a string, we build an anonymous
+                # callback function to carry out the action
+                # ('ignore_' starts at index 2 in 't_ignore_XXX', hence > 0)
+                if f.find("ignore_") > 0:
+                    lexindexfunc[i] = (None,None)
+                    # NOTE(review): unconditional debug print left in.
+                    print "IGNORE", f
+                else:
+                    lexindexfunc[i] = (None, f[2:])
+
+        return [(lexre,lexindexfunc)],[regex]
+    except Exception,e:
+        # Compilation failed (typically too many named groups for the re
+        # module): split the rule list in half and compile each recursively.
+        m = int(len(relist)/2)
+        if m == 0: m = 1
+        llist, lre = _form_master_re(relist[:m],reflags,ldict)
+        rlist, rre = _form_master_re(relist[m:],reflags,ldict)
+        return llist+rlist, lre+rre
+
+# -----------------------------------------------------------------------------
+# def _statetoken(s,names)
+#
+# Given a declaration name s of the form "t_" and a dictionary whose keys are
+# state names, this function returns a tuple (states,tokenname) where states
+# is a tuple of state names and tokenname is the name of the token.  For example,
+# calling this with s = "t_foo_bar_SPAM" might return (('foo','bar'),'SPAM')
+# -----------------------------------------------------------------------------
+
+def _statetoken(s,names):
+    nonstate = 1                # NOTE(review): never read — dead variable
+    parts = s.split("_")
+    # Scan past the leading 't', collecting state-name components.  The
+    # loop index i is deliberately read after the loop: it marks the first
+    # part that is NOT a declared state ('ANY' counts as a wildcard state).
+    for i in range(1,len(parts)):
+        if not names.has_key(parts[i]) and parts[i] != 'ANY': break
+    if i > 1:
+       states = tuple(parts[1:i])
+    else:
+       states = ('INITIAL',)
+
+    # 'ANY' expands to every declared state.
+    if 'ANY' in states:
+       states = tuple(names.keys())
+
+    tokenname = "_".join(parts[i:])
+    return (states,tokenname)
+
+# -----------------------------------------------------------------------------
+# lex(module)
+#
+# Build all of the regular expression rules from definitions in the supplied module
+# -----------------------------------------------------------------------------
+# cls added for pyglet/tools/wrapper/cparser.py by Alex Holkner on 22/Jan/2007 
+# <tm> 25 June 2008 added 'outputdir'
+def lex(module=None,object=None,debug=0,optimize=0,lextab="lextab",reflags=0,nowarn=0,outputdir='',cls=Lexer):
+    """Collect the t_* rule definitions (from module/object, or from the
+    caller's global namespace when neither is given), validate them and
+    build a Lexer with one master regular expression per declared state.
+    Raises SyntaxError if the rule set is invalid."""
+    global lexer
+    ldict = None
+    stateinfo  = { 'INITIAL' : 'inclusive'}
+    error = 0
+    files = { }
+    lexobj = cls()
+    lexobj.lexdebug = debug
+    lexobj.lexoptimize = optimize
+    global token,input
+
+    if nowarn: warn = 0
+    else: warn = 1
+    
+    if object: module = object
+
+    if module:
+        # User supplied a module object.
+        if isinstance(module, types.ModuleType):
+            ldict = module.__dict__
+        elif isinstance(module, _INSTANCETYPE):
+            _items = [(k,getattr(module,k)) for k in dir(module)]
+            ldict = { }
+            for (i,v) in _items:
+                ldict[i] = v
+        else:
+            raise ValueError,"Expected a module or instance"
+        lexobj.lexmodule = module
+        
+    else:
+        # No module given.  We might be able to get information from the caller.
+        # (Raise-and-catch is used only to obtain a traceback/frame object.)
+        try:
+            raise RuntimeError
+        except RuntimeError:
+            e,b,t = sys.exc_info()
+            f = t.tb_frame
+            f = f.f_back           # Walk out to our calling function
+            ldict = f.f_globals    # Grab its globals dictionary
+
+    # In optimized mode, try to load a previously written lextab module
+    # instead of rebuilding the tables from scratch.
+    if optimize and lextab:
+        try:
+            lexobj.readtab(lextab,ldict)
+            token = lexobj.token
+            input = lexobj.input
+            lexer = lexobj
+            return lexobj
+        
+        except ImportError:
+            pass
+        
+    # Get the tokens, states, and literals variables (if any)
+    if (module and isinstance(module,_INSTANCETYPE)):
+        tokens   = getattr(module,"tokens",None)
+        states   = getattr(module,"states",None)
+        literals = getattr(module,"literals","")
+    else:
+        tokens   = ldict.get("tokens",None)
+        states   = ldict.get("states",None)
+        literals = ldict.get("literals","")
+        
+    if not tokens:
+        raise SyntaxError,"lex: module does not define 'tokens'"
+    if not (isinstance(tokens,types.ListType) or isinstance(tokens,types.TupleType)):
+        raise SyntaxError,"lex: tokens must be a list or tuple."
+
+    # Build a dictionary of valid token names
+    lexobj.lextokens = { }
+    if not optimize:
+        for n in tokens:
+            if not _is_identifier.match(n):
+                print "lex: Bad token name '%s'" % n
+                error = 1
+            if warn and lexobj.lextokens.has_key(n):
+                print "lex: Warning. Token '%s' multiply defined." % n
+            lexobj.lextokens[n] = None
+    else:
+        for n in tokens: lexobj.lextokens[n] = None
+
+    if debug:
+        print "lex: tokens = '%s'" % lexobj.lextokens.keys()
+
+    # Validate literals: must be an iterable of single characters.
+    try:
+         for c in literals:
+               if not (isinstance(c,types.StringType) or isinstance(c,types.UnicodeType)) or len(c) > 1:
+                    print "lex: Invalid literal %s. Must be a single character" % repr(c)
+                    error = 1
+                    continue
+
+    except TypeError:
+         print "lex: Invalid literals specification. literals must be a sequence of characters."
+         error = 1
+
+    lexobj.lexliterals = literals
+
+    # Build statemap
+    if states:
+         if not (isinstance(states,types.TupleType) or isinstance(states,types.ListType)):
+              print "lex: states must be defined as a tuple or list."
+              error = 1
+         else:
+              for s in states:
+                    if not isinstance(s,types.TupleType) or len(s) != 2:
+                           print "lex: invalid state specifier %s. Must be a tuple (statename,'exclusive|inclusive')" % repr(s)
+                           error = 1
+                           continue
+                    name, statetype = s
+                    if not isinstance(name,types.StringType):
+                           print "lex: state name %s must be a string" % repr(name)
+                           error = 1
+                           continue
+                    if not (statetype == 'inclusive' or statetype == 'exclusive'):
+                           print "lex: state type for state %s must be 'inclusive' or 'exclusive'" % name
+                           error = 1
+                           continue
+                    if stateinfo.has_key(name):
+                           print "lex: state '%s' already defined." % name
+                           error = 1
+                           continue
+                    stateinfo[name] = statetype
+
+    # Get a list of symbols with the t_ or s_ prefix
+    tsymbols = [f for f in ldict.keys() if f[:2] == 't_' ]
+
+    # Now build up a list of functions and a list of strings
+
+    funcsym =  { }        # Symbols defined as functions
+    strsym =   { }        # Symbols defined as strings
+    toknames = { }        # Mapping of symbols to token names
+
+    for s in stateinfo.keys():
+         funcsym[s] = []
+         strsym[s] = []
+
+    ignore   = { }        # Ignore strings by state
+    errorf   = { }        # Error functions by state
+
+    if len(tsymbols) == 0:
+        raise SyntaxError,"lex: no rules of the form t_rulename are defined."
+
+    # Partition every t_* symbol into its state buckets.
+    for f in tsymbols:
+        t = ldict[f]
+        states, tokname = _statetoken(f,stateinfo)
+        toknames[f] = tokname
+
+        if callable(t):
+            for s in states: funcsym[s].append((f,t))
+        elif (isinstance(t, types.StringType) or isinstance(t,types.UnicodeType)):
+            for s in states: strsym[s].append((f,t))
+        else:
+            print "lex: %s not defined as a function or string" % f
+            error = 1
+
+    # Sort the functions by line number (rule priority = definition order)
+    for f in funcsym.values():
+        f.sort(lambda x,y: cmp(x[1].func_code.co_firstlineno,y[1].func_code.co_firstlineno))
+
+    # Sort the strings by regular expression length (longest first, so a
+    # longer pattern is tried before any of its prefixes)
+    for s in strsym.values():
+        s.sort(lambda x,y: (len(x[1]) < len(y[1])) - (len(x[1]) > len(y[1])))
+
+    regexs = { }
+
+    # Build the master regular expressions
+    for state in stateinfo.keys():
+        regex_list = []
+
+        # Add rules defined by functions first
+        for fname, f in funcsym[state]:
+            line = f.func_code.co_firstlineno
+            file = f.func_code.co_filename
+            files[file] = None
+            tokname = toknames[fname]
+
+            ismethod = isinstance(f, types.MethodType)
+
+            if not optimize:
+                # A function rule takes exactly one argument (plus self for
+                # bound methods).
+                nargs = f.func_code.co_argcount
+                if ismethod:
+                    reqargs = 2
+                else:
+                    reqargs = 1
+                if nargs > reqargs:
+                    print "%s:%d: Rule '%s' has too many arguments." % (file,line,f.__name__)
+                    error = 1
+                    continue
+
+                if nargs < reqargs:
+                    print "%s:%d: Rule '%s' requires an argument." % (file,line,f.__name__)
+                    error = 1
+                    continue
+
+                if tokname == 'ignore':
+                    print "%s:%d: Rule '%s' must be defined as a string." % (file,line,f.__name__)
+                    error = 1
+                    continue
+        
+            if tokname == 'error':
+                errorf[state] = f
+                continue
+
+            # The rule's regex lives in the function docstring.
+            if f.__doc__:
+                if not optimize:
+                    try:
+                        c = re.compile("(?P<%s>%s)" % (f.__name__,f.__doc__), re.VERBOSE | reflags)
+                        if c.match(""):
+                             print "%s:%d: Regular expression for rule '%s' matches empty string." % (file,line,f.__name__)
+                             error = 1
+                             continue
+                    except re.error,e:
+                        print "%s:%d: Invalid regular expression for rule '%s'. %s" % (file,line,f.__name__,e)
+                        if '#' in f.__doc__:
+                             print "%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'." % (file,line, f.__name__)                 
+                        error = 1
+                        continue
+
+                    if debug:
+                        print "lex: Adding rule %s -> '%s' (state '%s')" % (f.__name__,f.__doc__, state)
+
+                # Okay. The regular expression seemed okay.  Let's append it to the master regular
+                # expression we're building
+  
+                regex_list.append("(?P<%s>%s)" % (f.__name__,f.__doc__))
+            else:
+                print "%s:%d: No regular expression defined for rule '%s'" % (file,line,f.__name__)
+
+        # Now add all of the simple rules
+        for name,r in strsym[state]:
+            tokname = toknames[name]       
+
+            if tokname == 'ignore':
+                 ignore[state] = r
+                 continue
+
+            if not optimize:
+                if tokname == 'error':
+                    raise SyntaxError,"lex: Rule '%s' must be defined as a function" % name
+                    # NOTE(review): the two lines below are unreachable —
+                    # the raise above exits first.
+                    error = 1
+                    continue
+        
+                if not lexobj.lextokens.has_key(tokname) and tokname.find("ignore_") < 0:
+                    print "lex: Rule '%s' defined for an unspecified token %s." % (name,tokname)
+                    error = 1
+                    continue
+                try:
+                    c = re.compile("(?P<%s>%s)" % (name,r),re.VERBOSE | reflags)
+                    if (c.match("")):
+                         print "lex: Regular expression for rule '%s' matches empty string." % name
+                         error = 1
+                         continue
+                except re.error,e:
+                    print "lex: Invalid regular expression for rule '%s'. %s" % (name,e)
+                    if '#' in r:
+                         print "lex: Make sure '#' in rule '%s' is escaped with '\\#'." % name
+
+                    error = 1
+                    continue
+                if debug:
+                    print "lex: Adding rule %s -> '%s' (state '%s')" % (name,r,state)
+                
+            regex_list.append("(?P<%s>%s)" % (name,r))
+
+        if not regex_list:
+             print "lex: No rules defined for state '%s'" % state
+             error = 1
+
+        regexs[state] = regex_list
+
+
+    # Check every source file for duplicated rule definitions.
+    if not optimize:
+        for f in files.keys(): 
+           if not _validate_file(f):
+                error = 1
+
+    if error:
+        raise SyntaxError,"lex: Unable to build lexer."
+
+    # From this point forward, we're reasonably confident that we can build the lexer.
+    # No more errors will be generated, but there might be some warning messages.
+
+    # Build the master regular expressions
+
+    for state in regexs.keys():
+        lexre, re_text = _form_master_re(regexs[state],reflags,ldict)
+        lexobj.lexstatere[state] = lexre
+        lexobj.lexstateretext[state] = re_text
+        if debug:
+            for i in range(len(re_text)):
+                 print "lex: state '%s'. regex[%d] = '%s'" % (state, i, re_text[i])
+
+    # For inclusive states, we need to add the INITIAL state
+    for state,type in stateinfo.items():
+        if state != "INITIAL" and type == 'inclusive':
+             lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL'])
+             lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL'])
+
+    lexobj.lexstateinfo = stateinfo
+    lexobj.lexre = lexobj.lexstatere["INITIAL"]
+    lexobj.lexretext = lexobj.lexstateretext["INITIAL"]
+
+    # Set up ignore variables
+    lexobj.lexstateignore = ignore
+    lexobj.lexignore = lexobj.lexstateignore.get("INITIAL","")
+
+    # Set up error functions
+    lexobj.lexstateerrorf = errorf
+    lexobj.lexerrorf = errorf.get("INITIAL",None)
+    if warn and not lexobj.lexerrorf:
+        print "lex: Warning. no t_error rule is defined."
+
+    # Check state information for ignore and error rules
+    for s,stype in stateinfo.items():
+        if stype == 'exclusive':
+              if warn and not errorf.has_key(s):
+                   print "lex: Warning. no error rule is defined for exclusive state '%s'" % s
+              if warn and not ignore.has_key(s) and lexobj.lexignore:
+                   print "lex: Warning. no ignore rule is defined for exclusive state '%s'" % s
+        elif stype == 'inclusive':
+              # Inclusive states inherit INITIAL's error/ignore behaviour.
+              if not errorf.has_key(s):
+                   errorf[s] = errorf.get("INITIAL",None)
+              if not ignore.has_key(s):
+                   ignore[s] = ignore.get("INITIAL","")
+   
+
+    # Create global versions of the token() and input() functions
+    token = lexobj.token
+    input = lexobj.input
+    lexer = lexobj
+
+    # If in optimize mode, we write the lextab   
+    if lextab and optimize:
+        lexobj.writetab(lextab,outputdir)
+
+    return lexobj
+
+# -----------------------------------------------------------------------------
+# runmain()
+#
+# This runs the lexer as a main program
+# -----------------------------------------------------------------------------
+
+def runmain(lexer=None,data=None):
+    # Tokenize *data* (or sys.argv[1], or stdin) with *lexer* (or the
+    # module-level token()/input() set up by lex()) and print each token.
+    if not data:
+        try:
+            filename = sys.argv[1]
+            f = open(filename)
+            data = f.read()
+            f.close()
+        except IndexError:
+            # No filename on the command line — read from stdin instead.
+            print "Reading from standard input (type EOF to end):"
+            data = sys.stdin.read()
+
+    if lexer:
+        _input = lexer.input
+    else:
+        _input = input
+    _input(data)
+    if lexer:
+        _token = lexer.token
+    else:
+        _token = token
+        
+    # Pull tokens until token() returns None (end of input).
+    while 1:
+        tok = _token()
+        if not tok: break
+        print "(%s,%r,%d,%d)" % (tok.type, tok.value, tok.lineno,tok.lexpos)
+        
+
+# -----------------------------------------------------------------------------
+# @TOKEN(regex)
+#
+# This decorator function can be used to set the regex expression on a function
+# when its docstring might need to be set in an alternative way
+# -----------------------------------------------------------------------------
+
+def TOKEN(r):
+    # lex() reads a rule's regex from the function docstring; this decorator
+    # assigns it, allowing the pattern to be built at runtime.
+    def set_doc(f):
+        f.__doc__ = r
+        return f
+    return set_doc
+
+# Alternative spelling of the TOKEN decorator
+Token = TOKEN
+

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/lextab.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/lextab.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/lextab.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,8 @@
+# lextab.py. This file automatically created by PLY (version 2.2). Don't edit!
+_lextokens    = {'RIGHT_OP': None, 'RIGHT_ASSIGN': None, 'DEC_OP': None, 'PP_MACRO_PARAM': None, 'DIV_ASSIGN': None, 'PP_DEFINE': None, 'PP_END_DEFINE': None, 'PP_DEFINE_MACRO_NAME': None, 'HEADER_NAME': None, 'NEWLINE': None, 'CHARACTER_CONSTANT': None, 'PP_STRINGIFY': None, 'AND_ASSIGN': None, 'PTR_OP': None, 'ELLIPSIS': None, 'IDENTIFIER': None, 'ADD_ASSIGN': None, 'PERIOD': None, 'AND_OP': None, 'OTHER': None, 'LPAREN': None, 'LEFT_OP': None, 'LE_OP': None, 'OR_OP': None, 'SUB_ASSIGN': None, 'MOD_ASSIGN': None, 'STRING_LITERAL': None, 'PP_IDENTIFIER_PASTE': None, 'PP_NUMBER': None, 'PP_DEFINE_NAME': None, 'XOR_ASSIGN': None, 'OR_ASSIGN': None, 'GE_OP': None, 'MUL_ASSIGN': None, 'LEFT_ASSIGN': None, 'INC_OP': None, 'NE_OP': None, 'EQ_OP': None}
+_lexreflags   = 0
+_lexliterals  = ''
+_lexstateinfo = {'INITIAL': 'inclusive', 'DEFINE': 'exclusive'}
+_lexstatere   = {'INITIAL': [('(?P<t_ANY_directive>\\#\\s+(\\d+)\\s+"([^"]+)"[ \\d]*\\n)|(?P<t_ANY_punctuator>(\\.\\.\\.|\\|\\||\\+\\+|\\*=|\\^=|<<=|>>=|\\|=|\\+=|>=|>>|<<|<=|<:|%=|:>|<%|!=|\\)|\\+|\\*|\\.|\\?|==|&=|&&|\\[|\\^|--|/=|%>|-=|->|\\||!|%|&|-|,|/|;|:|=|>|]|<|{|}|~))', [None, ('t_ANY_directive', 'ANY_directive'), None, None, ('t_ANY_punctuator', 'ANY_punctuator')]), ('(?P<t_INITIAL_identifier>[a-zA-Z_]([a-zA-Z_]|[0-9])*)', [None, ('t_INITIAL_identifier', 'INITIAL_identifier')]), ('(?P<t_ANY_float>(?P<p1>[0-9]+)?(?P<dp>[.]?)(?P<p2>(?(p1)[0-9]*|[0-9]+))(?P<exp>(?:[Ee][+-]?[0-9]+)?)(?P<suf>[FflL]?)(?!\\w))', [None, ('t_ANY_float', 'ANY_float'), None, None, None, None, None]), ('(?P<t_ANY_int>(?P<p1>(?:0x[a-fA-F0-9]+)|(?:[0-9]+))(?P<suf>[uUlL]*))', [None, ('t_ANY_int', 'ANY_int'), None, None]), ('(?P<t_ANY_character_constant>L?\'(\\\\.|[^\\\\\'])+\')|(?P<t_ANY_string_literal>L?"(\\\\.|[^\\\\"])*")|(?P<t_ANY_lparen>\\()|(?P<t_INITIAL_newline>\\n)|(?P<t_INITIAL_pp_define
 >\\#define)', [None, ('t_ANY_character_constant', 'ANY_character_constant'), None, ('t_ANY_string_literal', 'ANY_string_literal'), None, ('t_ANY_lparen', 'ANY_lparen'), ('t_INITIAL_newline', 'INITIAL_newline'), ('t_INITIAL_pp_define', 'INITIAL_pp_define')])], 'DEFINE': [('(?P<t_ANY_directive>\\#\\s+(\\d+)\\s+"([^"]+)"[ \\d]*\\n)|(?P<t_ANY_punctuator>(\\.\\.\\.|\\|\\||\\+\\+|\\*=|\\^=|<<=|>>=|\\|=|\\+=|>=|>>|<<|<=|<:|%=|:>|<%|!=|\\)|\\+|\\*|\\.|\\?|==|&=|&&|\\[|\\^|--|/=|%>|-=|->|\\||!|%|&|-|,|/|;|:|=|>|]|<|{|}|~))', [None, ('t_ANY_directive', 'ANY_directive'), None, None, ('t_ANY_punctuator', 'ANY_punctuator')]), ('(?P<t_DEFINE_identifier>[a-zA-Z_]([a-zA-Z_]|[0-9])*)', [None, ('t_DEFINE_identifier', 'DEFINE_identifier')]), ('(?P<t_ANY_float>(?P<p1>[0-9]+)?(?P<dp>[.]?)(?P<p2>(?(p1)[0-9]*|[0-9]+))(?P<exp>(?:[Ee][+-]?[0-9]+)?)(?P<suf>[FflL]?)(?!\\w))', [None, ('t_ANY_float', 'ANY_float'), None, None, None, None, None]), ('(?P<t_ANY_int>(?P<p1>(?:0x[a-fA-F0-9]+)|(?:[0-9]+))(?P<s
 uf>[uUlL]*))', [None, ('t_ANY_int', 'ANY_int'), None, None]), ('(?P<t_ANY_character_constant>L?\'(\\\\.|[^\\\\\'])+\')|(?P<t_ANY_string_literal>L?"(\\\\.|[^\\\\"])*")|(?P<t_ANY_lparen>\\()|(?P<t_DEFINE_newline>\\n)|(?P<t_DEFINE_pp_param_op>(\\#\\#)|(\\#))', [None, ('t_ANY_character_constant', 'ANY_character_constant'), None, ('t_ANY_string_literal', 'ANY_string_literal'), None, ('t_ANY_lparen', 'ANY_lparen'), ('t_DEFINE_newline', 'DEFINE_newline'), ('t_DEFINE_pp_param_op', 'DEFINE_pp_param_op')])]}
+_lexstateignore = {'INITIAL': ' \t\x0b\x0c\r', 'DEFINE': ' \t\x0b\x0c\r'}
+_lexstateerrorf = {'INITIAL': 't_INITIAL_error', 'DEFINE': 't_DEFINE_error'}

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/parsetab.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/parsetab.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/parsetab.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,282 @@
+
+# /Users/tim/Desktop/ctypesgen/ctypesgencore/parser/parsetab.py
+# This file is automatically generated. Do not edit.
+
+_lr_method = 'LALR'
+
+_lr_signature = 'Oz\xa0\xf9\xf3X\xd9Mf\x00\xfa"\xb3\xb7\xce,'
+
+_lr_action_items = {'VOID':([387,15,238,332,22,3,37,359,137,64,170,286,58,51,230,353,12,2,39,73,20,60,47,54,25,65,168,56,242,81,0,13,27,72,116,278,4,133,277,231,221,9,280,279,34,186,130,169,358,227,18,123,327,1,360,117,17,36,165,205,30,8,52,196,35,356,339,140,269,235,23,283,31,167,134,260,287,87,5,6,10,32,38,66,28,163,59,49,237,26,67,172,],[-253,-2,-169,-155,-3,6,-132,-251,6,6,-141,6,-222,-104,-171,-135,-118,6,-121,-247,-167,6,6,6,-120,-138,6,-246,-219,6,-1,-133,6,6,-254,6,-163,-245,-243,6,6,-124,-136,-142,-125,6,-170,6,-134,-154,-126,-173,6,6,-252,-168,-130,-129,-244,-249,-241,-123,-156,-248,-128,-143,-220,6,-221,-172,-117,-144,-127,6,-223,-218,-250,6,-131,-122,-119,-242,-164,6,6,-217,6,-165,6,-166,-137,6,]),'DIV_ASSIGN':([309,155,363,91,179,366,182,180,316,217,313,75,317,181,211,100,102,184,388,423,77,314,420,410,99,142,78,310,318,185,213,74,90,104,292,220,84,105,110,364,368,109,],[-8,-4,-57,-19,-15,-25,-44,-16,-26,-17,-5,-18,-27,-14,-29,-22,-4,-45,-32,-35,-9,-7,-34,-33,-1
 6,251,-56,-6,-24,-47,-28,-20,-11,-10,-21,-46,-12,-13,-49,-48,-23,-43,]),'PP_DEFINE':([205,30,116,56,51,277,287,359,260,387,163,196,32,339,360,15,242,0,165,73,269,22,1,133,],[-249,-241,-254,-246,-104,-243,-250,-251,-218,-253,-217,-248,-242,-220,-252,-2,-219,-1,-244,-247,-221,-3,11,-245,]),'LE_OP':([78,310,100,368,301,184,96,311,211,90,297,309,220,298,364,75,155,318,102,182,423,305,104,91,110,74,213,99,109,299,86,185,97,105,77,410,296,179,181,363,84,295,217,420,180,312,292,366,388,317,306,308,313,316,142,314,85,307,83,300,],[-56,-6,-22,-23,-67,-45,201,201,-29,-11,-59,-8,-46,-61,-48,-18,-4,-24,-4,-44,-35,-71,-10,-19,-49,-20,-28,-16,-43,-60,-68,-47,-58,-13,-9,-33,-64,-15,-14,-57,-12,-63,-17,-34,-16,201,-21,-25,-32,-27,-69,-70,-5,-26,-56,-7,-62,-72,-65,-66,]),'(':([9,280,3,324,133,145,67,22,130,276,108,218,55,254,75,155,199,60,409,314,151,248,28,358,93,36,113,405,339,25,263,360,189,102,268,210,377,14,79,165,99,257,158,223,235,380,376,250,366,187,52,117,166,231,72,397,196,87,208,1
 7,143,30,163,353,105,13,284,57,6,141,181,193,201,260,272,379,242,46,123,44,277,289,154,247,190,255,270,135,179,114,134,84,29,228,240,49,152,82,217,331,399,5,243,316,2,91,38,213,225,387,349,15,292,357,234,35,384,198,354,171,76,186,317,23,330,0,65,8,109,211,144,342,287,41,252,61,414,293,26,140,205,249,37,51,328,137,90,408,95,34,107,261,204,381,238,47,103,346,374,183,269,382,203,10,68,80,313,262,371,413,159,94,20,32,259,221,164,176,367,73,344,153,267,197,58,372,194,230,340,393,161,338,127,207,219,136,7,104,378,195,150,215,56,100,258,180,266,369,192,1,359,45,147,402,191,188,253,415,246,332,18,74,309,256,264,31,43,370,392,174,251,116,202,39,310,396,216,318,139,4,69,16,81,227,224,394,406,12,77,348,368,42,245,200,],[-124,-136,-111,-199,-245,257,-137,-3,-170,137,-53,72,72,-94,-18,-4,-42,137,137,-7,72,-100,-109,-134,-41,-129,72,-234,-220,-120,137,-252,72,-4,-238,72,137,16,183,-244,-16,137,137,327,-172,137,-196,-91,-25,-145,-156,-168,137,231,137,-231,-248,-148,72,-130,-209,-241,-217,-
 135,-13,-133,72,16,-122,137,-14,72,72,-218,137,-215,-219,70,-173,-178,-243,72,-213,-99,72,-97,-228,-210,-15,221,-223,-12,16,72,137,54,266,-52,-17,327,-229,-131,-239,-26,-107,-19,-164,-28,221,-253,381,-2,-21,16,231,-128,137,72,72,16,183,137,-27,-117,-195,-1,-138,-123,215,-29,-226,137,-250,-108,-96,-110,-233,72,54,137,-249,-98,-132,-104,72,137,-11,137,-51,-125,-55,-227,72,137,-169,72,-54,137,-197,137,-221,137,72,-119,-179,186,-5,137,-194,-235,-211,-50,-167,-242,-225,221,16,72,137,-247,-216,-224,137,72,-222,-201,72,-171,-240,-198,272,-236,231,72,137,-212,-174,-10,137,-147,264,137,-246,-22,-237,-16,137,72,72,16,-251,-176,137,409,72,72,-90,-230,-93,-155,-126,-20,-8,137,137,-127,-175,-200,-202,72,-92,-254,72,-121,-6,-232,137,-24,-208,-163,-177,16,-146,-154,72,137,137,-118,-9,-214,-23,-112,-95,304,]),'STRUCT':([15,17,140,64,221,277,238,186,39,359,12,165,283,26,360,87,170,18,227,278,37,3,27,31,123,59,54,66,32,72,237,116,231,38,242,10,51,67,117,1,235,5,353,22,260,134,205,36,269,230,1
 33,280,356,6,286,73,25,327,137,279,169,56,339,65,358,0,4,35,387,196,47,60,130,9,13,172,20,49,81,30,332,23,58,163,167,52,34,168,287,2,8,28,],[-2,-130,33,33,33,-243,-169,33,-121,-251,-118,-244,-144,-166,-252,33,-141,-126,-154,33,-132,33,33,-127,-173,33,33,33,-242,33,33,-254,33,-164,-219,-119,-104,-137,-168,33,-172,-131,-135,-3,-218,-223,-249,-129,-221,-171,-245,-136,-143,-122,33,-247,-120,33,33,-142,33,-246,-220,-138,-134,-1,-163,-128,-253,-248,33,33,-170,-124,-133,33,-167,-165,33,-241,-155,-117,-222,-217,33,-156,-125,33,-250,33,-123,33,]),',':([182,244,274,90,39,100,300,372,313,395,185,318,390,345,386,370,311,38,403,347,65,75,234,4,45,106,421,323,355,220,92,315,376,238,110,402,321,13,74,301,362,132,117,173,127,235,34,96,314,423,280,213,295,142,85,331,121,177,8,49,126,179,296,25,337,3,180,5,31,322,52,112,366,422,119,398,330,388,77,383,101,41,67,316,416,162,86,374,128,123,417,364,230,102,68,305,281,307,9,35,352,20,306,332,61,122,308,26,78,6,401,275,392,43,389,297,418,28,317,343
 ,148,184,358,368,97,400,129,155,36,42,104,419,291,124,2,64,312,410,353,282,175,292,333,10,335,391,181,62,299,89,115,232,17,285,404,12,18,44,303,385,341,334,211,83,298,37,63,351,393,223,69,363,105,412,7,130,23,84,302,109,91,420,71,309,319,217,273,350,227,99,371,233,324,320,160,310,],[-44,262,-116,-11,-121,-22,-66,-201,-5,262,-47,-24,-31,262,-150,-200,-75,-164,-37,262,-138,-18,-191,-163,-176,-78,-36,262,-152,-46,-84,-77,-196,-169,-49,-39,262,-133,-20,-67,-256,-115,-168,-255,-186,-172,-125,-73,-7,-35,-136,-28,-63,-56,-62,-193,-161,262,-123,-165,-182,-15,-64,-120,-183,-111,-16,-131,-127,-85,-156,-86,-25,411,-159,262,-195,-32,-9,-204,-76,-108,-137,-26,262,-88,-68,-197,236,-173,-38,-48,-171,-4,-179,-71,-149,-72,-124,-128,384,-167,-69,-155,-110,229,-70,-166,-56,-122,-205,-203,-202,-175,-36,-59,411,-109,-27,-102,262,-45,-134,-23,-58,-207,237,-4,-129,-112,-10,-40,-81,-187,-107,-115,-74,-33,-135,-151,290,-21,-162,-119,-188,-87,-14,-113,-60,-80,-103,-185,-130,357,411,-118,-126,-178,-83
 ,-153,-89,-160,-29,-65,-61,-132,164,-206,-198,-192,-177,-57,-13,-36,-174,-170,-117,-12,-79,-43,-19,-34,-82,-8,-30,-17,-114,262,-154,-16,-194,-184,-199,367,-101,-6,]),'LONG':([237,286,52,51,56,31,36,353,134,205,87,9,18,287,356,67,358,17,230,130,54,123,37,27,116,35,22,10,242,137,165,231,196,278,186,277,58,3,38,283,64,23,72,238,32,47,172,59,39,4,360,117,81,140,12,260,387,168,25,5,0,167,169,66,227,327,28,280,65,8,30,339,34,73,13,279,26,6,133,359,269,49,221,170,15,20,332,235,60,2,163,1,],[18,18,-156,-104,-246,-127,-129,-135,-223,-249,18,-124,-126,-250,-143,-137,-134,-130,-171,-170,18,-173,-132,18,-254,-128,-3,-119,-219,18,-244,18,-248,18,18,-243,-222,18,-164,-144,18,-117,18,-169,-242,18,18,18,-121,-163,-252,-168,18,18,-118,-218,-253,18,-120,-131,-1,18,18,18,-154,18,18,-136,-138,-123,-241,-220,-125,-247,-133,-142,-166,-122,-245,-251,-221,-165,18,-141,-2,-167,-155,-172,18,18,-217,18,]),'ELLIPSIS':([237,],[336,]),'<':([423,217,301,364,109,366,313,420,368,295,85,77,102,305,292,388,96
 ,90,184,314,312,181,104,363,316,297,299,317,155,99,213,142,91,105,179,309,78,75,410,310,182,296,306,97,185,86,298,211,83,311,300,100,308,307,318,74,84,220,180,110,],[-35,-17,-67,-48,-43,-25,-5,-34,-23,-63,-62,-9,-4,-71,-21,-32,202,-11,-45,-7,202,-14,-10,-57,-26,-59,-60,-27,-4,-16,-28,-56,-19,-13,-15,-8,-56,-18,-33,-6,-44,-64,-69,-58,-47,-68,-61,-29,-65,202,-66,-22,-70,-72,-24,-20,-12,-46,-16,-49,]),'GOTO':([136,394,263,153,415,399,338,414,163,268,269,260,261,51,243,258,135,159,405,60,344,147,270,58,413,240,140,408,406,346,267,348,396,259,134,397,154,242,340,378,144,143,380,379,382,339,158,139,],[-212,138,138,-224,-230,-229,-236,-233,-217,-238,-221,-218,-227,-104,-239,-237,-210,-211,-234,138,-216,138,-228,-222,-235,138,138,138,138,138,138,-214,-232,-225,-223,-231,-213,-219,-240,138,-226,-209,138,-215,138,-220,138,-208,]),'ENUM':([59,3,8,327,130,72,10,260,172,169,332,13,231,9,73,279,167,163,356,81,230,137,116,47,238,269,227,133,117,37,12,170,221,237,34,87,2,360,280,66,58,168,2
 42,277,65,23,39,28,20,22,196,5,18,32,15,31,278,1,17,205,283,123,52,25,51,6,67,30,358,35,287,339,235,60,36,387,38,54,0,165,26,64,56,359,140,353,4,134,186,49,27,286,],[24,24,-123,24,-170,24,-119,-218,24,24,-155,-133,24,-124,-247,-142,24,-217,-143,24,-171,24,-254,24,-169,-221,-154,-245,-168,-132,-118,-141,24,24,-125,24,24,-252,-136,24,-222,24,-219,-243,-138,-117,-121,24,-167,-3,-248,-131,-126,-242,-2,-127,24,24,-130,-249,-144,-173,-156,-120,-104,-122,-137,-241,-134,-128,-250,-220,-172,24,-129,-253,-164,24,-1,-244,-166,24,-246,-251,24,-135,-163,-223,24,-165,24,24,]),'PERIOD':([104,309,84,109,105,91,90,179,99,213,313,77,211,155,75,100,366,310,292,368,180,217,318,317,314,181,316,102,74,],[-10,-8,-12,212,-13,-19,-11,-15,-16,-28,-5,-9,-29,-4,-18,-22,-25,-6,-21,-23,-16,-17,-24,-27,-7,-14,-26,-4,-20,]),'PP_END_DEFINE':([310,306,311,100,36,101,112,91,361,115,74,292,88,114,371,307,84,289,280,65,83,331,17,318,5,330,391,308,305,316,6,368,364,110,358,195,388,96,97,44,312,332,303,315,86,322
 ,180,302,222,288,353,223,43,291,67,220,179,370,317,7,366,423,182,181,31,314,4,69,106,81,71,18,372,35,309,37,217,109,313,8,75,376,187,105,393,363,52,48,211,301,298,102,184,38,185,45,77,13,68,324,227,392,420,174,104,213,225,9,90,297,410,300,47,99,374,89,85,296,92,87,34,299,78,98,295,],[-6,-69,-75,-22,-129,-76,-86,-19,387,-103,-20,-21,196,-189,-194,-72,-12,360,-136,-138,-65,-193,-130,-24,-131,-195,-87,-70,-71,-26,-122,-23,-48,-49,-134,-147,-32,-73,-58,-178,-74,-155,-83,-77,-68,-85,-16,-79,-190,359,-135,-192,-175,-81,-137,-46,-15,-200,-27,-174,-25,-35,-44,-14,-127,-7,-163,-177,-78,-146,-82,-126,-201,-128,-8,-132,-17,-43,-5,-123,-18,-196,-145,-13,-198,-57,-156,116,-29,-67,-61,-4,-45,-164,-47,-176,-9,-133,-179,-199,-154,-202,-34,287,-10,-28,-191,-124,-11,-59,-33,-66,73,-16,-197,-80,-62,-64,-84,-148,-125,-60,-56,205,-63,]),'LEFT_OP':([423,142,110,299,309,308,77,292,306,75,295,305,388,86,179,217,91,364,155,220,180,105,310,213,410,104,366,90,313,211,317,97,100,316,182,84,85,298,420,2
 96,318,314,363,185,83,184,368,74,300,307,99,181,102,109,297,301,78,],[-35,-56,-49,-60,-8,193,-9,-21,193,-18,-63,193,-32,193,-15,-17,-19,-48,-4,-46,-16,-13,-6,-28,-33,-10,-25,-11,-5,-29,-27,-58,-22,-26,-44,-12,-62,-61,-34,-64,-24,-7,-57,-47,-65,-45,-23,-20,-66,193,-16,-14,-4,-43,-59,-67,-56,]),'DOUBLE':([6,278,221,4,2,27,18,286,279,186,0,358,1,10,116,327,35,140,227,37,65,167,8,9,81,353,54,73,28,280,47,133,360,66,170,20,36,58,230,64,283,231,168,169,67,56,39,72,196,130,238,12,13,31,137,242,5,277,134,387,49,172,22,3,23,332,25,38,60,356,165,15,205,237,87,30,339,34,52,59,163,269,287,26,235,32,51,260,117,17,123,359,],[-122,35,35,-163,35,35,-126,35,-142,35,-1,-134,35,-119,-254,35,-128,35,-154,-132,-138,35,-123,-124,35,-135,35,-247,35,-136,35,-245,-252,35,-141,-167,-129,-222,-171,35,-144,35,35,35,-137,-246,-121,35,-248,-170,-169,-118,-133,-127,35,-219,-131,-243,-223,-253,-165,35,-3,35,-117,-155,-120,-164,35,-143,-244,-2,-249,35,35,-241,-220,-125,-156,35,-217,-221,-250,-166,-172,-242,
 -104,-218,-168,-130,-173,-251,]),'INC_OP':([210,189,255,264,159,378,188,348,366,397,245,270,408,328,102,252,317,399,316,174,224,213,242,267,191,99,380,109,140,134,415,284,163,192,257,181,256,249,77,381,346,396,262,55,84,215,94,135,394,384,247,217,208,377,379,166,216,74,113,259,183,91,139,251,276,155,344,180,137,413,151,266,80,190,340,405,219,248,313,47,76,141,207,153,254,79,369,338,158,318,198,197,105,204,409,269,243,60,268,176,100,202,194,143,258,51,211,90,240,144,414,228,293,107,201,367,246,179,310,58,289,339,103,218,368,382,272,261,75,104,260,253,147,309,186,72,203,82,342,136,108,250,406,154,314,193,263,292,354,95,],[76,76,-97,76,-211,76,76,-214,-25,-231,-95,-228,76,76,-4,-96,-27,-229,-26,76,76,-28,-219,76,76,-16,76,213,76,-223,-230,76,-217,76,76,-14,76,-98,-9,76,76,-232,76,76,-12,76,-50,-210,76,76,-99,-17,76,76,-215,76,76,-20,76,-225,76,-19,-208,-92,76,-4,-216,-16,76,-235,76,76,76,76,-240,-234,76,-100,-5,76,76,76,76,-224,-94,76,76,-236,76,-24,76,76,-13,76,76,-221,-239,76
 ,-238,76,-22,76,76,-209,-237,-104,-29,-11,76,-226,-233,76,76,-55,76,76,-93,-15,-6,-222,76,-220,-54,76,-23,76,76,-227,-18,-10,-218,-90,76,-8,76,76,76,-52,76,-212,-53,-91,76,-213,-7,76,76,-21,76,-51,]),'|':([316,109,105,420,104,155,314,213,301,297,317,89,296,182,85,308,309,78,305,423,211,368,300,388,100,313,96,86,97,184,185,74,315,180,311,307,292,84,366,363,77,99,181,299,142,364,303,75,310,295,306,291,91,179,102,298,83,410,318,90,220,217,110,106,312,101,302,71,],[-26,-43,-13,-34,-10,-4,-7,-28,-67,-59,-27,-80,-64,-44,-62,-70,-8,-56,-71,-35,-29,-23,-66,-32,-22,-5,-73,-68,-58,-45,-47,-20,-77,-16,-75,-72,-21,-12,-25,-57,-9,-16,-14,-60,-56,-48,176,-18,-6,-63,-69,-81,-19,-15,-4,-61,-65,-33,-24,-11,-46,-17,-49,-78,-74,-76,-79,176,]),'NE_OP':([109,364,105,213,182,297,420,104,78,317,410,313,85,155,309,74,296,86,316,101,308,97,185,423,368,300,388,100,96,184,180,211,292,305,77,84,142,311,181,307,301,310,363,99,306,298,318,90,314,75,299,110,295,91,217,179,220,83,315,102,366,312,],[-43,-48
 ,-13,-28,-44,-59,-34,-10,-56,-27,-33,-5,-62,-4,-8,-20,-64,-68,-26,207,-70,-58,-47,-35,-23,-66,-32,-22,-73,-45,-16,-29,-21,-71,-9,-12,-56,-75,-14,-72,-67,-6,-57,-16,-69,-61,-24,-11,-7,-18,-60,-49,-63,-19,-17,-15,-46,-65,207,-4,-25,-74,]),'SHORT':([10,81,51,36,13,339,26,27,8,30,387,196,356,230,12,231,278,260,163,54,123,67,1,170,286,73,287,242,18,15,47,0,87,6,221,34,3,37,279,137,269,238,283,327,56,58,277,31,17,59,72,205,237,9,38,20,235,169,23,167,227,186,130,168,133,165,134,49,332,22,116,35,4,353,32,64,66,140,39,25,52,280,2,28,5,172,117,358,359,65,60,360,],[-119,9,-104,-129,-133,-220,-166,9,-123,-241,-253,-248,-143,-171,-118,9,9,-218,-217,9,-173,-137,9,-141,9,-247,-250,-219,-126,-2,9,-1,9,-122,9,-125,9,-132,-142,9,-221,-169,-144,9,-246,-222,-243,-127,-130,9,9,-249,9,-124,-164,-167,-172,9,-117,9,-154,9,-170,9,-245,-244,-223,-165,-155,-3,-254,-128,-163,-135,-242,9,9,9,-121,-120,-156,-136,9,9,-131,9,-168,-134,-251,-138,9,-252,]),'CONSTANT':([55,257,163,47,144,328,263,190,197,140,3
 99,251,176,266,354,76,203,367,409,381,137,284,378,58,192,240,174,408,380,270,51,94,154,267,369,151,253,260,259,72,250,191,258,82,242,208,248,183,202,228,60,276,194,153,166,193,136,224,159,269,158,80,396,339,204,216,377,261,107,198,346,384,188,406,338,201,141,340,147,348,186,249,215,382,219,139,207,414,394,254,262,218,247,256,344,246,415,210,143,245,413,289,135,113,79,379,252,189,272,134,293,405,255,268,264,342,103,397,243,95,108,],[77,77,-217,77,-226,77,77,77,77,77,-229,-92,77,77,77,77,77,77,77,77,77,77,77,-222,77,77,77,77,77,-228,-104,-50,-213,77,77,77,-90,-218,-225,77,-91,77,-237,-52,-219,77,-100,77,77,77,77,77,77,-224,77,77,-212,77,-211,-221,77,77,-232,-220,77,77,77,-227,-55,77,77,77,77,77,-236,77,77,-240,77,-214,77,-98,77,77,77,-208,77,-233,77,-94,77,77,-99,77,-216,-93,-230,77,-209,-95,-235,77,-210,77,77,-215,-96,77,77,-223,77,-234,-97,-238,77,77,-54,-231,-239,-51,-53,]),'DEC_OP':([397,192,190,252,258,245,109,268,90,219,246,228,211,75,250,95,409,240,249,113,270,405,256,9
 4,310,55,316,313,100,47,276,198,269,254,259,99,354,263,314,140,379,134,208,257,266,74,108,144,76,155,215,255,210,166,193,158,413,253,176,309,243,340,396,204,186,135,213,292,247,203,348,179,107,378,242,163,394,139,218,217,79,328,384,201,194,105,143,154,82,284,382,137,414,260,181,103,147,191,174,262,102,408,51,202,58,267,369,289,91,72,261,399,344,338,159,248,272,264,104,293,342,188,197,153,180,381,380,84,346,207,317,224,251,151,339,216,80,367,318,406,415,189,368,366,377,141,136,77,183,60,],[-231,79,79,-96,-237,-95,211,-238,-11,79,-93,79,-29,-18,-91,-51,79,79,-98,79,-228,-234,79,-50,-6,79,-26,-5,-22,79,79,79,-221,-94,-225,-16,79,79,-7,79,-215,-223,79,79,79,-20,-53,-226,79,-4,79,-97,79,79,79,79,-235,-90,79,-8,-239,-240,-232,79,79,-210,-28,-21,-99,79,-214,-15,-55,79,-219,-217,79,-208,79,-17,79,79,79,79,79,-13,-209,-213,-52,79,79,79,-233,-218,-14,-54,79,79,79,79,-4,79,-104,79,-222,79,79,79,-19,79,-227,-229,-216,-236,-211,-100,79,79,-10,79,79,79,79,-224,-16,79,79,-12,79,79,-27,79,-
 92,79,-220,79,79,79,-24,79,-230,79,-23,-25,79,79,-212,-9,79,79,]),'STATIC':([358,130,123,49,360,327,117,37,28,25,140,3,235,359,30,205,0,237,230,6,58,339,9,27,31,12,4,8,22,39,35,1,134,56,242,387,18,51,238,32,13,64,38,36,163,59,10,60,5,260,2,287,167,221,20,52,332,227,73,17,15,280,54,196,26,116,23,67,269,65,277,353,34,133,165,231,],[-134,-170,-173,-165,-252,10,-168,-132,10,-120,10,10,-172,-251,-241,-249,-1,10,-171,-122,-222,-220,-124,10,-127,-118,-163,-123,-3,-121,-128,10,-223,-246,-219,-253,-126,-104,-169,-242,-133,10,-164,-129,-217,10,-119,10,-131,-218,10,-250,10,10,-167,-156,-155,-154,-247,-130,-2,-136,10,-248,-166,-254,-117,-137,-221,-138,-243,-135,-125,-245,-244,10,]),'EQ_OP':([100,308,311,314,101,301,185,423,99,307,90,142,297,77,410,220,96,78,110,295,97,298,84,184,74,363,104,83,312,299,318,315,213,366,182,305,292,364,309,300,86,316,155,180,306,109,85,388,296,313,105,91,181,179,211,310,102,217,420,317,368,75,],[-22,-70,-75,-7,208,-67,-47,-35,-16,-72,-11,-56,-59,-9,-33,-46,
 -73,-56,-49,-63,-58,-61,-12,-45,-20,-57,-10,-65,-74,-60,-24,208,-28,-25,-44,-71,-21,-48,-8,-66,-68,-26,-4,-16,-69,-43,-62,-32,-64,-5,-13,-19,-14,-15,-29,-6,-4,-17,-34,-27,-23,-18,]),'SIZEOF':([80,203,192,268,247,107,284,191,396,219,253,249,339,95,415,406,381,60,269,346,190,264,243,82,263,259,207,151,158,405,163,147,58,197,262,141,55,218,266,76,137,51,293,204,188,270,245,79,380,193,378,72,342,134,240,413,379,255,246,369,338,367,408,143,103,144,250,414,377,153,409,272,251,113,397,139,216,94,267,210,194,215,166,384,189,140,260,183,174,108,394,276,201,136,399,261,256,252,328,135,248,242,154,254,186,159,257,208,340,47,354,198,289,228,258,224,382,348,176,344,202,],[80,80,80,-238,-99,-55,80,80,-232,80,-90,-98,-220,-51,-230,80,80,80,-221,80,80,80,-239,-52,80,-225,80,80,80,-234,-217,80,-222,80,80,80,80,80,80,80,80,-104,80,80,80,-228,-95,80,80,80,80,80,80,-223,80,-235,-215,-97,-93,80,-236,80,80,-209,-54,-226,-91,-233,80,-224,80,80,-92,80,-231,-208,80,-50,80,80,80,80,80,80,80,80,-218,8
 0,80,-53,80,80,80,-212,-229,-227,80,-96,80,-210,-100,-219,-213,-94,80,-211,80,80,-240,80,80,80,80,80,-237,80,80,-214,80,-216,80,]),'+':([208,109,289,245,191,60,110,405,147,144,293,252,183,269,409,268,217,318,354,100,94,105,192,246,314,83,295,243,317,414,310,248,420,179,107,264,363,316,298,366,377,276,134,301,79,99,136,257,256,166,55,84,140,190,253,261,260,339,186,213,210,263,266,182,193,348,378,368,242,142,408,309,51,394,215,296,197,284,249,113,153,299,155,340,396,204,344,218,254,228,194,159,211,85,75,154,82,135,313,220,247,240,181,180,97,74,369,262,47,207,384,258,381,76,103,216,90,80,143,388,415,379,410,338,342,267,141,270,78,72,201,300,108,91,176,399,382,406,292,189,297,188,95,203,77,272,413,163,380,104,174,102,397,139,346,328,202,224,251,151,259,185,184,137,58,367,423,198,364,158,255,219,250,],[82,-43,82,-95,82,82,-49,-234,82,-226,82,-96,82,-221,82,-238,-17,-24,82,-22,-50,-13,82,-93,-7,188,-63,-239,-27,-233,-6,-100,-34,-15,-55,82,-57,-26,-61,-25,82,82,-223,188,82,-16,-212
 ,82,82,82,82,-12,82,82,-90,-227,-218,-220,82,-28,82,82,82,-44,82,-214,82,-23,-219,-56,82,-8,-104,82,82,-64,82,82,-98,82,-224,-60,-4,-240,-232,82,-216,82,-94,82,82,-211,-29,-62,-18,-213,-52,-210,-5,-46,-99,82,-14,-16,-58,-20,82,82,82,82,82,-237,82,82,-54,82,-11,82,-209,-32,-230,-215,-33,-236,82,82,82,-228,-56,82,82,188,-53,-19,82,-229,82,82,-21,82,-59,82,-51,82,-9,82,-235,-217,82,-10,82,-4,-231,-208,82,82,82,82,-92,82,-225,-47,-45,82,-222,82,-35,82,-48,82,-97,82,-91,]),'UNSIGNED':([36,235,52,8,133,18,66,287,35,170,20,49,221,205,358,22,2,10,353,277,242,231,73,140,64,67,59,39,87,134,54,196,356,137,186,283,47,3,359,165,5,60,38,227,230,116,360,4,278,65,37,32,28,12,172,23,81,387,167,17,327,13,130,27,58,168,51,30,31,0,169,117,56,279,1,237,339,238,280,260,26,286,6,72,34,123,25,163,269,9,332,15,],[-129,-172,-156,-123,-245,-126,17,-250,-128,-141,-167,-165,17,-249,-134,-3,17,-119,-135,-243,-219,17,-247,17,17,-137,17,-121,17,-223,17,-248,-143,17,17,-144,17,17,-251,-244,-131,17,-164,-154
 ,-171,-254,-252,-163,17,-138,-132,-242,17,-118,17,-117,17,-253,17,-130,17,-133,-170,17,-222,17,-104,-241,-127,-1,17,-168,-246,-142,17,17,-220,-169,-136,-218,-166,17,-122,17,-125,-173,-120,-217,-221,-124,-155,-2,]),'/':([84,99,155,180,77,90,368,142,388,211,75,109,217,292,313,364,91,100,423,317,78,314,295,410,213,297,102,105,110,363,184,104,309,316,318,298,97,185,420,181,310,74,179,296,299,220,182,85,366,],[-12,-16,-4,-16,-9,-11,-23,-56,-32,-29,-18,-43,-17,-21,-5,-48,-19,-22,-35,-27,-56,-7,192,-33,-28,-59,-4,-13,-49,-57,-45,-10,-8,-26,-24,-61,-58,-47,-34,-14,-6,-20,-15,192,-60,-46,-44,192,-25,]),';':([42,130,90,285,316,341,38,77,63,401,67,312,342,134,301,5,182,75,4,348,52,309,397,280,83,12,17,240,303,60,317,148,74,405,162,25,263,298,2,385,311,257,158,185,306,241,99,28,184,136,10,235,410,353,36,302,299,243,117,261,84,144,260,35,220,49,380,181,64,269,339,268,26,57,242,105,6,160,34,413,135,296,414,143,87,3,244,382,19,292,155,29,115,132,217,146,163,37,106,346,8,123,408,211,140,85,
 310,154,171,65,343,153,338,391,91,386,179,364,314,230,399,156,363,394,187,61,238,275,109,407,180,300,41,388,270,89,18,100,379,308,396,322,97,213,273,96,23,58,71,159,81,318,281,378,142,104,258,51,102,139,62,355,112,110,147,332,20,195,420,157,297,315,13,340,274,344,291,305,78,9,358,282,313,239,86,415,31,259,295,92,307,423,39,383,267,141,406,368,227,101,366,],[-112,-170,-11,356,-26,-89,-164,-9,-106,-205,-137,-74,144,-223,-67,-131,-44,-18,-163,-214,-156,-8,-231,-136,-65,-118,-130,144,-83,144,-27,261,-20,-234,-88,-120,144,-61,-107,-153,-75,144,144,-47,-69,270,-16,-109,-45,-212,-119,-172,-33,-135,-129,-79,-60,-239,-168,-227,-12,-226,-218,-128,-46,-165,144,-14,-115,-221,-220,-238,-166,-105,-219,-13,-122,-101,-125,-235,-210,-64,-233,-209,-148,-111,340,144,51,-21,-4,-105,-103,-115,-17,258,-217,-132,-78,144,-123,-173,144,-29,144,-62,-6,-213,283,-138,-102,-224,-236,-87,-19,-150,-15,-48,-7,-171,-229,268,-57,144,-145,-110,-169,-203,-43,414,-16,-66,-108,-32,-228,-80,-126,-22,-215,-70,-232
 ,-85,-58,-28,-114,-73,-117,-222,-82,-211,-146,-24,-149,144,-56,-10,-237,-104,-4,-208,-113,-152,-86,-49,144,-155,-167,-147,-34,270,-59,-77,-133,-240,-116,-216,-81,-71,-56,-124,-134,-151,-5,338,-68,-230,-127,-225,-63,-84,-72,-35,-121,-204,144,243,144,-23,-154,-76,-25,]),'?':([85,155,302,77,99,91,410,90,388,100,211,96,92,217,179,101,97,420,423,89,303,317,363,110,184,313,74,180,309,291,102,305,112,185,368,301,86,297,181,142,104,78,318,314,310,306,315,84,298,312,300,220,182,311,307,83,366,299,75,106,295,109,316,292,105,296,364,308,322,213,71,],[-62,-4,-79,-9,-16,-19,-33,-11,-32,-22,-29,-73,-84,-17,-15,-76,-58,-34,-35,-80,-83,-27,-57,-49,-45,-5,-20,-16,-8,-81,-4,-71,219,-47,-23,-67,-68,-59,-14,-56,-10,-56,-24,-7,-6,-69,-77,-12,-61,-74,-66,-46,-44,-75,-72,-65,-25,-60,-18,-78,-63,-43,-26,-21,-13,-64,-48,-70,-85,-28,-82,]),'$end':([73,339,15,165,51,359,1,0,133,387,287,116,56,32,22,277,269,242,260,196,360,30,205,163,],[-247,-220,-2,-244,-104,-251,0,-1,-245,-253,-250,-254,-246,-242,-3,
 -243,-221,-219,-218,-248,-252,-241,-249,-217,]),'FOR':([340,60,135,144,268,153,143,139,338,396,378,147,346,58,397,136,260,413,394,406,339,159,134,348,154,242,51,263,259,379,158,267,344,405,408,380,163,140,240,382,261,243,415,269,399,258,270,414,],[-240,145,-210,-226,-238,-224,-209,-208,-236,-232,145,145,145,-222,-231,-212,-218,-235,145,145,-220,-211,-223,-214,-213,-219,-104,145,-225,-215,145,145,-216,-234,145,145,-217,145,145,145,-227,-239,-230,-221,-229,-237,-228,-233,]),'UNION':([65,242,238,37,278,287,260,13,3,23,81,137,58,387,30,34,186,56,1,140,51,66,205,38,130,116,170,269,227,64,172,72,22,17,36,286,12,280,332,39,230,353,163,0,8,60,27,35,169,5,123,231,67,10,52,20,221,47,327,32,15,358,283,73,18,26,59,167,134,235,49,168,6,9,4,359,339,237,31,277,279,360,25,133,356,165,2,87,54,28,196,117,],[-138,-219,-169,-132,21,-250,-218,-133,21,-117,21,21,-222,-253,-241,-125,21,-246,21,21,-104,21,-249,-164,-170,-254,-141,-221,-154,21,21,21,-3,-130,-129,21,-118,-136,-155,-121,-171,-135,-217
 ,-1,-123,21,21,-128,21,-131,-173,21,-137,-119,-156,-167,21,21,21,-242,-2,-134,-144,-247,-126,-166,21,21,-223,-172,-165,21,-122,-124,-163,-251,-220,21,-127,-243,-142,-252,-120,-245,-143,-244,21,21,21,21,-248,-168,]),'ELSE':([340,269,339,258,399,159,413,136,139,414,261,415,243,397,396,163,135,348,154,344,268,338,242,144,270,405,260,143,379,],[-240,-221,-220,-237,408,-211,-235,-212,-208,-233,-227,-230,-239,-231,-232,-217,-210,-214,-213,-216,-238,-236,-219,-226,-228,-234,-218,-209,-215,]),'[':([316,376,102,313,7,44,105,4,332,104,324,25,37,353,8,13,155,2,317,81,41,18,45,117,65,231,238,43,84,3,331,38,221,49,393,318,100,34,127,74,368,20,99,77,195,69,36,370,12,28,314,109,39,371,31,330,366,234,10,180,213,5,35,309,280,292,358,374,225,90,75,223,67,114,217,87,392,52,6,17,187,61,310,91,181,9,179,26,123,68,211,372,235,42,23,130,230,227,],[-26,-196,-4,-5,-174,-178,-13,-163,-155,-10,-199,-120,-132,-135,-123,-133,-4,-107,-27,-146,-108,-126,-176,-168,-138,224,-169,-175,-12,-111,328,-164,224,5
 5,-198,-24,-22,-125,224,-20,-23,-167,-16,-9,-147,-177,-129,-200,-118,-109,-7,216,-121,-194,-127,-195,-25,224,-119,-16,-28,-131,-128,-8,-136,-21,-134,-197,224,-11,-18,328,-137,224,-17,-148,-202,-156,-122,-130,-145,-110,-6,-19,-14,-124,-15,55,-173,-179,-29,-201,-172,-112,-117,-170,-171,-154,]),'OR_OP':([96,77,86,312,142,363,97,155,316,410,305,110,184,368,314,84,104,309,211,318,307,83,185,423,292,310,179,366,78,91,71,100,89,303,297,101,180,220,420,92,85,105,74,90,302,298,102,388,308,301,313,75,109,217,181,364,295,291,300,112,317,322,296,213,315,99,306,299,106,311,182,],[-73,-9,-68,-74,-56,-57,-58,-4,-26,-33,-71,-49,-45,-23,-7,-12,-10,-8,-29,-24,-72,-65,-47,-35,-21,-6,-15,-25,-56,-19,-82,-22,-80,-83,-59,-76,-16,-46,-34,-84,-62,-13,-20,-11,-79,-61,-4,-32,-70,-67,-5,-18,-43,-17,-14,-48,-63,-81,-66,218,-27,-85,-64,-28,-77,-16,-69,-60,-78,-75,-44,]),'SUB_ASSIGN':([363,105,77,318,292,366,78,213,423,109,317,368,313,211,388,185,91,314,104,142,102,310,410,420,184,99,182,316,309,179,364,
 75,110,74,220,180,181,90,84,155,100,217,],[-57,-13,-9,-24,-21,-25,-56,-28,-35,-43,-27,-23,-5,-29,-32,-47,-19,-7,-10,245,-4,-6,-33,-34,-45,-16,-44,-26,-8,-15,-48,-18,-49,-20,-46,-16,-14,-11,-12,-4,-22,-17,]),'XOR_ASSIGN':([366,102,182,91,78,388,100,309,179,185,213,420,142,313,105,292,181,74,310,104,99,423,220,410,110,84,217,77,184,316,75,180,155,109,317,368,364,90,363,314,318,211,],[-25,-4,-44,-19,-56,-32,-22,-8,-15,-47,-28,-34,247,-5,-13,-21,-14,-20,-6,-10,-16,-35,-46,-33,-49,-12,-17,-9,-45,-26,-18,-16,-4,-43,-27,-23,-48,-11,-57,-7,-24,-29,]),'INT':([2,137,47,32,358,27,59,140,5,117,65,260,227,72,221,3,9,81,283,360,332,359,18,1,28,230,279,66,163,8,30,116,170,64,280,231,238,37,13,20,133,0,58,34,73,52,54,186,56,167,169,196,205,38,49,67,287,277,36,15,327,12,387,356,339,26,39,22,51,134,235,353,165,60,130,87,168,31,172,6,17,35,269,286,123,242,4,237,10,278,23,25,],[34,34,34,-242,-134,34,34,34,-131,-168,-138,-218,-154,34,34,34,-124,34,-144,-252,-155,-251,-126,34,34,-171,-142,34,-217
 ,-123,-241,-254,-141,34,-136,34,-169,-132,-133,-167,-245,-1,-222,-125,-247,-156,34,34,-246,34,34,-248,-249,-164,-165,-137,-250,-243,-129,-2,34,-118,-253,-143,-220,-166,-121,-3,-104,-223,-172,-135,-244,34,-170,34,34,-127,34,-122,-130,-128,-221,34,-173,-219,-163,34,-119,34,-117,-120,]),'SIGNED':([31,356,6,168,137,28,25,287,0,36,167,358,196,327,235,8,123,170,35,27,277,2,38,237,360,10,227,32,56,134,87,359,116,260,59,30,242,34,283,67,230,186,13,81,20,3,163,23,387,269,22,5,66,165,73,205,140,17,278,286,47,130,54,279,51,37,26,65,12,9,172,49,39,15,280,169,332,58,1,231,60,238,353,133,18,339,221,64,4,117,52,72,],[-127,-143,-122,36,36,36,-120,-250,-1,-129,36,-134,-248,36,-172,-123,-173,-141,-128,36,-243,36,-164,36,-252,-119,-154,-242,-246,-223,36,-251,-254,-218,36,-241,-219,-125,-144,-137,-171,36,-133,36,-167,36,-217,-117,-253,-221,-3,-131,36,-244,-247,-249,36,-130,36,36,36,-170,36,-142,-104,-132,-166,-138,-118,-124,36,-165,-121,-2,-136,36,-155,-222,36,36,36,-169,-135,-245,-126,-220,36,
 36,-163,-168,-156,36,]),'CONTINUE':([58,340,243,408,154,269,144,143,258,338,394,136,159,263,413,339,382,259,153,344,399,268,242,406,158,415,135,240,405,147,414,134,163,267,60,140,260,396,346,261,270,380,379,397,139,378,51,348,],[-222,-240,-239,146,-213,-221,-226,-209,-237,-236,146,-212,-211,146,-235,-220,146,-225,-224,-216,-229,-238,-219,146,146,-230,-210,146,-234,146,-233,-223,-217,146,146,146,-218,-232,146,-227,-228,146,-215,-231,-208,146,-104,-214,]),'MUL_ASSIGN':([318,105,420,100,317,314,78,91,292,366,181,182,104,179,185,99,142,155,220,309,84,74,310,410,213,75,184,368,316,180,313,363,423,217,364,77,110,102,388,211,90,109,],[-24,-13,-34,-22,-27,-7,-56,-19,-21,-25,-14,-44,-10,-15,-47,-16,250,-4,-46,-8,-12,-20,-6,-33,-28,-18,-45,-23,-26,-16,-5,-57,-35,-17,-48,-9,-49,-4,-32,-29,-11,-43,]),'{':([267,24,346,238,408,52,379,58,159,136,258,243,405,158,339,344,276,51,139,384,380,261,144,415,64,269,26,340,406,135,163,414,140,338,67,230,134,394,270,348,154,27,60,396,33,235,397,167,2
 40,117,260,21,263,20,40,268,378,166,413,59,147,399,49,242,153,123,382,130,143,259,65,],[60,53,60,-169,60,118,-215,-222,-211,-212,-237,-239,-234,60,-220,-216,276,-104,-208,276,60,-227,-226,-230,60,-221,-166,-240,60,-210,-217,-233,60,-236,172,-171,-223,60,-228,-214,-213,60,60,-232,-139,-172,-231,60,60,-168,-218,-140,60,-167,66,-238,60,276,-235,60,60,-229,-165,-219,-224,-173,60,-170,-209,-225,168,]),'RIGHT_OP':([155,363,86,74,77,307,78,313,211,317,305,308,309,296,297,104,388,300,301,213,105,109,185,97,85,179,91,182,420,83,75,410,217,220,314,366,318,306,310,364,299,110,292,99,295,102,142,90,184,298,100,180,84,316,368,423,181,],[-4,-57,194,-20,-9,194,-56,-5,-29,-27,194,194,-8,-64,-59,-10,-32,-66,-67,-28,-13,-43,-47,-58,-62,-15,-19,-44,-34,-65,-18,-33,-17,-46,-7,-25,-24,194,-6,-48,-60,-49,-21,-16,-63,-4,-56,-11,-45,-61,-22,-16,-12,-26,-23,-35,-14,]),'REGISTER':([2,260,13,17,54,39,130,360,4,387,237,205,64,280,52,167,231,238,196,49,287,0,73,133,165,123,36,65,358,58,31,6,9,20,59,227,
 1,277,8,27,5,163,38,67,28,140,339,22,51,230,37,235,359,25,353,23,3,26,221,10,60,117,332,134,242,116,32,30,12,34,15,35,18,327,269,56,],[39,-218,-133,-130,39,-121,-170,-252,-163,-253,39,-249,39,-136,-156,39,39,-169,-248,-165,-250,-1,-247,-245,-244,-173,-129,-138,-134,-222,-127,-122,-124,-167,39,-154,39,-243,-123,39,-131,-217,-164,-137,39,39,-220,-3,-104,-171,-132,-172,-251,-120,-135,-117,39,-166,39,-119,39,-168,-155,-223,-219,-254,-242,-241,-118,-125,-2,-128,-126,39,-221,-246,]),'RIGHT_ASSIGN':([142,410,77,420,423,99,185,155,104,211,213,182,220,366,364,316,363,78,313,91,388,318,310,292,105,110,75,74,181,100,309,102,84,90,217,317,179,184,109,368,180,314,],[255,-33,-9,-34,-35,-16,-47,-4,-10,-29,-28,-44,-46,-25,-48,-26,-57,-56,-5,-19,-32,-24,-6,-21,-13,-49,-18,-20,-14,-22,-8,-4,-12,-11,-17,-27,-15,-45,-43,-23,-16,-7,]),'DEFAULT':([159,140,258,260,153,154,379,380,269,339,136,270,344,405,267,378,382,58,134,348,414,242,147,243,397,346,263,408,158,394,51,406,143,261,139,338,259,60,26
 8,340,413,135,163,415,399,144,396,240,],[-211,149,-237,-218,-224,-213,-215,149,-221,-220,-212,-228,-216,-234,149,149,149,-222,-223,-214,-233,-219,149,-239,-231,149,149,149,149,149,-104,149,-209,-227,-208,-236,-225,149,-238,-240,-235,-210,-217,-230,-229,-226,-232,149,]),'CHAR':([15,18,133,137,58,4,13,31,280,231,26,60,37,387,278,49,332,66,12,1,39,165,169,5,279,9,17,269,186,23,34,140,54,130,65,20,116,47,51,230,73,81,87,172,22,67,205,163,3,170,286,38,28,134,32,8,260,59,2,356,242,360,35,287,30,25,227,52,56,235,6,10,358,0,359,27,283,72,123,238,168,237,196,36,277,117,353,221,339,327,64,167,],[-2,-126,-245,8,-222,-163,-133,-127,-136,8,-166,8,-132,-253,8,-165,-155,8,-118,8,-121,-244,8,-131,-142,-124,-130,-221,8,-117,-125,8,8,-170,-138,-167,-254,8,-104,-171,-247,8,8,8,-3,-137,-249,-217,8,-141,8,-164,8,-223,-242,-123,-218,8,8,-143,-219,-252,-128,-250,-241,-120,-154,-156,-246,-172,-122,-119,-134,-1,-251,8,-144,8,-173,-169,8,8,-248,-129,-243,-168,-135,8,-220,8,8,8,]),'WHILE':([382,147,33
 8,413,153,259,258,344,139,405,136,134,415,397,267,339,394,379,380,396,240,159,261,399,243,348,260,140,408,154,406,51,143,270,242,378,163,58,269,268,340,414,135,60,158,271,263,346,144,],[150,150,-236,-235,-224,-225,-237,-216,-208,-234,-212,-223,-230,-231,150,-220,150,-215,150,-232,150,-211,-227,-229,-239,-214,-218,150,150,-213,150,-104,-209,-228,-219,150,-217,-222,-221,-238,-240,-233,-210,150,150,349,150,150,-226,]),'EXTERN':([163,4,31,23,117,34,67,242,56,13,10,32,133,54,59,227,269,130,65,359,1,280,36,358,15,123,235,26,167,6,230,51,238,12,339,49,134,60,73,58,5,353,38,2,387,231,332,237,3,0,30,27,116,28,360,327,9,39,37,64,8,196,35,18,277,140,221,260,17,165,22,287,205,20,25,52,],[-217,-163,-127,-117,-168,-125,-137,-219,-246,-133,-119,-242,-245,12,12,-154,-221,-170,-138,-251,12,-136,-129,-134,-2,-173,-172,-166,12,-122,-171,-104,-169,-118,-220,-165,-223,12,-247,-222,-131,-135,-164,12,-253,12,-155,12,12,-1,-241,12,-254,12,-252,12,-124,-121,-132,12,-123,-248,-128,-126,-243,12,12,-21
 8,-130,-244,-3,-250,-249,-167,-120,-156,]),'RETURN':([158,396,406,348,260,143,147,339,340,159,269,399,394,135,163,414,139,405,379,415,134,243,344,136,259,397,242,382,240,140,60,258,408,144,51,378,263,338,154,153,267,413,346,380,270,58,268,261,],[141,-232,141,-214,-218,-209,141,-220,-240,-211,-221,-229,141,-210,-217,-233,-208,-234,-215,-230,-223,-239,-216,-212,-225,-231,-219,141,141,141,141,-237,141,-226,-104,141,141,-236,-213,-224,141,-235,141,141,-228,-222,-238,-227,]),'__ASM__':([198,381,408,174,240,135,269,258,144,202,284,382,260,79,252,47,137,257,369,218,276,80,147,228,189,191,380,151,338,396,378,176,249,399,203,261,246,263,108,243,340,190,253,192,342,55,377,51,219,136,141,94,270,339,262,354,183,264,414,328,367,215,379,154,384,82,293,140,254,267,158,143,58,406,242,409,113,210,268,103,289,250,139,216,413,405,245,107,60,415,272,256,247,348,204,207,255,193,166,346,153,201,134,259,76,194,251,394,248,224,397,95,197,344,186,72,208,163,266,188,159,],[93,93,93,93,93,-210,-221,-2
 37,-226,93,93,93,-218,93,-96,93,93,93,93,93,93,93,93,93,93,93,93,93,-236,-232,93,93,-98,-229,93,-227,-93,93,-53,-239,-240,93,-90,93,93,93,93,-104,93,-212,93,-50,-228,-220,93,93,93,93,-233,93,93,93,-215,-213,93,-52,93,93,-94,93,93,-209,-222,93,-219,93,93,93,-238,-54,93,-91,-208,93,-235,-234,-95,-55,93,-230,93,93,-99,-214,93,93,-97,93,93,93,-224,93,-223,-225,93,93,-92,93,-100,93,-231,-51,93,-216,93,93,93,-217,93,93,-211,]),'CASE':([378,153,259,163,258,139,134,242,396,339,147,51,380,58,159,399,136,269,379,154,408,243,270,267,397,405,413,382,135,143,394,340,260,144,158,140,414,344,240,268,263,406,338,346,60,415,348,261,],[151,-224,-225,-217,-237,-208,-223,-219,-232,-220,151,-104,151,-222,-211,-229,-212,-221,-215,-213,151,-239,-228,151,-231,-234,-235,151,-210,-209,151,-240,-218,-226,151,151,-233,-216,151,-238,151,151,-236,151,151,-230,-214,-227,]),'PP_DEFINE_MACRO_NAME':([11,],[46,]),'&':([106,74,256,224,83,91,308,276,397,202,210,95,94,240,85,381,318,211,219,113,306,191,198,260,1
 53,315,184,192,140,384,388,257,142,338,215,84,369,254,154,340,242,312,216,409,220,188,413,382,134,251,313,163,261,423,99,316,292,185,78,379,245,270,213,420,76,104,289,174,255,317,218,262,248,189,197,90,135,58,252,166,314,77,300,247,399,108,293,301,194,79,364,363,368,302,310,179,298,183,190,406,414,307,250,249,253,207,299,246,267,139,180,366,342,243,264,272,60,378,144,103,284,339,102,295,258,143,408,367,96,193,296,311,354,107,75,268,137,309,141,101,109,377,82,203,396,72,80,405,158,100,186,176,348,217,410,228,204,97,105,182,51,328,269,47,55,201,344,394,86,136,181,208,147,155,266,151,159,415,305,346,259,297,263,380,110,],[210,-20,94,94,-65,-19,-70,94,-231,94,94,-51,-50,94,-62,94,-24,-29,94,94,-69,94,94,-218,-224,-77,-45,94,94,94,-32,94,-56,-236,94,-12,94,-94,-213,-240,-219,-74,94,94,-46,94,-235,94,-223,-92,-5,-217,-227,-35,-16,-26,-21,-47,-56,-215,-95,-228,-28,-34,94,-10,94,94,-97,-27,94,94,-100,94,94,-11,-210,-222,-96,94,-7,-9,-66,-99,-229,-53,94,-67,94,94,-48,-57,-23,210,-6,-
 15,-61,94,94,94,-233,-72,-91,-98,-90,94,-60,-93,94,-208,-16,-25,94,-239,94,94,94,94,-226,-54,94,-220,-4,-63,-237,-209,94,94,-73,94,-64,-75,94,-55,-18,-238,94,-8,94,-76,-43,94,-52,94,-232,94,94,-234,94,-22,94,94,-214,-17,-33,94,94,-58,-13,-44,-104,94,-221,94,94,94,-216,94,-68,-212,-14,94,94,-4,94,94,-211,-230,-71,94,-225,-59,94,94,-49,]),'*':([266,52,297,202,208,176,127,195,163,253,268,287,204,185,191,277,2,270,396,245,213,260,219,187,104,174,231,196,256,247,56,81,218,192,141,179,284,134,360,84,109,353,39,10,180,240,41,227,332,414,133,44,12,249,264,354,255,388,34,251,4,346,314,413,269,55,23,113,262,76,250,158,0,205,243,348,198,166,137,28,261,280,15,105,210,35,140,197,296,381,171,257,263,420,193,99,379,8,31,366,228,221,415,151,68,36,378,87,342,90,58,51,380,6,408,143,367,201,344,293,310,299,267,79,85,18,242,5,289,136,298,1,272,7,72,259,317,292,164,29,110,215,100,186,246,399,217,147,384,309,220,188,38,207,9,357,363,82,318,423,37,43,394,30,254,135,359,338,387,65,22,116,406,409,42
 ,16,358,3,61,364,313,339,405,224,25,211,316,77,252,102,410,73,216,165,184,139,13,32,114,276,382,107,75,94,369,97,144,368,103,189,74,80,182,328,67,153,377,258,159,108,57,194,95,181,155,60,142,340,47,91,397,78,183,248,17,203,154,295,190,],[95,-156,-59,95,95,95,7,-147,-217,-90,-238,-250,95,-47,95,-243,-107,-228,-232,-95,-28,-218,95,-145,-10,95,7,-248,95,-99,-246,-146,95,95,95,-15,95,-223,-252,-12,-43,-135,-121,-119,-16,95,-108,-154,-155,-233,-245,-178,-118,-98,95,95,-97,-32,-125,-92,-163,95,-7,-235,-221,95,-117,95,95,95,-91,95,-1,-249,-239,-214,95,95,95,-109,-227,-136,-2,-13,95,-128,95,95,190,95,7,95,95,-34,95,-16,-215,-123,-127,-25,95,7,-230,95,-179,-129,95,-148,95,-11,-222,-104,95,-122,95,-209,95,95,-216,95,-6,-60,95,95,190,-126,-219,-131,95,-212,-61,7,95,7,95,-225,-27,-21,7,7,-49,95,-22,95,-93,-229,-17,95,95,-8,-46,95,-164,95,-124,7,-57,-52,-24,-35,-132,7,95,-241,-94,-210,-251,-236,-253,-138,-3,-254,95,95,-112,7,-134,-111,-110,-48,-5,-220,-234,95,-120,-29,-26,-9,-96,-4,-33,-
 247,95,-244,-45,-208,-133,-242,7,95,95,-55,-18,-50,95,-58,-226,-23,-54,95,-20,95,-44,95,-137,-224,95,-237,-211,-53,7,95,-51,-14,-4,95,-56,-240,95,-19,-231,-56,95,-100,-130,95,-213,190,95,]),'SWITCH':([414,139,380,147,270,143,242,134,154,340,267,394,379,382,263,261,348,406,144,339,413,243,158,344,163,396,378,268,415,259,258,397,60,51,338,135,408,399,269,159,58,346,405,260,136,140,153,240,],[-233,-208,152,152,-228,-209,-219,-223,-213,-240,152,152,-215,152,152,-227,-214,152,-226,-220,-235,-239,152,-216,-217,-232,152,-238,-230,-225,-237,-231,152,-104,-236,-210,152,-229,-221,-211,-222,152,-234,-218,-212,152,-224,152,]),'AND_ASSIGN':([217,292,318,181,179,110,368,102,316,309,109,105,99,184,74,142,104,90,314,185,78,388,363,182,180,423,155,100,420,310,84,213,220,91,77,364,75,211,410,366,313,317,],[-17,-21,-24,-14,-15,-49,-23,-4,-26,-8,-43,-13,-16,-45,-20,249,-10,-11,-7,-47,-56,-32,-57,-44,-16,-35,-4,-22,-34,-6,-12,-28,-46,-19,-9,-48,-18,-29,-33,-25,-5,-27,]),'IDENTIFIER':([9,141,280,
 344,293,21,246,384,242,82,194,257,346,55,30,23,0,259,243,164,139,16,215,24,134,65,58,33,5,144,408,38,127,67,79,209,108,107,1,138,397,103,216,409,42,17,332,143,342,212,159,45,360,394,37,43,254,268,252,387,207,51,53,382,190,3,186,192,94,231,147,191,47,196,406,248,195,188,287,61,73,54,165,118,10,253,367,358,214,381,264,31,203,154,80,340,270,357,52,405,224,174,25,396,234,189,151,227,414,266,153,284,12,187,198,276,137,245,210,60,35,202,208,183,269,247,163,81,262,95,258,236,36,204,353,251,18,57,260,140,133,197,369,176,13,44,249,263,277,255,338,34,15,219,39,14,41,4,68,240,56,113,250,116,205,72,158,2,28,379,261,267,339,206,22,229,171,256,354,76,380,136,218,193,328,377,40,8,7,166,272,69,289,378,413,135,359,87,399,32,348,6,228,415,29,201,],[-124,102,-136,-216,102,-140,-93,102,-219,-52,102,102,155,102,-241,-117,-1,-225,-239,20,-208,20,102,52,-223,-138,-222,-139,-131,-226,155,-164,20,-137,102,102,-53,-55,20,239,-231,-54,102,102,-112,-130,-155,-209,102,316,-211,-176,-252,155,-132,-175,-9
 4,-238,-96,-253,102,-104,121,155,102,-111,102,102,-50,20,155,102,102,-248,155,-100,-147,102,-250,-110,-247,124,-244,121,-119,-90,102,-134,317,102,102,-127,102,-213,102,-240,-228,20,-156,-234,102,102,-120,-232,20,102,102,-154,-233,102,-224,102,-118,-145,102,102,102,-95,102,155,-128,102,102,102,-221,-99,-217,-146,102,-51,-237,335,-129,102,-135,-92,-126,20,-218,155,-245,102,102,102,-133,-178,-98,155,-243,-97,-236,-125,-2,102,-121,20,-108,-163,-179,155,-246,102,-91,-254,-249,102,155,-107,-109,-215,-227,155,-220,102,-3,121,20,102,102,102,155,-212,102,102,102,102,67,-123,-174,102,102,-177,102,155,-235,-210,-251,-148,-229,-242,-214,-122,102,-230,20,102,]),'ADD_ASSIGN':([420,317,104,109,313,78,155,213,91,179,181,185,182,423,90,314,100,220,180,316,364,363,77,105,366,309,142,310,410,75,368,217,211,74,102,388,292,184,318,84,99,110,],[-34,-27,-10,-43,-5,-56,-4,-28,-19,-15,-14,-47,-44,-35,-11,-7,-22,-46,-16,-26,-48,-57,-9,-13,-25,-8,254,-6,-33,-18,-23,-17,-29,-20,-4,-32,-21,-45,-24,-12,-
 16,-49,]),'>':([300,85,86,296,102,181,77,298,105,307,364,410,423,309,313,211,317,182,213,388,366,292,220,155,74,314,363,306,318,312,91,110,217,83,308,90,310,96,100,179,75,305,109,311,97,180,99,184,299,420,142,104,297,368,84,301,316,295,185,78,],[-66,-62,-68,-64,-4,-14,-9,-61,-13,-72,-48,-33,-35,-8,-5,-29,-27,-44,-28,-32,-25,-21,-46,-4,-20,-7,-57,-69,-24,204,-19,-49,-17,-65,-70,-11,-6,204,-22,-15,-18,-71,-43,204,-58,-16,-16,-45,-60,-34,-56,-10,-59,-23,-12,-67,-26,-63,-47,-56,]),'PP_MACRO_PARAM':([263,354,103,82,207,245,414,144,203,58,139,174,84,289,215,108,186,76,254,192,74,290,216,143,369,47,250,202,381,276,266,141,377,258,189,158,247,113,137,218,261,219,90,206,328,270,382,188,269,51,260,135,153,183,204,367,264,346,257,190,268,107,217,406,134,396,240,154,256,293,197,380,413,253,259,99,94,210,163,272,246,198,201,267,348,176,339,262,249,394,405,384,224,151,340,415,255,193,344,111,181,378,338,105,194,147,408,209,80,60,243,180,191,379,251,284,166,399,409,136,140,79,252,70,55,208
 ,242,95,228,397,248,159,342,179,72,],[99,99,-54,-52,99,-95,-233,-226,99,-222,-208,99,-12,99,99,-53,99,99,-94,99,180,362,99,-209,99,99,-91,99,99,99,99,99,99,-237,99,99,-99,99,99,99,-227,99,-11,309,99,-228,99,99,-221,-104,-218,-210,-224,99,99,99,99,99,99,99,-238,-55,-17,99,-223,-232,99,-213,99,99,99,99,-235,-90,-225,-16,-50,99,-217,99,-93,99,99,99,-214,99,-220,99,-98,99,-234,99,99,99,-240,-230,-97,99,-216,217,-14,99,-236,-13,99,99,99,314,99,99,-239,-16,99,-215,-92,99,99,-229,99,-212,99,99,-96,173,99,99,-219,-51,99,-231,-100,-211,99,-15,99,]),'GE_OP':([211,110,84,297,90,184,83,317,104,78,306,312,300,182,180,155,299,181,179,109,423,301,307,102,77,97,213,142,75,364,86,308,316,388,96,363,366,314,292,220,99,217,74,310,313,305,296,420,295,311,100,91,368,185,85,105,298,318,410,309,],[-29,-49,-12,-59,-11,-45,-65,-27,-10,-56,-69,203,-66,-44,-16,-4,-60,-14,-15,-43,-35,-67,-72,-4,-9,-58,-28,-56,-18,-48,-68,-70,-26,-32,203,-57,-25,-7,-21,-46,-16,-17,-20,-6,-5,-71,-64,-34,-63,203,-22,-19,-
 23,-47,-62,-13,-61,-24,-33,-8,]),'^':([366,179,315,298,296,91,97,100,155,99,312,420,363,110,104,388,299,185,182,96,90,317,311,220,410,75,85,297,306,301,184,423,314,308,313,180,291,307,310,102,78,142,292,295,74,316,302,106,211,368,109,213,300,181,305,89,364,309,101,318,77,217,86,84,105,83,],[-25,-15,-77,-61,-64,-19,-58,-22,-4,-16,-74,-34,-57,-49,-10,-32,-60,-47,-44,-73,-11,-27,-75,-46,-33,-18,-62,-59,-69,-67,-45,-35,-7,-70,-5,-16,197,-72,-6,-4,-56,-56,-21,-63,-20,-26,-79,-78,-29,-23,-43,-28,-66,-14,-71,197,-48,-8,-76,-24,-9,-17,-68,-12,-13,-65,]),'MOD_ASSIGN':([142,182,74,368,104,185,110,155,84,100,181,184,211,388,318,292,217,363,423,213,109,420,77,90,314,364,310,316,220,78,105,313,366,180,99,102,91,410,317,179,75,309,],[246,-44,-20,-23,-10,-47,-49,-4,-12,-22,-14,-45,-29,-32,-24,-21,-17,-57,-35,-28,-43,-34,-9,-11,-7,-48,-6,-26,-46,-56,-13,-5,-25,-16,-16,-4,-19,-33,-27,-15,-18,-8,]),':':([297,5,97,235,318,291,99,86,162,403,317,142,310,90,110,358,364,78,292,84,309,52,83,220,195
 ,332,301,75,296,31,123,388,187,74,322,302,115,323,130,238,36,391,265,180,184,105,308,365,419,299,341,313,8,343,316,303,307,100,104,306,295,49,38,9,92,357,20,109,298,37,227,89,149,185,71,87,117,77,81,311,412,363,102,423,404,101,65,368,213,160,217,420,67,4,96,211,179,417,353,418,230,35,282,171,106,18,402,315,34,389,300,182,112,314,366,85,26,305,13,410,181,155,17,6,312,91,280,],[-59,-131,-58,-172,-24,-81,-16,-68,-88,-37,-27,-56,-6,-11,-49,-134,-48,-56,-21,-12,-8,-156,-65,-46,-147,-155,-67,-18,-64,-127,-173,-32,-145,-20,-85,-79,-103,369,-170,-169,-129,-87,346,-16,-45,-13,-70,389,-40,-60,-89,-5,-123,-102,-26,-83,-72,-22,-10,-69,-63,-165,-164,-124,-84,284,-167,-43,-61,-132,-154,-80,263,-47,-82,-148,-168,-9,-146,-75,-36,-57,-4,-35,412,-76,-138,-23,-28,-101,-17,-34,-137,-163,-73,-29,-15,-38,-135,421,-171,-128,354,284,-78,-126,-39,-77,-125,-36,-66,-44,-86,-7,-25,-62,-166,-71,-133,-33,-14,267,-130,-122,-74,-19,-136,]),'TYPE_NAME':([13,65,51,67,167,286,47,327,35,32,278,169,279,60,130,2
 1,37,31,28,260,227,5,359,12,137,235,165,56,87,283,172,59,242,54,30,168,20,140,353,22,8,221,116,387,4,6,64,205,10,2,134,360,339,230,123,36,66,38,356,3,33,231,27,9,196,163,34,39,15,1,17,238,269,81,40,18,133,58,52,25,237,358,49,277,186,287,26,72,73,280,170,117,0,332,23,],[-133,-138,-104,-137,13,13,13,13,-128,-242,13,13,-142,13,-170,-140,-132,-127,13,-218,-154,-131,-251,-118,13,-172,-244,-246,13,-144,13,13,-219,13,-241,13,-167,13,-135,-3,-123,13,-254,-253,-163,-122,13,-249,-119,13,-223,-252,-220,-171,-173,-129,13,-164,-143,13,-139,13,13,-124,-248,-217,-125,-121,-2,13,-130,-169,-221,13,65,-126,-245,-222,-156,-120,13,-134,-165,-243,13,-250,-166,13,-247,-136,-141,-168,-1,-155,-117,]),'OR_ASSIGN':([211,179,420,110,91,99,102,310,78,142,213,423,185,184,368,388,317,75,182,90,364,410,314,100,180,155,309,217,109,84,74,105,220,104,316,363,366,318,181,77,313,292,],[-29,-15,-34,-49,-19,-16,-4,-6,-56,248,-28,-35,-47,-45,-23,-32,-27,-18,-44,-11,-48,-33,-7,-22,-16,-4,-8,-17,-43,-12,-20,-13,-46
 ,-10,-26,-57,-25,-24,-14,-9,-5,-21,]),'BREAK':([263,258,269,242,158,378,260,399,397,58,270,346,382,379,261,134,154,139,408,51,135,153,344,348,259,396,338,243,268,415,380,406,163,340,267,414,159,405,240,394,339,144,147,136,60,140,143,413,],[156,-237,-221,-219,156,156,-218,-229,-231,-222,-228,156,156,-215,-227,-223,-213,-208,156,-104,-210,-224,-216,-214,-225,-232,-236,-239,-238,-230,156,156,-217,-240,156,-233,-211,-234,156,156,-220,-226,156,-212,156,156,-209,-235,]),'VOLATILE':([237,205,170,66,353,130,360,47,327,260,8,26,4,235,59,339,356,38,387,140,23,17,117,242,358,279,93,1,0,54,18,5,169,287,35,168,221,6,186,37,52,123,283,25,133,269,81,7,31,277,116,56,87,9,231,13,165,12,227,20,60,27,65,137,359,72,230,58,278,3,286,172,43,51,22,2,332,68,73,39,15,64,280,32,238,196,163,167,34,67,134,28,44,36,10,49,30,],[38,-249,-141,38,-135,-170,-252,38,38,-218,-123,-166,-163,-172,38,-220,-143,-164,-253,38,-117,-130,-168,-219,-134,-142,199,38,-1,38,-126,-131,38,-250,-128,38,38,-122,38,-132,-156,-
 173,-144,-120,-245,-221,38,38,-127,-243,-254,-246,38,-124,38,-133,-244,-118,-154,-167,38,38,-138,38,-251,38,-171,-222,38,38,38,38,38,-104,-3,38,-155,-179,-247,-121,-2,38,-136,-242,-169,-248,-217,38,-125,-137,-223,38,-178,-129,-119,-165,-241,]),'error':([136,380,378,153,147,346,338,399,261,344,342,58,159,51,267,382,139,260,414,242,154,143,408,135,413,394,158,163,257,396,144,340,243,269,259,134,379,140,270,263,11,348,268,405,240,397,60,258,415,406,339,],[-212,241,241,-224,241,241,-236,-229,-227,-216,241,-222,-211,-104,241,241,-208,-218,-233,-219,-213,-209,241,-210,-235,241,241,-217,241,-232,-226,-240,-239,-221,-225,-223,-215,241,-228,241,48,-214,-238,-234,241,-231,157,-237,-230,241,-220,]),'~':([397,113,103,58,380,189,252,246,79,256,262,263,254,194,413,251,344,139,202,382,183,249,147,82,204,144,201,191,394,207,384,215,186,154,340,243,141,405,188,342,339,136,284,193,261,381,76,269,293,266,159,248,268,399,276,60,289,47,242,94,55,258,219,406,250,174,240,379,253,414,151,192,198,10
 7,255,137,367,216,409,203,396,80,158,224,260,135,176,166,245,153,143,377,210,190,354,328,247,218,208,163,257,140,197,72,259,348,378,338,369,267,264,346,51,270,408,108,134,228,95,272,415,],[-231,103,-54,-222,103,103,-96,-93,103,103,103,103,-94,103,-235,-92,-216,-208,103,103,103,-98,103,-52,103,-226,103,103,103,103,103,103,103,-213,-240,-239,103,-234,103,103,-220,-212,103,103,-227,103,103,-221,103,103,-211,-100,-238,-229,103,103,103,103,-219,-50,103,-237,103,103,-91,103,103,-215,-90,-233,103,103,103,-55,-97,103,103,103,103,103,-232,103,103,103,-218,-210,103,103,-95,-224,-209,103,103,103,103,103,-99,103,103,-217,103,103,103,103,-225,-214,103,-236,103,103,103,103,-104,-228,103,-53,-223,103,-51,103,-230,]),'DO':([243,346,339,153,163,338,136,158,396,415,380,51,258,405,242,382,397,159,263,267,240,259,408,270,134,140,340,348,143,144,413,135,378,379,154,399,147,414,260,139,60,269,394,268,406,344,261,58,],[-239,158,-220,-224,-217,-236,-212,158,-232,-230,158,-104,-237,-234,-219,158,-23
 1,-211,158,158,158,-225,158,-228,-223,158,-240,-214,-209,-226,-235,-210,158,-215,-213,-229,158,-233,-218,-208,158,-221,158,-238,158,-216,-227,-222,]),'CONST':([359,205,81,31,9,49,2,32,196,37,72,20,59,10,242,18,358,231,15,117,60,235,123,26,360,6,34,260,230,356,54,353,25,27,3,8,167,67,269,133,332,227,140,237,283,56,4,38,43,186,64,12,51,39,44,68,221,278,5,7,134,47,52,280,87,163,327,339,168,286,13,58,36,23,66,287,73,169,116,17,0,137,279,238,130,22,170,65,172,387,30,35,277,28,1,165,],[-251,-249,4,-127,-124,-165,4,-242,-248,-132,4,-167,4,-119,-219,-126,-134,4,-2,-168,4,-172,-173,-166,-252,-122,-125,-218,-171,-143,4,-135,-120,4,4,-123,4,-137,-221,-245,-155,-154,4,4,-144,-246,-163,-164,4,4,4,-118,-104,-121,-178,-179,4,4,-131,4,-223,4,-156,-136,4,-217,4,-220,4,4,-133,-222,-129,-117,4,-250,-247,4,-254,-130,-1,4,-142,-169,-170,-3,-141,-138,4,-253,-241,-128,-243,4,4,-244,]),'!':([107,246,204,242,147,269,189,262,208,58,176,243,254,263,183,108,379,270,51,338,342,380,405,159,79,293,80,215,
 139,396,339,193,381,256,197,249,47,369,260,95,399,354,144,250,408,247,163,201,348,397,216,272,266,413,186,194,328,382,257,198,82,289,261,367,76,414,253,255,409,224,153,202,267,406,218,113,136,140,143,60,207,377,240,174,141,134,340,190,228,384,219,344,251,94,137,276,284,394,191,210,378,135,258,55,158,252,245,203,154,264,192,268,415,188,103,248,259,166,72,151,346,],[-55,-93,107,-219,107,-221,107,107,107,-222,107,-239,-94,107,107,-53,-215,-228,-104,-236,107,107,-234,-211,107,107,107,107,-208,-232,-220,107,107,107,107,-98,107,107,-218,-51,-229,107,-226,-91,107,-99,-217,107,-214,-231,107,107,107,-235,107,107,107,107,107,107,-52,107,-227,107,107,-233,-90,-97,107,107,-224,107,107,107,107,107,-212,107,-209,107,107,107,107,107,107,-223,-240,107,107,107,107,-216,-92,-50,107,107,107,107,107,107,107,-210,-237,107,107,-96,-95,107,-213,107,107,-238,-230,107,-54,-100,-225,107,107,107,107,]),'%':([364,295,314,90,388,179,217,292,75,110,77,180,420,309,299,296,84,423,109,220,368,363,142,74,78,
 99,102,100,310,297,182,184,318,316,366,298,185,97,317,313,155,104,91,85,211,181,213,410,105,],[-48,191,-7,-11,-32,-15,-17,-21,-18,-49,-9,-16,-34,-8,-60,191,-12,-35,-43,-46,-23,-57,-56,-20,-56,-16,-4,-22,-6,-59,-44,-45,-24,-26,-25,-61,-47,-58,-27,-5,-4,-10,-19,191,-29,-14,-28,-33,-13,]),')':([68,106,391,175,12,416,310,318,179,217,70,331,261,309,423,280,177,97,403,42,392,52,371,418,6,54,332,213,50,184,130,223,299,231,13,311,294,195,127,325,335,373,92,388,238,312,20,75,398,123,419,215,44,74,84,337,316,185,38,86,162,233,31,364,296,389,350,142,5,104,114,358,99,227,173,307,144,319,28,124,420,322,69,292,187,305,330,100,17,65,211,336,78,128,303,341,180,9,345,366,376,422,365,234,102,182,300,77,317,297,101,295,343,49,225,393,368,160,96,71,85,8,3,320,178,7,126,306,347,23,61,67,109,395,37,220,110,41,370,270,363,89,404,232,181,291,18,410,129,313,25,90,2,10,34,39,87,377,324,230,43,81,314,390,372,91,301,221,112,417,402,105,412,302,235,117,315,36,298,308,35,83,125,362,421,45,327,326,374,353
 ,222,26,4,],[-179,-78,-87,289,-118,419,-6,-24,-15,-17,174,-193,-227,-8,-35,-136,292,-58,-37,-112,-202,-156,-194,420,-122,123,-155,-28,117,-45,-170,-192,-60,324,-133,-75,364,-147,-186,370,-188,392,-84,-32,-169,-74,-167,-18,407,-173,-40,318,-178,-20,-12,-183,-26,-47,-164,-68,-88,-184,-127,-48,-64,-36,382,-56,-131,-10,-189,-134,-16,-154,-255,-72,-226,-30,-109,-187,-34,-85,-177,-21,-145,-71,-195,-22,-130,-138,-29,-181,-56,235,-83,-89,-16,-124,378,-25,-196,423,388,-191,-4,-44,-66,-9,-27,-59,-76,-63,-102,-165,-191,-198,-23,-101,-73,-82,-62,-123,-111,366,293,-174,-182,-69,380,-117,-110,-137,-43,406,-132,-46,-49,-108,-200,-228,-57,-80,410,-185,-14,-81,-126,-33,-180,-5,-120,-11,-107,-119,-125,-121,-148,394,-199,-171,-175,-146,-7,-31,-201,-19,-67,324,-86,-38,-39,-13,-36,-79,-172,-168,-77,-129,-61,-70,-128,-65,230,-256,-36,-176,372,371,-197,-135,-190,-166,-163,]),'-':([378,240,364,83,409,202,252,197,247,261,97,219,248,110,72,414,250,310,245,309,354,318,99,183,94,313,154,176,180,262,379
 ,413,296,184,181,203,207,381,190,55,297,104,76,377,420,408,301,316,51,201,251,328,263,75,79,74,155,246,249,292,188,396,182,174,108,243,314,135,254,298,348,340,186,399,185,95,338,85,107,159,255,367,192,384,368,346,394,253,220,289,256,344,105,77,267,369,210,228,216,264,423,363,242,218,217,189,415,103,91,143,82,134,60,136,342,47,397,339,166,109,299,163,194,405,268,272,58,366,78,100,388,224,258,276,193,269,257,295,211,266,270,158,84,140,410,139,208,293,260,259,317,141,300,102,90,142,406,137,382,113,80,153,215,380,144,204,213,191,179,151,198,147,284,],[108,108,-48,189,108,108,-96,108,-99,-227,-58,108,-100,-49,108,-233,-91,-6,-95,-8,108,-24,-16,108,-50,-5,-213,108,-16,108,-215,-235,-64,-45,-14,108,108,108,108,108,-59,-10,108,108,-34,108,189,-26,-104,108,-92,108,108,-18,108,-20,-4,-93,-98,-21,108,-232,-44,108,-53,-239,-7,-210,-94,-61,-214,-240,108,-229,-47,-51,-236,-62,-55,-211,-97,108,108,108,-23,108,108,-90,-46,108,108,-216,-13,-9,108,108,108,108,108,108,-35,-57,-219,108,-17,108,
 -230,-54,-19,-209,-52,-223,108,-212,108,108,-231,-220,108,-43,-60,-217,108,-234,-238,108,-222,-25,-56,-22,-32,108,-237,108,108,-221,108,-63,-29,108,-228,108,-12,108,-33,-208,108,108,-218,-225,-27,108,189,-4,-11,-56,108,108,108,108,108,-224,108,108,-226,108,-28,108,-15,108,108,108,108,]),'CHARACTER_CONSTANT':([380,414,250,204,245,186,191,151,284,94,154,252,140,413,247,248,381,72,377,272,201,183,176,382,379,249,203,207,55,174,76,344,408,51,254,396,415,251,338,263,79,158,293,289,346,394,240,253,188,369,210,262,108,243,144,348,340,242,218,399,189,107,159,367,192,82,134,342,378,256,397,219,166,267,228,216,264,194,405,58,135,255,193,269,257,103,143,266,270,60,136,47,339,163,141,268,354,406,137,328,224,276,113,153,139,208,260,190,259,258,384,147,198,409,202,80,215,197,261,95,246,],[104,-233,-91,104,-95,104,104,104,104,-50,-213,-96,104,-235,-99,-100,104,104,104,104,104,104,104,104,-215,-98,104,104,104,104,104,-216,104,-104,-94,-232,-230,-92,-236,104,104,104,104,104,104,104,104,-90,1
 04,104,104,104,-53,-239,-226,-214,-240,-219,104,-229,104,-55,-211,104,104,-52,-223,104,104,104,-231,104,104,104,104,104,104,104,-234,-222,-210,-97,104,-221,104,-54,-209,104,-228,104,-212,104,-220,-217,104,-238,104,104,104,104,104,104,104,-224,-208,104,-218,104,-225,-237,104,104,104,104,104,104,104,104,-227,-51,-93,]),'PP_STRINGIFY':([139,242,176,82,58,268,381,103,94,99,367,259,84,163,147,272,204,354,382,344,328,174,266,183,394,269,255,399,413,249,263,409,134,396,72,180,193,190,107,218,252,202,408,136,210,194,217,113,257,256,51,189,188,47,207,208,379,108,228,151,95,270,105,247,181,415,348,250,342,405,159,143,253,397,186,135,240,377,243,198,219,339,261,245,260,144,264,154,79,191,262,216,267,215,369,197,254,414,140,346,246,289,338,380,224,384,192,158,90,74,284,248,153,406,60,141,378,55,179,76,166,258,340,203,137,293,80,251,201,276,],[-208,-219,111,-52,-222,-238,111,-54,-50,-16,111,-225,-12,-217,111,111,111,111,111,-216,111,111,111,111,111,-221,-97,-229,-235,-98,111,111,-223,-23
 2,111,-16,111,111,-55,111,-96,111,111,-212,111,111,-17,111,111,111,-104,111,111,111,111,111,-215,-53,111,111,-51,-228,-13,-99,-14,-230,-214,-91,111,-234,-211,-209,-90,-231,111,-210,111,111,-239,111,111,-220,-227,-95,-218,-226,111,-213,111,111,111,111,111,111,111,111,-94,-233,111,111,-93,111,-236,111,111,111,111,111,-11,111,111,-100,-224,111,111,111,111,111,-15,111,111,-237,-240,111,111,111,111,-92,111,111,]),'PTR_OP':([211,109,84,309,317,217,74,181,77,91,213,90,104,102,292,313,100,368,99,179,314,180,310,366,316,318,105,75,155,],[-29,214,-12,-8,-27,-17,-20,-14,-9,-19,-28,-11,-10,-4,-21,-5,-22,-23,-16,-15,-7,-16,-6,-25,-26,-24,-13,-18,-4,]),'=':([117,26,132,104,99,182,185,90,142,78,220,109,313,49,155,213,91,179,316,121,423,366,314,100,363,235,180,238,64,130,230,102,388,105,123,211,364,84,410,317,110,368,20,310,420,309,75,74,292,184,217,181,77,318,],[-168,-166,166,-10,-16,-44,-47,-11,253,-56,-46,-43,-5,-165,-4,-28,-19,-15,-26,228,-35,-25,-7,-22,-57,-172,-16,-169,166,-170,-171,-
 4,-32,-13,-173,-29,-48,-12,-33,-27,-49,-23,-167,-6,-34,-8,-18,-20,-21,-45,-17,-14,-9,-24,]),'TYPEDEF':([58,52,242,359,358,3,123,35,287,34,28,227,196,38,231,26,130,165,64,327,269,15,2,37,6,205,25,51,230,133,54,73,235,280,10,5,27,31,30,117,18,332,134,221,20,8,260,67,9,13,1,387,56,60,49,23,32,237,36,116,59,140,238,339,163,17,167,277,353,12,22,65,0,4,39,360,],[-222,-156,-219,-251,-134,23,-173,-128,-250,-125,23,-154,-248,-164,23,-166,-170,-244,23,23,-221,-2,23,-132,-122,-249,-120,-104,-171,-245,23,-247,-172,-136,-119,-131,23,-127,-241,-168,-126,-155,-223,23,-167,-123,-218,-137,-124,-133,23,-253,-246,23,-165,-117,-242,23,-129,-254,23,23,-169,-220,-217,-130,23,-243,-135,-118,-3,-138,-1,-163,-121,-252,]),'AUTO':([130,287,140,60,3,133,242,31,353,116,235,327,28,34,59,8,56,269,37,27,65,18,339,221,231,13,17,39,36,332,6,10,277,230,35,26,38,32,227,67,30,64,280,15,9,360,359,25,12,73,165,387,1,49,2,117,260,123,205,5,51,23,238,20,237,196,4,167,58,358,22,163,52,0,134,54,],[-170,-250,25,25,25,
 -245,-219,-127,-135,-254,-172,25,25,-125,25,-123,-246,-221,-132,25,-138,-126,-220,25,25,-133,-130,-121,-129,-155,-122,-119,-243,-171,-128,-166,-164,-242,-154,-137,-241,25,-136,-2,-124,-252,-251,-120,-118,-247,-244,-253,25,-165,25,-168,-218,-173,-249,-131,-104,-117,-169,-167,25,-248,-163,25,-222,-134,-3,-217,-156,-1,-223,25,]),'AND_OP':([142,101,301,97,86,308,423,77,213,364,366,109,75,295,420,311,220,211,363,84,217,296,90,110,100,300,96,106,99,302,92,83,185,89,314,104,322,299,74,105,310,410,71,307,91,368,155,313,180,292,312,303,298,309,182,306,184,318,179,317,102,291,388,297,181,315,305,85,316,78,],[-56,-76,-67,-58,-68,-70,-35,-9,-28,-48,-25,-43,-18,-63,-34,-75,-46,-29,-57,-12,-17,-64,-11,-49,-22,-66,-73,-78,-16,-79,198,-65,-47,-80,-7,-10,198,-60,-20,-13,-6,-33,-82,-72,-19,-23,-4,-5,-16,-21,-74,-83,-61,-8,-44,-69,-45,-24,-15,-27,-4,-81,-32,-59,-14,-77,-71,-62,-26,-56,]),']':([106,316,301,303,423,102,322,368,410,296,420,77,211,74,364,363,220,292,109,180,217,112,302,298,305,99,
 321,160,295,89,311,318,86,300,104,83,329,309,105,97,185,131,110,315,92,224,312,181,75,313,55,328,343,308,307,96,184,71,115,182,85,388,341,375,179,101,306,213,366,314,297,391,84,78,317,299,291,100,91,90,162,142,310,],[-78,-26,-67,-83,-35,-4,-85,-23,-33,-64,-34,-9,-29,-20,-48,-57,-46,-21,-43,-16,-17,-86,-79,-61,-71,-16,368,-101,-63,-80,-75,-24,-68,-66,-10,-65,376,-8,-13,-58,-47,238,-49,-77,-84,330,-74,-14,-18,-5,130,374,-102,-70,-72,-73,-45,-82,-103,-44,-62,-32,-89,393,-15,-76,-69,-28,-25,-7,-59,-87,-12,-56,-27,-60,-81,-22,-19,-11,-88,-56,-6,]),'IF':([260,399,382,269,413,344,406,147,380,144,134,346,58,408,405,394,51,379,259,415,268,396,340,378,60,154,240,140,261,159,242,135,263,153,243,258,139,414,267,348,338,397,158,163,143,136,339,270,],[-218,-229,161,-221,-235,-216,161,161,161,-226,-223,161,-222,161,-234,161,-104,-215,-225,-230,-238,-232,-240,161,161,-213,161,161,-227,-211,-219,-210,161,-224,-239,-237,-208,-233,161,-214,-236,-231,161,-217,-209,-212,-220,-228,]),'STRING_LITE
 RAL':([414,134,252,412,255,193,90,105,228,216,257,249,409,179,394,260,243,210,103,76,369,55,219,245,269,354,380,192,413,208,348,84,248,218,263,289,163,201,60,136,204,242,108,268,251,151,186,51,250,240,190,207,276,94,180,256,99,377,72,189,224,258,259,166,344,95,397,264,399,284,338,405,153,415,421,408,194,346,113,367,139,342,254,340,406,411,159,147,389,215,174,253,135,379,158,262,188,378,107,141,267,246,143,80,197,47,74,396,144,304,261,82,328,191,217,381,247,181,339,154,198,384,58,183,203,272,137,79,266,176,293,202,382,270,140,],[-233,-223,-96,90,-97,90,-11,-13,90,90,90,-98,90,-15,90,-218,-239,90,-54,90,90,90,90,-95,-221,90,90,90,-235,90,-214,-12,-100,90,90,90,-217,90,90,-212,90,-219,-53,-238,-92,90,90,-104,-91,90,90,90,90,-50,-16,90,-16,90,90,90,90,-237,-225,90,-216,-51,-231,90,-229,90,-236,-234,-224,-230,90,90,90,90,90,90,-208,90,-94,-240,90,90,-211,90,90,90,90,-90,-210,-215,90,90,90,90,-55,90,90,-93,-209,90,90,90,90,-232,-226,90,-227,-52,90,90,-17,90,-99,-14,-220,-213,90,90
 ,-222,90,90,90,90,90,90,90,90,90,90,-228,90,]),'PP_IDENTIFIER_PASTE':([102,314,99,309,155,],[209,206,206,206,209,]),'PP_DEFINE_NAME':([11,],[47,]),'FLOAT':([64,13,186,231,165,56,1,35,39,10,277,9,360,36,49,32,237,238,227,332,168,52,278,133,287,60,116,134,283,66,23,12,356,196,260,54,6,72,73,28,58,130,4,17,0,205,359,20,81,235,123,221,167,3,51,353,358,18,387,286,172,34,170,137,65,269,22,37,26,31,27,327,163,47,25,30,59,242,140,2,67,8,169,5,279,87,339,280,117,15,230,38,],[31,-133,31,31,-244,-246,31,-128,-121,-119,-243,-124,-252,-129,-165,-242,31,-169,-154,-155,31,-156,31,-245,-250,31,-254,-223,-144,31,-117,-118,-143,-248,-218,31,-122,31,-247,31,-222,-170,-163,-130,-1,-249,-251,-167,31,-172,-173,31,31,31,-104,-135,-134,-126,-253,31,31,-125,-141,31,-138,-221,-3,-132,-166,-127,31,31,-217,31,-120,-241,31,-219,31,31,-137,-123,31,-131,-142,31,-220,-136,-168,-2,-171,-164,]),'LEFT_ASSIGN':([314,184,180,317,313,316,366,109,364,99,318,102,410,368,100,78,179,423,104,309,142,74,420,110,84,292
 ,211,388,181,220,185,90,213,182,217,310,363,105,91,155,77,75,],[-7,-45,-16,-27,-5,-26,-25,-43,-48,-16,-24,-4,-33,-23,-22,-56,-15,-35,-10,-8,252,-20,-34,-49,-12,-21,-29,-32,-14,-46,-47,-11,-28,-44,-17,-6,-57,-13,-19,-4,-9,-18,]),'}':([157,283,292,379,413,181,269,278,275,344,261,112,242,322,303,296,383,391,71,384,159,300,102,286,142,364,85,211,220,51,60,139,109,120,154,163,338,260,106,115,119,279,352,258,312,298,313,84,340,302,311,268,317,414,366,91,420,179,136,363,397,77,333,122,243,74,368,162,135,240,217,226,401,396,169,356,351,299,182,308,270,309,339,348,306,334,291,388,104,147,89,90,99,315,105,144,400,97,399,140,185,101,297,110,423,213,301,310,78,295,307,316,75,410,259,405,121,341,96,58,184,305,314,180,100,170,134,143,318,86,415,83,153,92,229,],[269,-144,-21,-215,-235,-14,-221,353,-203,-216,-227,-86,-219,-85,-83,-64,-204,-87,-82,401,-211,-66,-4,358,-56,-48,-62,-29,-46,-104,163,-208,-43,227,-213,-217,-236,-218,-78,-103,-159,-142,383,-237,-74,-61,-5,-12,-240,-79,-75,-238,-27
 ,-233,-25,-19,-34,-15,-212,-57,-231,-9,-162,-157,-239,-20,-23,-88,-210,339,-17,332,-205,-232,280,-143,-206,-60,-44,-70,-228,-8,-220,-214,-69,-160,-81,-32,-10,260,-80,-11,-16,-77,-13,-226,-207,-58,-229,242,-47,-76,-59,-49,-35,-28,-67,-6,-56,-63,-72,-26,-18,-33,-225,-234,-161,-89,-73,-222,-45,-71,-7,-16,-22,-141,-223,-209,-24,-68,-230,-65,-224,-84,-158,]),}
+
+_lr_action = { }
+for _k, _v in _lr_action_items.items():
+   for _x,_y in zip(_v[0],_v[1]):
+       _lr_action[(_x,_k)] = _y
+del _lr_action_items
+
+_lr_goto_items = {'expression_statement':([240,257,382,267,140,406,380,342,263,60,408,147,378,346,158,394,],[135,342,135,135,135,135,135,377,135,135,135,135,135,135,135,135,]),'storage_class_specifier':([140,27,28,327,167,221,60,54,59,64,2,3,1,237,231,],[2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,]),'declaration_list':([64,60,27,],[167,140,59,]),'type_qualifier':([60,72,87,28,169,47,27,327,221,59,54,278,7,81,43,2,237,140,168,64,231,286,172,167,3,66,186,1,137,],[3,87,87,3,87,87,3,3,3,3,3,87,44,87,68,3,3,3,87,3,3,87,87,3,3,87,87,3,87,]),'constant':([262,380,186,176,191,408,377,266,207,409,60,183,367,80,203,202,328,276,394,166,293,272,151,76,194,378,289,158,201,147,72,190,369,228,346,79,264,197,204,381,55,224,193,257,198,382,284,174,256,47,216,141,267,384,192,189,240,140,263,137,188,354,218,208,406,219,342,113,210,215,],[91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,9
 1,91,91,91,91,91,91,91,91,91,91,91,91,]),'volatile_opt':([93,],[200,]),'unary_expression':([256,208,47,166,346,192,197,202,141,189,194,263,215,289,198,158,201,219,409,381,207,60,369,367,193,257,176,191,137,328,174,151,276,394,384,382,408,380,342,183,76,80,203,262,147,218,354,204,377,272,224,113,210,186,284,72,228,190,79,264,216,55,267,266,293,406,240,140,188,378,],[142,78,78,142,142,78,78,78,142,78,78,142,142,78,78,142,78,142,142,142,78,142,78,142,78,142,78,78,142,78,78,78,142,142,142,142,142,142,142,142,182,185,78,142,142,78,78,78,142,142,78,78,78,142,78,142,78,78,184,142,142,78,142,142,78,142,142,142,78,142,]),'struct_or_union_specifier':([221,2,59,278,168,27,64,169,87,137,231,72,1,66,54,186,47,81,167,28,172,237,3,140,60,286,327,],[5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,]),'exclusive_or_expression':([262,272,264,408,328,384,219,224,228,72,216,141,342,367,284,377,166,289,218,267,276,257,266,198,186,140,55,60,382,256,147,176,240,47,158,381,215,137,394,369,378,
 354,380,263,183,174,406,346,151,409,],[89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,291,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,]),'identifier_list':([54,],[128,]),'define':([1,],[22,]),'initializer':([384,276,166,],[400,351,274,]),'macro_param':([240,147,186,151,208,198,191,384,193,257,60,203,140,380,408,79,381,262,55,176,276,210,194,201,378,47,224,293,202,80,189,328,346,190,174,219,256,166,266,264,216,367,192,272,197,284,409,377,158,267,72,369,406,215,137,76,342,188,141,354,218,382,207,228,183,394,204,263,113,289,74,],[105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,179,]),'struct_declaration_list':([172,168,66,],[286,278,169,]),'macro_parameter_list':([70,],[175,]),'struct_declaratio
 n':([66,168,172,278,169,286,],[170,170,170,279,279,279,]),'abstract_declarator':([221,114,231,127,],[326,222,326,232,]),'iteration_statement':([406,382,240,378,60,263,158,380,140,267,147,394,408,346,],[136,136,136,136,136,136,136,136,136,136,136,136,136,136,]),'and_expression':([262,377,151,342,408,394,198,137,328,60,219,381,228,367,47,257,158,224,369,276,197,141,240,183,263,264,346,266,384,289,174,176,186,354,409,215,216,55,218,166,378,267,72,256,140,380,284,382,272,147,406,],[106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,302,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,]),'assignment_expression':([276,394,272,216,147,367,256,380,158,262,266,264,342,408,263,183,381,406,166,377,72,140,60,378,409,215,240,346,267,186,384,137,257,141,382,219,],[275,160,160,160,160,390,341,160,160,343,160,160,160,160,160,160,160,160,275,160,160,160,160,160,160,319,160,160,160,160,275,160,
 160,160,160,160,]),'external_declaration':([1,],[15,]),'type_specifier':([237,140,28,172,167,81,221,59,47,286,3,60,87,137,27,64,231,327,66,54,72,278,168,169,1,186,2,],[28,28,28,81,28,81,28,28,81,81,28,28,81,81,28,28,28,28,81,28,81,81,81,81,28,81,28,]),'compound_statement':([140,60,378,394,147,408,406,346,59,263,382,158,267,167,240,27,64,380,],[143,143,143,143,143,143,143,143,133,143,143,143,143,277,143,56,165,143,]),'inclusive_or_expression':([60,382,47,218,409,183,272,228,377,408,328,224,166,380,284,55,186,289,151,256,346,369,367,147,240,216,266,262,158,267,276,378,219,198,140,72,406,381,354,263,257,394,342,384,264,215,141,137,174,],[71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,303,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,]),'enumerator_list_iso':([118,53,],[122,122,]),'selection_statement':([408,346,394,378,263,267,147,240,158,406,140,60,380,382,],[159,159,159,159,159,159,159,159,159,159,159,159,159,159,]),'postfix_
 expression':([197,394,192,137,328,194,263,378,141,151,60,189,369,380,219,207,262,240,193,228,406,257,377,342,266,381,183,202,147,190,346,47,367,204,176,408,224,293,384,79,276,208,272,201,210,409,166,203,267,186,72,216,140,198,188,55,289,264,218,382,354,174,76,113,284,256,158,80,215,191,],[109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,]),'asm_expression':([151,215,409,210,276,377,183,141,378,224,80,198,192,166,262,194,72,113,406,257,189,384,394,219,354,263,240,346,284,60,207,201,203,408,382,174,264,272,176,204,266,197,216,193,328,267,190,369,228,186,381,256,147,342,380,188,158,47,55,293,208,218,289,76,79,137,191,140,202,367,],[110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,1
 10,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,110,]),'declaration_impl':([167,27,64,59,1,60,140,],[19,19,19,19,19,19,19,]),'type_name':([186,137,72,47,],[294,178,178,88,]),'relational_expression':([276,408,378,289,216,264,382,183,263,380,224,240,377,219,262,409,158,197,210,346,394,166,55,384,381,257,207,147,267,369,215,367,198,186,174,354,72,272,328,47,60,137,141,140,406,218,284,208,176,256,266,342,151,228,],[96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,311,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,312,96,96,96,96,96,96,]),'statement':([158,60,394,140,267,380,263,147,408,240,382,406,346,378,],[271,153,405,153,348,397,344,259,415,259,399,413,379,396,]),'parameter_declaration':([54,327,231,237,221,],[126,126,126,337,126,]),'cast_expression':([186,293,266,406,174,224,384,202,257,189,240,113,218,193,262,55,377,198,166,216
 ,264,194,137,382,203,276,378,215,210,219,267,197,147,381,354,409,158,208,176,256,342,188,141,207,192,272,72,204,284,228,191,346,289,394,151,408,369,60,140,47,183,263,380,328,201,190,367,],[97,363,97,97,97,97,97,97,97,97,97,220,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,97,299,97,97,97,97,97,298,97,97,97,97,97,97,97,97,97,97,97,97,97,97,297,97,]),'init_declarator':([29,164,57,],[62,273,62,]),'struct_declarator_list':([171,],[285,]),'multiplicative_expression':([384,367,60,55,47,166,201,406,346,204,197,72,228,203,409,262,198,381,219,194,263,207,377,257,378,137,276,210,202,189,267,158,342,256,208,218,140,141,394,188,380,183,382,328,224,272,264,174,216,289,186,354,408,215,151,369,193,147,266,240,176,284,],[85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,296,85,85,85,85,85,85,85,85,85,295,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,]),'unary_operator':([284,141,176,224,3
 69,384,293,276,409,240,201,210,194,354,203,186,72,60,189,380,188,198,55,219,218,202,207,382,190,406,76,204,113,257,256,147,158,377,151,215,208,272,137,408,183,166,262,378,267,47,140,79,191,192,193,228,264,342,80,266,197,394,216,328,381,289,263,346,174,367,],[113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,]),'assignment_operator':([142,],[256,]),'struct_or_union':([59,140,286,3,27,327,167,72,137,237,172,81,231,60,2,221,169,66,47,186,54,168,1,28,87,64,278,],[40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,]),'type_qualifier_list':([7,],[43,]),'enumerator':([118,229,53,],[119,334,119,]),'string_literal':([203,276,210,381,389,216,256,151,176,367,202,421,382,80,377,219,412,408,380,266,354,201,394,207,406,272,192,411,26
 2,218,369,189,224,346,263,183,198,158,215,47,240,72,190,140,228,60,384,304,188,264,378,208,191,267,409,166,289,76,204,193,113,79,257,141,137,147,55,194,74,293,342,328,174,186,284,197,],[84,84,84,84,402,84,84,84,84,84,84,402,84,84,84,84,402,84,84,84,84,84,84,84,84,84,84,402,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,365,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,181,84,84,84,84,84,84,84,]),'parameter_type_list':([221,327,54,231,],[325,373,125,325,]),'pointer':([29,164,231,171,127,16,57,43,221,7,357,1,114,],[14,14,234,14,234,14,14,69,225,45,14,14,225,]),'direct_declarator':([29,1,14,16,357,231,57,164,127,171,234,],[26,26,49,26,26,26,26,26,26,26,49,]),'declarator':([127,164,1,231,16,357,57,171,29,],[233,132,27,50,50,282,132,282,64,]),'argument_expression_list':([215,],[320,]),'str_opt_expr_pair_list':([412,421,389,],[418,422,404,]),'direct_abstract_declarator':([114,225,234,231,127,221,],[223,331,331,223,223,223,]),'additive_expression':([377,289,
 262,207,272,47,257,346,264,328,394,218,208,256,147,408,381,224,367,266,216,382,183,369,276,342,186,201,60,174,193,141,176,240,409,197,203,204,210,72,263,384,219,202,284,378,354,137,380,194,140,166,267,151,215,406,158,198,55,228,],[83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,300,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,301,83,83,83,83,83,83,83,83,83,83,]),'constant_expression':([354,289,151,174,284,47,224,55,228,328,],[385,361,265,288,355,98,329,131,333,375,]),'primary_expression':([166,176,293,266,408,240,147,228,158,276,267,191,218,204,367,55,289,208,256,257,72,284,224,79,328,194,219,377,197,198,192,216,141,342,76,394,151,272,380,202,381,378,264,262,113,409,369,80,137,382,215,210,346,186,384,174,203,354,207,201,190,193,47,188,189,60,140,183,263,406,],[100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100
 ,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,]),'declaration_specifiers':([59,237,28,327,60,140,54,1,167,3,231,64,27,2,221,],[57,127,61,127,57,57,127,29,57,42,127,57,57,41,127,]),'declaration':([59,27,60,167,1,140,64,],[134,58,58,134,30,134,58,]),'logical_and_expression':([328,406,60,262,140,141,256,284,354,218,183,263,384,174,224,369,267,381,147,378,166,394,276,72,342,55,47,346,382,137,272,264,377,409,380,216,228,289,219,186,266,240,367,408,215,257,151,158,],[92,92,92,92,92,92,92,92,92,322,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,]),'init_declarator_list':([29,57,],[63,63,]),'shift_expression':([176,208,186,384,224,228,382,276,151,72,377,216,342,201,367,380,140,141,409,406,47,346,263,60,202,257,369,240,266,354,207,219,284,215,408,264,256,55,378,394,197,147,210,289,381,267,137,166,198,218,204,328,272,203,158,262,183,174,],[86,86,86,86,8
 6,86,86,86,86,86,86,86,86,305,86,86,86,86,86,86,86,86,86,86,306,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,308,86,86,307,86,86,86,86,]),'equality_expression':([381,284,224,378,137,380,266,216,166,267,276,151,346,158,198,55,60,272,289,257,186,328,406,176,409,147,197,408,210,384,367,183,354,218,342,140,215,369,174,228,377,382,262,141,240,47,264,72,263,394,256,219,],[101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,315,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,]),'jump_statement':([378,267,60,147,394,382,408,240,158,406,263,380,346,140,],[154,154,154,154,154,154,154,154,154,154,154,154,154,154,]),'struct_declarator':([171,357,],[281,386,]),'function_definition':([1,],[32,]),'parameter_list':([54,327,221,231,],[129,129,129,129,]),'enum_specifier':([87,231,64,27,327,66,47,59,140,168,54,172,1,137,167,3,169,60,72,278,28,186,237,286,81,2,22
 1,],[37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,37,]),'str_opt_expr_pair':([411,421,412,389,],[417,403,403,403,]),'enumerator_list':([118,53,],[226,120,]),'labeled_statement':([408,380,240,60,267,394,406,346,382,378,158,147,140,263,],[139,139,139,139,139,139,139,139,139,139,139,139,139,139,]),'logical_or_expression':([328,55,408,406,174,224,384,257,276,377,240,166,262,378,264,215,137,381,151,216,382,141,267,369,158,284,219,256,342,147,394,409,272,346,289,72,60,183,140,263,380,47,367,354,228,186,266,],[112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,]),'specifier_qualifier_list':([168,169,172,72,186,137,286,66,278,81,47,87,],[171,171,171,114,114,114,171,171,171,187,114,195,]),'multi_string_literal':([140,72,186,267,203,346,166,183,201,409,276,293,224,176,204,193,367,47,190,202,207,381,219,408,266,342,151
 ,264,263,194,406,228,240,354,147,192,197,80,380,369,189,60,257,256,141,284,378,218,55,328,137,158,198,394,216,215,262,210,113,76,174,272,208,382,191,289,79,188,384,377,],[74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,74,]),'translation_unit':([0,],[1,]),'initializer_list':([276,],[352,]),'conditional_expression':([266,257,240,141,215,272,183,147,328,394,216,354,406,72,140,378,369,47,276,267,384,367,263,174,151,137,289,186,408,342,158,228,219,262,380,382,264,166,409,224,377,381,346,256,55,284,60,],[162,162,162,162,162,162,162,162,115,162,162,115,162,162,162,162,391,115,162,162,162,162,162,115,115,162,115,162,162,162,162,115,162,162,162,162,162,162,162,115,162,162,162,162,115,115,162,]),'statement_list':([140,60,],[240,147,]),'identifier':([408,293,342,208,203,218,188,183,263,380,60,140,207,266,267,141,176,256,240,284,
 210,113,193,369,224,219,55,381,204,197,406,346,354,206,147,137,377,158,378,189,192,272,216,47,384,209,166,394,276,174,409,367,190,382,328,202,198,257,80,264,79,228,72,201,289,191,186,262,215,194,76,151,],[75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,310,75,75,75,75,75,75,75,75,75,75,75,313,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,75,]),'expression':([257,266,378,394,158,140,381,409,342,216,72,380,219,406,408,264,186,377,267,272,147,183,346,141,60,382,137,240,263,],[148,347,148,148,148,148,398,416,148,321,177,148,323,148,148,345,177,395,148,350,148,177,148,244,148,148,177,148,148,]),}
+
+_lr_goto = { }
+for _k, _v in _lr_goto_items.items():
+   for _x,_y in zip(_v[0],_v[1]):
+       _lr_goto[(_x,_k)] = _y
+del _lr_goto_items
+_lr_productions = [
+  ("S'",1,None,None,None),
+  ('translation_unit',0,'p_translation_unit','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',57),
+  ('translation_unit',2,'p_translation_unit','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',58),
+  ('translation_unit',2,'p_translation_unit','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',59),
+  ('identifier',1,'p_identifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',67),
+  ('identifier',3,'p_identifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',68),
+  ('identifier',3,'p_identifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',69),
+  ('identifier',3,'p_identifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',70),
+  ('identifier',3,'p_identifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',71),
+  ('constant',1,'p_constant','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',87),
+  ('constant',1,'p_constant','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',88),
+  ('string_literal',1,'p_string_literal','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',111),
+  ('multi_string_literal',1,'p_multi_string_literal','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',115),
+  ('multi_string_literal',1,'p_multi_string_literal','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',116),
+  ('multi_string_literal',2,'p_multi_string_literal','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',117),
+  ('multi_string_literal',2,'p_multi_string_literal','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',118),
+  ('macro_param',1,'p_macro_param','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',127),
+  ('macro_param',2,'p_macro_param','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',128),
+  ('primary_expression',1,'p_primary_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',136),
+  ('primary_expression',1,'p_primary_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',137),
+  ('primary_expression',1,'p_primary_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',138),
+  ('primary_expression',3,'p_primary_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',139),
+  ('postfix_expression',1,'p_postfix_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',147),
+  ('postfix_expression',4,'p_postfix_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',148),
+  ('postfix_expression',3,'p_postfix_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',149),
+  ('postfix_expression',4,'p_postfix_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',150),
+  ('postfix_expression',3,'p_postfix_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',151),
+  ('postfix_expression',3,'p_postfix_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',152),
+  ('postfix_expression',2,'p_postfix_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',153),
+  ('postfix_expression',2,'p_postfix_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',154),
+  ('argument_expression_list',1,'p_argument_expression_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',187),
+  ('argument_expression_list',3,'p_argument_expression_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',188),
+  ('asm_expression',5,'p_asm_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',197),
+  ('asm_expression',7,'p_asm_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',198),
+  ('asm_expression',9,'p_asm_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',199),
+  ('asm_expression',11,'p_asm_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',200),
+  ('str_opt_expr_pair_list',0,'p_str_opt_expr_pair_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',212),
+  ('str_opt_expr_pair_list',1,'p_str_opt_expr_pair_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',213),
+  ('str_opt_expr_pair_list',3,'p_str_opt_expr_pair_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',214),
+  ('str_opt_expr_pair',1,'p_str_opt_expr_pair','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',218),
+  ('str_opt_expr_pair',4,'p_str_opt_expr_pair','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',219),
+  ('volatile_opt',0,'p_volatile_opt','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',223),
+  ('volatile_opt',1,'p_volatile_opt','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',224),
+  ('unary_expression',1,'p_unary_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',239),
+  ('unary_expression',2,'p_unary_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',240),
+  ('unary_expression',2,'p_unary_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',241),
+  ('unary_expression',2,'p_unary_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',242),
+  ('unary_expression',2,'p_unary_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',243),
+  ('unary_expression',4,'p_unary_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',244),
+  ('unary_expression',1,'p_unary_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',245),
+  ('unary_operator',1,'p_unary_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',262),
+  ('unary_operator',1,'p_unary_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',263),
+  ('unary_operator',1,'p_unary_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',264),
+  ('unary_operator',1,'p_unary_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',265),
+  ('unary_operator',1,'p_unary_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',266),
+  ('unary_operator',1,'p_unary_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',267),
+  ('cast_expression',1,'p_cast_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',272),
+  ('cast_expression',4,'p_cast_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',273),
+  ('multiplicative_expression',1,'p_multiplicative_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',287),
+  ('multiplicative_expression',3,'p_multiplicative_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',288),
+  ('multiplicative_expression',3,'p_multiplicative_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',289),
+  ('multiplicative_expression',3,'p_multiplicative_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',290),
+  ('additive_expression',1,'p_additive_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',305),
+  ('additive_expression',3,'p_additive_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',306),
+  ('additive_expression',3,'p_additive_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',307),
+  ('shift_expression',1,'p_shift_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',322),
+  ('shift_expression',3,'p_shift_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',323),
+  ('shift_expression',3,'p_shift_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',324),
+  ('relational_expression',1,'p_relational_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',341),
+  ('relational_expression',3,'p_relational_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',342),
+  ('relational_expression',3,'p_relational_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',343),
+  ('relational_expression',3,'p_relational_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',344),
+  ('relational_expression',3,'p_relational_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',345),
+  ('equality_expression',1,'p_equality_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',360),
+  ('equality_expression',3,'p_equality_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',361),
+  ('equality_expression',3,'p_equality_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',362),
+  ('and_expression',1,'p_and_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',372),
+  ('and_expression',3,'p_and_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',373),
+  ('exclusive_or_expression',1,'p_exclusive_or_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',382),
+  ('exclusive_or_expression',3,'p_exclusive_or_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',383),
+  ('inclusive_or_expression',1,'p_inclusive_or_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',392),
+  ('inclusive_or_expression',3,'p_inclusive_or_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',393),
+  ('logical_and_expression',1,'p_logical_and_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',402),
+  ('logical_and_expression',3,'p_logical_and_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',403),
+  ('logical_or_expression',1,'p_logical_or_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',412),
+  ('logical_or_expression',3,'p_logical_or_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',413),
+  ('conditional_expression',1,'p_conditional_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',422),
+  ('conditional_expression',5,'p_conditional_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',423),
+  ('assignment_expression',1,'p_assignment_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',444),
+  ('assignment_expression',3,'p_assignment_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',445),
+  ('assignment_operator',1,'p_assignment_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',460),
+  ('assignment_operator',1,'p_assignment_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',461),
+  ('assignment_operator',1,'p_assignment_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',462),
+  ('assignment_operator',1,'p_assignment_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',463),
+  ('assignment_operator',1,'p_assignment_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',464),
+  ('assignment_operator',1,'p_assignment_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',465),
+  ('assignment_operator',1,'p_assignment_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',466),
+  ('assignment_operator',1,'p_assignment_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',467),
+  ('assignment_operator',1,'p_assignment_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',468),
+  ('assignment_operator',1,'p_assignment_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',469),
+  ('assignment_operator',1,'p_assignment_operator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',470),
+  ('expression',1,'p_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',475),
+  ('expression',3,'p_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',476),
+  ('constant_expression',1,'p_constant_expression','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',482),
+  ('declaration',2,'p_declaration','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',487),
+  ('declaration_impl',1,'p_declaration_impl','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',494),
+  ('declaration_impl',2,'p_declaration_impl','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',495),
+  ('declaration_specifiers',1,'p_declaration_specifiers','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',519),
+  ('declaration_specifiers',2,'p_declaration_specifiers','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',520),
+  ('declaration_specifiers',1,'p_declaration_specifiers','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',521),
+  ('declaration_specifiers',2,'p_declaration_specifiers','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',522),
+  ('declaration_specifiers',1,'p_declaration_specifiers','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',523),
+  ('declaration_specifiers',2,'p_declaration_specifiers','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',524),
+  ('init_declarator_list',1,'p_init_declarator_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',532),
+  ('init_declarator_list',3,'p_init_declarator_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',533),
+  ('init_declarator',1,'p_init_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',541),
+  ('init_declarator',3,'p_init_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',542),
+  ('storage_class_specifier',1,'p_storage_class_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',549),
+  ('storage_class_specifier',1,'p_storage_class_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',550),
+  ('storage_class_specifier',1,'p_storage_class_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',551),
+  ('storage_class_specifier',1,'p_storage_class_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',552),
+  ('storage_class_specifier',1,'p_storage_class_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',553),
+  ('type_specifier',1,'p_type_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',558),
+  ('type_specifier',1,'p_type_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',559),
+  ('type_specifier',1,'p_type_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',560),
+  ('type_specifier',1,'p_type_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',561),
+  ('type_specifier',1,'p_type_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',562),
+  ('type_specifier',1,'p_type_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',563),
+  ('type_specifier',1,'p_type_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',564),
+  ('type_specifier',1,'p_type_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',565),
+  ('type_specifier',1,'p_type_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',566),
+  ('type_specifier',1,'p_type_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',567),
+  ('type_specifier',1,'p_type_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',568),
+  ('type_specifier',1,'p_type_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',569),
+  ('struct_or_union_specifier',5,'p_struct_or_union_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',578),
+  ('struct_or_union_specifier',5,'p_struct_or_union_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',579),
+  ('struct_or_union_specifier',4,'p_struct_or_union_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',580),
+  ('struct_or_union_specifier',2,'p_struct_or_union_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',581),
+  ('struct_or_union_specifier',2,'p_struct_or_union_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',582),
+  ('struct_or_union',1,'p_struct_or_union','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',598),
+  ('struct_or_union',1,'p_struct_or_union','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',599),
+  ('struct_declaration_list',1,'p_struct_declaration_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',604),
+  ('struct_declaration_list',2,'p_struct_declaration_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',605),
+  ('struct_declaration',3,'p_struct_declaration','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',613),
+  ('struct_declaration',2,'p_struct_declaration','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',614),
+  ('specifier_qualifier_list',2,'p_specifier_qualifier_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',628),
+  ('specifier_qualifier_list',1,'p_specifier_qualifier_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',629),
+  ('specifier_qualifier_list',2,'p_specifier_qualifier_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',630),
+  ('specifier_qualifier_list',1,'p_specifier_qualifier_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',631),
+  ('struct_declarator_list',1,'p_struct_declarator_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',640),
+  ('struct_declarator_list',3,'p_struct_declarator_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',641),
+  ('struct_declarator',1,'p_struct_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',649),
+  ('struct_declarator',2,'p_struct_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',650),
+  ('struct_declarator',3,'p_struct_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',651),
+  ('enum_specifier',4,'p_enum_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',662),
+  ('enum_specifier',5,'p_enum_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',663),
+  ('enum_specifier',2,'p_enum_specifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',664),
+  ('enumerator_list',1,'p_enumerator_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',677),
+  ('enumerator_list',2,'p_enumerator_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',678),
+  ('enumerator_list_iso',1,'p_enumerator_list_iso','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',685),
+  ('enumerator_list_iso',3,'p_enumerator_list_iso','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',686),
+  ('enumerator',1,'p_enumerator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',694),
+  ('enumerator',3,'p_enumerator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',695),
+  ('type_qualifier',1,'p_type_qualifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',703),
+  ('type_qualifier',1,'p_type_qualifier','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',704),
+  ('declarator',2,'p_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',709),
+  ('declarator',1,'p_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',710),
+  ('direct_declarator',1,'p_direct_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',722),
+  ('direct_declarator',3,'p_direct_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',723),
+  ('direct_declarator',4,'p_direct_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',724),
+  ('direct_declarator',3,'p_direct_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',725),
+  ('direct_declarator',4,'p_direct_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',726),
+  ('direct_declarator',4,'p_direct_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',727),
+  ('direct_declarator',3,'p_direct_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',728),
+  ('pointer',1,'p_pointer','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',757),
+  ('pointer',2,'p_pointer','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',758),
+  ('pointer',2,'p_pointer','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',759),
+  ('pointer',3,'p_pointer','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',760),
+  ('type_qualifier_list',1,'p_type_qualifier_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',777),
+  ('type_qualifier_list',2,'p_type_qualifier_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',778),
+  ('parameter_type_list',1,'p_parameter_type_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',786),
+  ('parameter_type_list',3,'p_parameter_type_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',787),
+  ('parameter_list',1,'p_parameter_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',796),
+  ('parameter_list',3,'p_parameter_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',797),
+  ('parameter_declaration',2,'p_parameter_declaration','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',805),
+  ('parameter_declaration',2,'p_parameter_declaration','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',806),
+  ('parameter_declaration',1,'p_parameter_declaration','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',807),
+  ('identifier_list',1,'p_identifier_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',815),
+  ('identifier_list',3,'p_identifier_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',816),
+  ('type_name',1,'p_type_name','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',828),
+  ('type_name',2,'p_type_name','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',829),
+  ('abstract_declarator',1,'p_abstract_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',845),
+  ('abstract_declarator',1,'p_abstract_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',846),
+  ('abstract_declarator',2,'p_abstract_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',847),
+  ('direct_abstract_declarator',3,'p_direct_abstract_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',866),
+  ('direct_abstract_declarator',2,'p_direct_abstract_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',867),
+  ('direct_abstract_declarator',3,'p_direct_abstract_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',868),
+  ('direct_abstract_declarator',3,'p_direct_abstract_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',869),
+  ('direct_abstract_declarator',4,'p_direct_abstract_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',870),
+  ('direct_abstract_declarator',2,'p_direct_abstract_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',871),
+  ('direct_abstract_declarator',3,'p_direct_abstract_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',872),
+  ('direct_abstract_declarator',3,'p_direct_abstract_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',873),
+  ('direct_abstract_declarator',4,'p_direct_abstract_declarator','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',874),
+  ('initializer',1,'p_initializer','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',911),
+  ('initializer',3,'p_initializer','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',912),
+  ('initializer',4,'p_initializer','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',913),
+  ('initializer_list',1,'p_initializer_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',917),
+  ('initializer_list',3,'p_initializer_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',918),
+  ('statement',1,'p_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',922),
+  ('statement',1,'p_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',923),
+  ('statement',1,'p_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',924),
+  ('statement',1,'p_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',925),
+  ('statement',1,'p_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',926),
+  ('statement',1,'p_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',927),
+  ('labeled_statement',3,'p_labeled_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',931),
+  ('labeled_statement',4,'p_labeled_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',932),
+  ('labeled_statement',3,'p_labeled_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',933),
+  ('compound_statement',2,'p_compound_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',937),
+  ('compound_statement',3,'p_compound_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',938),
+  ('compound_statement',3,'p_compound_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',939),
+  ('compound_statement',4,'p_compound_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',940),
+  ('compound_statement',3,'p_compound_statement_error','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',944),
+  ('declaration_list',1,'p_declaration_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',949),
+  ('declaration_list',2,'p_declaration_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',950),
+  ('statement_list',1,'p_statement_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',954),
+  ('statement_list',2,'p_statement_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',955),
+  ('expression_statement',1,'p_expression_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',959),
+  ('expression_statement',2,'p_expression_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',960),
+  ('expression_statement',2,'p_expression_statement_error','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',963),
+  ('selection_statement',5,'p_selection_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',968),
+  ('selection_statement',7,'p_selection_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',969),
+  ('selection_statement',5,'p_selection_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',970),
+  ('iteration_statement',5,'p_iteration_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',974),
+  ('iteration_statement',7,'p_iteration_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',975),
+  ('iteration_statement',6,'p_iteration_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',976),
+  ('iteration_statement',7,'p_iteration_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',977),
+  ('jump_statement',3,'p_jump_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',981),
+  ('jump_statement',2,'p_jump_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',982),
+  ('jump_statement',2,'p_jump_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',983),
+  ('jump_statement',2,'p_jump_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',984),
+  ('jump_statement',3,'p_jump_statement','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',985),
+  ('external_declaration',1,'p_external_declaration','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',989),
+  ('external_declaration',1,'p_external_declaration','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',990),
+  ('function_definition',4,'p_function_definition','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',996),
+  ('function_definition',3,'p_function_definition','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',997),
+  ('function_definition',3,'p_function_definition','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',998),
+  ('function_definition',2,'p_function_definition','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',999),
+  ('define',3,'p_define','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',1003),
+  ('define',4,'p_define','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',1004),
+  ('define',4,'p_define','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',1005),
+  ('define',5,'p_define','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',1006),
+  ('define',6,'p_define','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',1007),
+  ('define',6,'p_define','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',1008),
+  ('define',7,'p_define','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',1009),
+  ('define',3,'p_define_error','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',1042),
+  ('macro_parameter_list',1,'p_macro_parameter_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',1071),
+  ('macro_parameter_list',3,'p_macro_parameter_list','/Users/tim/Desktop/ctypesgen/ctypesgencore/parser/cgrammar.py',1072),
+]

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/pplexer.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/pplexer.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/pplexer.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,295 @@
+#!/usr/bin/env python
+
+'''Preprocess a C source file using gcc and convert the result into
+   a token stream
+
+Reference is C99:
+  * http://www.open-std.org/JTC1/SC22/WG14/www/docs/n1124.pdf
+
+'''
+
+__docformat__ = 'restructuredtext'
+
+import os, re, shlex, sys, tokenize, lex, yacc, traceback
+import ctypes
+from lex import TOKEN
+
+tokens = (
+    'HEADER_NAME', 'IDENTIFIER', 'PP_NUMBER', 'CHARACTER_CONSTANT',
+    'STRING_LITERAL', 'OTHER',
+
+    'PTR_OP', 'INC_OP', 'DEC_OP', 'LEFT_OP', 'RIGHT_OP', 'LE_OP', 'GE_OP',
+    'EQ_OP', 'NE_OP', 'AND_OP', 'OR_OP', 'MUL_ASSIGN', 'DIV_ASSIGN',
+    'MOD_ASSIGN', 'ADD_ASSIGN', 'SUB_ASSIGN', 'LEFT_ASSIGN', 'RIGHT_ASSIGN',
+    'AND_ASSIGN', 'XOR_ASSIGN', 'OR_ASSIGN',  'PERIOD', 'ELLIPSIS',
+
+    'LPAREN', 'NEWLINE',
+    
+    'PP_DEFINE', 'PP_DEFINE_NAME', 'PP_DEFINE_MACRO_NAME', 'PP_MACRO_PARAM',
+    'PP_STRINGIFY', 'PP_IDENTIFIER_PASTE', 'PP_END_DEFINE'
+)
+
+states = [('DEFINE',"exclusive")]
+
+subs = {
+    'D': '[0-9]',
+    'L': '[a-zA-Z_]',
+    'H': '[a-fA-F0-9]',
+    'E': '[Ee][+-]?\s*{D}+',
+    'FS': '[FflL]',
+    'IS': '[uUlL]*',
+}
+# Helper: substitute {foo} with subs[foo] in string (makes regexes more lexy)
+sub_pattern = re.compile('{([^}]*)}')
+def sub_repl_match(m):
+    return subs[m.groups()[0]]
+def sub(s):
+    return sub_pattern.sub(sub_repl_match, s)
+
+# --------------------------------------------------------------------------
+# Token value types
+# --------------------------------------------------------------------------
+
+# Numbers represented as int and float types.
+# For all other tokens, type is just str representation.
+
+class StringLiteral(str):
+    def __new__(cls, value):
+        assert value[0] == '"' and value[-1] == '"'
+        # Unescaping probably not perfect but close enough.
+        try:
+            value = value[1:-1].decode('string_escape')
+        except ValueError, e:
+            try:
+                value = re.sub(r'\\x([0-9a-fA-F])(?![0-9a-fA-F])',
+                               r'\x0\1',
+                               value[1:-1]).decode('string_escape')
+            except ValueError, e:
+                raise ValueError("invalid \\x escape in %s" % value)
+        return str.__new__(cls, value)
+
+# --------------------------------------------------------------------------
+# Token declarations
+# --------------------------------------------------------------------------
+
+punctuators = {
+    # value: (regex, type)
+    r'...': (r'\.\.\.', 'ELLIPSIS'),
+    r'>>=': (r'>>=', 'RIGHT_ASSIGN'),
+    r'<<=': (r'<<=', 'LEFT_ASSIGN'),
+    r'+=': (r'\+=', 'ADD_ASSIGN'),
+    r'-=': (r'-=', 'SUB_ASSIGN'),
+    r'*=': (r'\*=', 'MUL_ASSIGN'),
+    r'/=': (r'/=', 'DIV_ASSIGN'),
+    r'%=': (r'%=', 'MOD_ASSIGN'),
+    r'&=': (r'&=', 'AND_ASSIGN'),
+    r'^=': (r'\^=', 'XOR_ASSIGN'),
+    r'|=': (r'\|=', 'OR_ASSIGN'),
+    r'>>': (r'>>', 'RIGHT_OP'),
+    r'<<': (r'<<', 'LEFT_OP'),
+    r'++': (r'\+\+', 'INC_OP'),
+    r'--': (r'--', 'DEC_OP'),
+    r'->': (r'->', 'PTR_OP'),
+    r'&&': (r'&&', 'AND_OP'),
+    r'||': (r'\|\|', 'OR_OP'),
+    r'<=': (r'<=', 'LE_OP'),
+    r'>=': (r'>=', 'GE_OP'),
+    r'==': (r'==', 'EQ_OP'),
+    r'!=': (r'!=', 'NE_OP'),
+    r'<:': (r'<:', '['),
+    r':>': (r':>', ']'),
+    r'<%': (r'<%', '{'),
+    r'%>': (r'%>', '}'),
+    r';': (r';', ';'),
+    r'{': (r'{', '{'),
+    r'}': (r'}', '}'),
+    r',': (r',', ','),
+    r':': (r':', ':'),
+    r'=': (r'=', '='),
+    r')': (r'\)', ')'),
+    r'[': (r'\[', '['),
+    r']': (r']', ']'),
+    r'.': (r'\.', 'PERIOD'),
+    r'&': (r'&', '&'),
+    r'!': (r'!', '!'),
+    r'~': (r'~', '~'),
+    r'-': (r'-', '-'),
+    r'+': (r'\+', '+'),
+    r'*': (r'\*', '*'),
+    r'/': (r'/', '/'),
+    r'%': (r'%', '%'),
+    r'<': (r'<', '<'),
+    r'>': (r'>', '>'),
+    r'^': (r'\^', '^'),
+    r'|': (r'\|', '|'),
+    r'?': (r'\?', '?')
+}
+
+def punctuator_regex(punctuators):
+    punctuator_regexes = [v[0] for v in punctuators.values()]
+    punctuator_regexes.sort(lambda a, b: -cmp(len(a), len(b)))
+    return '(%s)' % '|'.join(punctuator_regexes)
+
+# Process line-number directives from the preprocessor
+# See http://docs.freebsd.org/info/cpp/cpp.info.Output.html
+DIRECTIVE = r'\#\s+(\d+)\s+"([^"]+)"[ \d]*\n'
+@TOKEN(DIRECTIVE)
+def t_ANY_directive(t):
+    t.lexer.filename = t.groups[2]
+    t.lexer.lineno = int(t.groups[1])
+    return None
+
+@TOKEN(punctuator_regex(punctuators))
+def t_ANY_punctuator(t):
+    t.type = punctuators[t.value][1]
+    return t
+
+IDENTIFIER = sub('{L}({L}|{D})*')
+@TOKEN(IDENTIFIER)
+def t_INITIAL_identifier(t):
+    t.type = 'IDENTIFIER'
+    return t
+
+@TOKEN(IDENTIFIER)
+def t_DEFINE_identifier(t):
+    if t.lexer.next_is_define_name:
+        # This identifier is the name of a macro
+        # We need to look ahead and see if this macro takes parameters or not.
+        if t.lexpos + len(t.value) < t.lexer.lexlen and \
+            t.lexer.lexdata[t.lexpos + len(t.value)] == '(':
+            
+            t.type = 'PP_DEFINE_MACRO_NAME'
+            
+            # Look ahead and read macro parameter list
+            lexdata = t.lexer.lexdata
+            pos = t.lexpos + len(t.value) + 1
+            while lexdata[pos] not in '\n)':
+                pos+=1
+            params = lexdata[t.lexpos+len(t.value)+1 : pos]
+            paramlist = [x.strip() for x in params.split(",") if x.strip()]
+            t.lexer.macro_params = paramlist
+                    
+        else:
+            t.type = 'PP_DEFINE_NAME'
+        
+        t.lexer.next_is_define_name = False
+    elif t.value in t.lexer.macro_params:
+        t.type = 'PP_MACRO_PARAM'
+    else:
+        t.type = 'IDENTIFIER'
+    return t
+
+FLOAT_LITERAL = sub(r"(?P<p1>{D}+)?(?P<dp>[.]?)(?P<p2>(?(p1){D}*|{D}+))" \
+                    r"(?P<exp>(?:[Ee][+-]?{D}+)?)(?P<suf>{FS}?)(?!\w)")
+@TOKEN(FLOAT_LITERAL)
+def t_ANY_float(t):
+    t.type = 'PP_NUMBER'
+    m = t.lexer.lexmatch
+    
+    p1 = m.group("p1")
+    dp = m.group("dp")
+    p2 = m.group("p2")
+    exp = m.group("exp")
+    suf = m.group("suf")
+    
+    if dp or exp or (suf and suf in ("Ff")):
+        s = m.group(0)
+        if suf:
+            s = s[:-1]
+        # Attach a prefix so the parser can figure out if should become an
+        # integer, float, or long
+        t.value = "f" + s
+    elif (suf and suf in ("Ll")):
+        t.value = "l" + p1
+    else:
+        t.value = "i" + p1
+        
+    return t
+
+INT_LITERAL = sub(r"(?P<p1>(?:0x{H}+)|(?:{D}+))(?P<suf>{IS})")
+@TOKEN(INT_LITERAL)
+def t_ANY_int(t):
+    t.type = 'PP_NUMBER'
+    m = t.lexer.lexmatch
+    
+    if "L" in m.group(3) or "l" in m.group(2):
+        prefix = "l"
+    else:
+        prefix = "i"
+    
+    g1 = m.group(2)
+    if g1.startswith("0x"):
+        # Convert base from hexadecimal
+        g1 = str(long(g1[2:],16))
+    elif g1[0]=="0":
+        # Convert base from octal
+        g1 = str(long(g1,8))
+    
+    t.value = prefix + g1
+        
+    return t
+
+CHARACTER_CONSTANT = sub(r"L?'(\\.|[^\\'])+'")
+@TOKEN(CHARACTER_CONSTANT)
+def t_ANY_character_constant(t):
+    t.type = 'CHARACTER_CONSTANT'
+    return t
+
+STRING_LITERAL = sub(r'L?"(\\.|[^\\"])*"')
+@TOKEN(STRING_LITERAL)
+def t_ANY_string_literal(t):
+    t.type = 'STRING_LITERAL'
+    t.value = StringLiteral(t.value)
+    return t
+
+@TOKEN(r'\(')
+def t_ANY_lparen(t):
+    if t.lexpos == 0 or t.lexer.lexdata[t.lexpos-1] not in (' \t\f\v\n'):
+        t.type = 'LPAREN'
+    else:
+        t.type = '('
+    return t
+
+@TOKEN(r'\n')
+def t_INITIAL_newline(t):
+    t.lexer.lineno += 1
+    return None
+
+@TOKEN(r'\#define')
+def t_INITIAL_pp_define(t):
+    t.type = 'PP_DEFINE'
+    t.lexer.begin("DEFINE")
+    t.lexer.next_is_define_name = True
+    t.lexer.macro_params = set()
+    return t
+
+@TOKEN(r'\n')
+def t_DEFINE_newline(t):
+    t.type = 'PP_END_DEFINE'
+    t.lexer.begin("INITIAL")
+    del t.lexer.macro_params
+    
+    # Damage control in case the token immediately after the #define failed
+    # to handle this
+    t.lexer.next_is_define_name = False
+    return t
+
+@TOKEN(r'(\#\#)|(\#)')
+def t_DEFINE_pp_param_op(t):
+    if t.value=='#':
+        t.type = 'PP_STRINGIFY'
+    else:
+        t.type = 'PP_IDENTIFIER_PASTE'
+    return t
+
+def t_INITIAL_error(t):
+    t.type = 'OTHER'
+    return t
+
+def t_DEFINE_error(t):
+    t.type = 'OTHER'
+    t.value = t.value[0]
+    t.lexer.lexpos+=1 # Skip it if it's an error in a #define
+    return t
+
+t_ANY_ignore = ' \t\v\f\r'

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/preprocessor.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/preprocessor.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/preprocessor.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,200 @@
+#!/usr/bin/env python
+
+'''Preprocess a C source file using gcc and convert the result into
+   a token stream
+
+Reference is C99:
+  * http://www.open-std.org/JTC1/SC22/WG14/www/docs/n1124.pdf
+
+'''
+
+__docformat__ = 'restructuredtext'
+
+import os, re, shlex, sys, tokenize, lex, yacc, traceback, subprocess
+import ctypes
+from lex import TOKEN
+import pplexer
+
+# --------------------------------------------------------------------------
+# Lexers
+# --------------------------------------------------------------------------
+
+class PreprocessorLexer(lex.Lexer):
+    def __init__(self):
+        lex.Lexer.__init__(self)
+        self.filename = '<input>'
+        self.in_define = False
+
+    def input(self, data, filename=None):
+        if filename:
+            self.filename = filename 
+        self.lasttoken = None
+        self.input_stack = []
+
+        lex.Lexer.input(self, data)
+
+    def push_input(self, data, filename):
+        self.input_stack.append(
+            (self.lexdata, self.lexpos, self.filename, self.lineno))
+        self.lexdata = data
+        self.lexpos = 0
+        self.lineno = 1
+        self.filename = filename
+        self.lexlen = len(self.lexdata)
+
+    def pop_input(self):
+        self.lexdata, self.lexpos, self.filename, self.lineno = \
+            self.input_stack.pop()
+        self.lexlen = len(self.lexdata)
+
+    def token(self):
+        result = lex.Lexer.token(self)
+        while result is None and self.input_stack:
+            self.pop_input()
+            result = lex.Lexer.token(self)
+
+        if result:
+            self.lasttoken = result.type
+            result.filename = self.filename
+        else:
+            self.lasttoken = None
+
+        return result
+
+class TokenListLexer(object):
+    def __init__(self, tokens):
+        self.tokens = tokens
+        self.pos = 0
+
+    def token(self):
+        if self.pos < len(self.tokens):
+            t = self.tokens[self.pos]
+            self.pos += 1
+            return t
+        else:
+            return None
+
+def symbol_to_token(sym):
+    if isinstance(sym, yacc.YaccSymbol):
+        return sym.value
+    elif isinstance(sym, lex.LexToken):
+        return sym
+    else:
+        assert False, 'Not a symbol: %r' % sym
+
+def create_token(type, value, production=None):
+    '''Create a token of type and value, at the position where 'production'
+    was reduced.  Don't specify production if the token is built-in'''
+    t = lex.LexToken()
+    t.type = type
+    t.value = value
+    t.lexpos = -1
+    if production:
+        t.lineno = production.slice[1].lineno
+        t.filename = production.slice[1].filename
+    else:
+        t.lineno = -1
+        t.filename = '<builtin>'
+    return t
+
+# --------------------------------------------------------------------------
+# Grammars
+# --------------------------------------------------------------------------
+
+class PreprocessorParser(object):
+    def __init__(self,options,cparser):
+        self.defines = ["inline=", "__inline__=", "__extension__=",
+                        "_Bool=uint8_t", "__const=const", "__asm__(x)=",
+                        "__asm(x)=", "CTYPESGEN=1"]
+
+        # On OSX, explicitly add these defines to keep from getting syntax
+        # errors in the OSX standard headers.
+        if hasattr(os, 'uname') and os.uname()[0] == 'Darwin':
+            self.defines += ["__uint16_t=uint16_t",
+                             "__uint32_t=uint32_t",
+                             "__uint64_t=uint64_t"]
+
+        self.matches = []
+        self.output = []
+        self.lexer = lex.lex(cls=PreprocessorLexer,
+                             optimize=1,
+                             lextab='lextab',
+                             outputdir=os.path.dirname(__file__),
+                             module=pplexer)
+        
+        self.options = options
+        self.cparser = cparser # An instance of CParser
+
+    def parse(self, filename):
+        """Parse a file and save its output"""
+        
+        cmd = self.options.cpp
+        if sys.platform == 'darwin':
+            cmd += " -U __BLOCKS__"
+        cmd += " -U __GNUC__"
+        cmd += " -dD"
+        for path in self.options.include_search_paths:
+            cmd += " -I%s" % path 
+        for define in self.defines:
+            cmd += ' "-D%s"' % define
+        cmd += " " + filename
+
+        self.cparser.handle_status(cmd)
+        
+        pp = subprocess.Popen(cmd,
+                              shell = True,
+                              stdout = subprocess.PIPE,
+                              stderr = subprocess.PIPE)
+        ppout, pperr = pp.communicate()
+        
+        for line in pperr.split("\n"):
+            if line:
+                self.cparser.handle_pp_error(line)
+        
+        # We separate lines that are #defines and lines that are source code
+        # We put all the source lines first, then all the #define lines.
+        
+        source_lines= []
+        define_lines = []
+        
+        for line in ppout.split("\n"):
+            line = line + "\n"
+            if line.startswith("# "):
+                # Line number information has to go with both groups
+                source_lines.append(line)
+                define_lines.append(line)
+            
+            elif line.startswith("#define"):
+                source_lines.append("\n")
+                define_lines.append(line)
+            
+            elif line.startswith("#"):
+                # It's a directive, but not a #define. Remove it
+                source_lines.append("\n")
+                define_lines.append("\n")
+            
+            else:
+                source_lines.append(line)
+                define_lines.append("\n")
+        
+        text = "".join(source_lines + define_lines)
+        
+        if self.options.save_preprocessed_headers:
+            self.cparser.handle_status("Saving preprocessed headers to %s." % \
+                self.options.save_preprocessed_headers)
+            try:
+                f = file(self.options.save_preprocessed_headers, "w")
+                f.write(text)
+                f.close()
+            except IOError:
+                self.cparser.handle_error("Couldn't save headers.")
+        
+        self.lexer.input(text)
+        self.output = []
+        
+        while True:
+            token = self.lexer.token()
+            if token is not None:
+                self.output.append(token)
+            else:
+                break

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/yacc.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/yacc.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/parser/yacc.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,2261 @@
+#-----------------------------------------------------------------------------
+# ply: yacc.py
+#
+# Author(s): David M. Beazley (dave at dabeaz.com)
+# Modifications for pyglet by Alex Holkner (alex.holkner at gmail.com) (<ah>)
+# Modifications for ctypesgen by Tim Maxwell (timmaxw at gmail.com) (<tm>)
+#
+# Copyright (C) 2001-2006, David M. Beazley
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+# 
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+# 
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+# 
+# See the file COPYING for a complete copy of the LGPL.
+#
+#
+# This implements an LR parser that is constructed from grammar rules defined
+# as Python functions. The grammar is specified by supplying the BNF inside
+# Python documentation strings.  The inspiration for this technique was borrowed
+# from John Aycock's Spark parsing system.  PLY might be viewed as a cross between
+# Spark and the GNU bison utility.
+#
+# The current implementation is only somewhat object-oriented. The
+# LR parser itself is defined in terms of an object (which allows multiple
+# parsers to co-exist).  However, most of the variables used during table
+# construction are defined in terms of global variables.  Users shouldn't
+# notice unless they are trying to define multiple parsers at the same
+# time using threads (in which case they should have their head examined).
+#
+# This implementation supports both SLR and LALR(1) parsing.  LALR(1)
+# support was originally implemented by Elias Ioup (ezioup at alumni.uchicago.edu),
+# using the algorithm found in Aho, Sethi, and Ullman "Compilers: Principles,
+# Techniques, and Tools" (The Dragon Book).  LALR(1) has since been replaced
+# by the more efficient DeRemer and Pennello algorithm.
+#
+# :::::::: WARNING :::::::
+#
+# Construction of LR parsing tables is fairly complicated and expensive.
+# To make this module run fast, a *LOT* of work has been put into
+# optimization---often at the expense of readability and what one might
+# consider to be good Python "coding style."   Modify the code at your
+# own risk!
+# ----------------------------------------------------------------------------
+
+__version__ = "2.2"
+
+#-----------------------------------------------------------------------------
+#                     === User configurable parameters ===
+#
+# Change these to modify the default behavior of yacc (if you wish)
+#-----------------------------------------------------------------------------
+
+yaccdebug   = 1                # Debugging mode.  If set, yacc generates
+                               # a 'parser.out' file in the current directory
+
+debug_file  = 'parser.out'     # Default name of the debugging file
+tab_module  = 'parsetab'       # Default name of the table module
+default_lr  = 'LALR'           # Default LR table generation method
+
+error_count = 3                # Number of symbols that must be shifted to leave recovery mode
+
+import re, types, sys, cStringIO, os.path
+
+# <tm> 1 July 2008
+# Prefer hashlib (Python >= 2.5); on older interpreters fall back to the
+# legacy md5 module, wrapped in a stand-in object so the rest of the code
+# can uniformly call hashlib.md5().
+try:
+    import hashlib
+except ImportError:
+    # Preserve backwards compatibility with older versions of Python
+    import md5
+    class Dummy:
+        pass
+    hashlib = Dummy()
+    hashlib.md5 = md5.new
+    del Dummy, md5
+
+# Exception raised for yacc-related errors
+class YaccError(Exception):   pass
+
+#-----------------------------------------------------------------------------
+#                        ===  LR Parsing Engine ===
+#
+# The following classes are used for the LR parser itself.  These are not
+# used during table construction and are independent of the actual LR
+# table generation algorithm
+#-----------------------------------------------------------------------------
+
+# This class is used to hold non-terminal grammar symbols during parsing.
+# It normally has the following attributes set:
+#        .type       = Grammar symbol type
+#        .value      = Symbol value
+#        .lineno     = Starting line number
+#        .endlineno  = Ending line number (optional, set automatically)
+#        .lexpos     = Starting lex position
+#        .endlexpos  = Ending lex position (optional, set automatically)
+
+class YaccSymbol:
+    """Grammar symbol held on the parsing stack.
+
+    Attributes (.type, .value, .lineno, .endlineno, .lexpos, .endlexpos)
+    are assigned dynamically during parsing; see the comment block above.
+    """
+    filename = ''  # <ah> class-level default for the filename attribute
+    def __str__(self):    return self.type
+    def __repr__(self):   return str(self)
+
+# This class is a wrapper around the objects actually passed to each
+# grammar rule.   Index lookup and assignment actually assign the
+# .value attribute of the underlying YaccSymbol object.
+# The lineno() method returns the line number of a given
+# item (or 0 if not defined).   The linespan() method returns
+# a tuple of (startline,endline) representing the range of lines
+# for a symbol.  The lexspan() method returns a tuple (lexpos,endlexpos)
+# representing the range of positional information for a symbol.
+
+class YaccProduction:
+    """Wrapper passed to grammar rule functions (the 'p' argument).
+
+    Index access reads/writes the .value of the underlying YaccSymbol
+    objects in .slice; negative indices reach into the parser's symbol
+    stack instead.
+    """
+    def __init__(self,s,stack=None):
+        # s is the list of YaccSymbols for the current production;
+        # stack is the parser's full symbol stack (for negative indexing).
+        self.slice = s
+        self.pbstack = []
+        self.stack = stack
+
+    def __getitem__(self,n):
+        # Integer index: n >= 0 reads the production slice, n < 0 reads
+        # the symbol stack.  Slice objects return a list of values.
+        if type(n) == types.IntType:
+             if n >= 0: return self.slice[n].value
+             else: return self.stack[n].value
+        else:
+             return [s.value for s in self.slice[n.start:n.stop:n.step]]
+
+    def __setitem__(self,n,v):
+        # Assigning p[n] sets the .value of the n-th symbol.
+        self.slice[n].value = v
+
+    def __len__(self):
+        return len(self.slice)
+    
+    def lineno(self,n):
+        # Starting line number of item n (0 if unknown).
+        return getattr(self.slice[n],"lineno",0)
+
+    def linespan(self,n):
+        # (startline, endline) range covered by item n.
+        startline = getattr(self.slice[n],"lineno",0)
+        endline = getattr(self.slice[n],"endlineno",startline)
+        return startline,endline
+
+    def lexpos(self,n):
+        # Starting lexer position of item n (0 if unknown).
+        return getattr(self.slice[n],"lexpos",0)
+
+    def lexspan(self,n):
+        # (startpos, endpos) lexer position range for item n.
+        startpos = getattr(self.slice[n],"lexpos",0)
+        endpos = getattr(self.slice[n],"endlexpos",startpos)
+        return startpos,endpos
+
+    def pushback(self,n):
+        # Push the last n symbols of this production back onto the input.
+        if n <= 0:
+            raise ValueError, "Expected a positive value"
+        if n > (len(self.slice)-1):
+            raise ValueError, "Can't push %d tokens. Only %d are available." % (n,len(self.slice)-1)
+        for i in range(0,n):
+            self.pbstack.append(self.slice[-i-1])
+
+# The LR Parsing engine.   This is defined as a class so that multiple parsers
+# can exist in the same process.  A user never instantiates this directly.
+# Instead, the global yacc() function should be used to create a suitable Parser
+# object. 
+
+class Parser:
+    # <ah> Remove magic (use ParserPrototype)
+    def __init__(self):
+        # Reset internal state
+        self.productions = None          # List of productions
+        self.errorfunc   = None          # Error handling function
+        self.action      = { }           # LR Action table
+        self.goto        = { }           # LR goto table
+        self.require     = { }           # Attribute require table
+        self.method      = "Unknown LR"  # Table construction method used
+
+        # <ah> 25 Jan 2007
+        self.statestackstack = []
+        self.symstackstack = []
+
+    def errok(self):
+        self.errorcount = 0
+
+    def restart(self):
+        del self.statestack[:]
+        del self.symstack[:]
+        sym = YaccSymbol()
+        sym.type = '$end'
+        sym.parser = self # <tm> 25 June 2008
+        self.symstack.append(sym)
+        self.statestack.append(0)
+
+    def push_state(self):
+        '''Save parser state and restart it.'''
+        # <ah> 25 Jan 2007
+        self.statestackstack.append(self.statestack[:])
+        self.symstackstack.append(self.symstack[:])
+        self.restart()
+
+    def pop_state(self):
+        '''Restore saved parser state.'''
+        # <ah> 25 Jan 2007
+        self.statestack[:] = self.statestackstack.pop()
+        self.symstack[:] = self.symstackstack.pop()
+        
+    def parse(self,input=None,lexer=None,debug=0):
+        lookahead = None                 # Current lookahead symbol
+        lookaheadstack = [ ]             # Stack of lookahead symbols
+        actions = self.action            # Local reference to action table
+        goto    = self.goto              # Local reference to goto table
+        prod    = self.productions       # Local reference to production list
+        pslice  = YaccProduction(None)   # Production object passed to grammar rules
+        pslice.parser = self             # Parser object
+        self.errorcount = 0              # Used during error recovery
+
+        # If no lexer was given, we will try to use the lex module
+        if not lexer:
+            import lex
+            lexer = lex.lexer
+
+        pslice.lexer = lexer
+        
+        # If input was supplied, pass to lexer
+        if input:
+            lexer.input(input)
+
+        # Tokenize function
+        get_token = lexer.token
+
+        statestack = [ ]                # Stack of parsing states
+        self.statestack = statestack
+        symstack   = [ ]                # Stack of grammar symbols
+        self.symstack = symstack
+
+        pslice.stack = symstack         # Put in the production
+        errtoken   = None               # Err token
+
+        # The start state is assumed to be (0,$end)
+        statestack.append(0)
+        sym = YaccSymbol()
+        sym.type = '$end'
+        sym.parser = self # <tm> 25 June 2008
+        symstack.append(sym)
+        
+        while 1:
+            # Get the next symbol on the input.  If a lookahead symbol
+            # is already set, we just use that. Otherwise, we'll pull
+            # the next token off of the lookaheadstack or from the lexer
+            if debug > 1:
+                print 'state', statestack[-1]
+            if not lookahead:
+                if not lookaheadstack:
+                    lookahead = get_token()     # Get the next token
+                else:
+                    lookahead = lookaheadstack.pop()
+                if not lookahead:
+                    lookahead = YaccSymbol()
+                    lookahead.type = '$end'
+                    lookahead.parser = self # <tm> 25 June 2008
+            if debug:
+                errorlead = ("%s . %s" % (" ".join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip()
+
+            # Check the action table
+            s = statestack[-1]
+            ltype = lookahead.type
+            t = actions.get((s,ltype),None)
+
+            if debug > 1:
+                print 'action', t
+            if t is not None:
+                if t > 0:
+                    # shift a symbol on the stack
+                    if ltype == '$end':
+                        # Error, end of input
+                        sys.stderr.write("yacc: Parse error. EOF\n")
+                        return
+                    statestack.append(t)
+                    if debug > 1:
+                        sys.stderr.write("%-60s shift state %s\n" % (errorlead, t))
+                    symstack.append(lookahead)
+                    lookahead = None
+
+                    # Decrease error count on successful shift
+                    if self.errorcount > 0:
+                        self.errorcount -= 1
+                        
+                    continue
+                
+                if t < 0:
+                    # reduce a symbol on the stack, emit a production
+                    p = prod[-t]
+                    pname = p.name
+                    plen  = p.len
+
+                    # Get production function
+                    sym = YaccSymbol()
+                    sym.type = pname       # Production name
+                    sym.value = None
+                    if debug > 1:
+                        sys.stderr.write("%-60s reduce %d\n" % (errorlead, -t))
+
+                    if plen:
+                        targ = symstack[-plen-1:]
+                        targ[0] = sym
+                        try:
+                            sym.lineno = targ[1].lineno
+                            sym.filename = targ[1].filename
+                            sym.endlineno = getattr(targ[-1],"endlineno",targ[-1].lineno)
+                            sym.lexpos = targ[1].lexpos
+                            sym.endlexpos = getattr(targ[-1],"endlexpos",targ[-1].lexpos)
+                        except AttributeError:
+                            sym.lineno = 0
+                        del symstack[-plen:]
+                        del statestack[-plen:]
+                    else:
+                        sym.lineno = 0
+                        targ = [ sym ]
+                    pslice.slice = targ
+                    pslice.pbstack = []
+                    # Call the grammar rule with our special slice object
+                    p.func(pslice)
+
+                    # If there was a pushback, put that on the stack
+                    if pslice.pbstack:
+                        lookaheadstack.append(lookahead)
+                        for _t in pslice.pbstack:
+                            lookaheadstack.append(_t)
+                        lookahead = None
+
+                    symstack.append(sym)
+                    statestack.append(goto[statestack[-1],pname])
+                    continue
+
+                if t == 0:
+                    n = symstack[-1]
+                    return getattr(n,"value",None)
+                    sys.stderr.write(errorlead, "\n")
+
+            if t == None:
+                if debug:
+                    sys.stderr.write(errorlead + "\n")
+                # We have some kind of parsing error here.  To handle
+                # this, we are going to push the current token onto
+                # the tokenstack and replace it with an 'error' token.
+                # If there are any synchronization rules, they may
+                # catch it.
+                #
+                # In addition to pushing the error token, we call call
+                # the user defined p_error() function if this is the
+                # first syntax error.  This function is only called if
+                # errorcount == 0.
+                if not self.errorcount:
+                    self.errorcount = error_count
+                    errtoken = lookahead
+                    
+                    # <tm> 24 June 2008
+                    # Let EOF error token get through so errorfunc would have
+                    # access to the parser.
+                    
+                    if self.errorfunc:
+                        global errok,token,restart
+                        errok = self.errok        # Set some special functions available in error recovery
+                        token = get_token
+                        restart = self.restart
+                        tok = self.errorfunc(errtoken)
+                        del errok, token, restart   # Delete special functions
+                        
+                        if not self.errorcount:
+                            # User must have done some kind of panic
+                            # mode recovery on their own.  The
+                            # returned token is the next lookahead
+                            lookahead = tok
+                            errtoken = None
+                            continue
+                    else:
+                        if errtoken:
+                            if hasattr(errtoken,"lineno"): lineno = lookahead.lineno
+                            else: lineno = 0
+                            if lineno:
+                                sys.stderr.write("yacc: Syntax error at line %d, token=%s\n" % (lineno, errtoken.type))
+                            else:
+                                sys.stderr.write("yacc: Syntax error, token=%s" % errtoken.type)
+                        else:
+                            sys.stderr.write("yacc: Parse error in input. EOF\n")
+                            return
+
+                else:
+                    self.errorcount = error_count
+                
+                # case 1:  the statestack only has 1 entry on it.  If we're in this state, the
+                # entire parse has been rolled back and we're completely hosed.   The token is
+                # discarded and we just keep going.
+
+                if len(statestack) <= 1 and lookahead.type != '$end':
+                    lookahead = None
+                    errtoken = None
+                    # Nuke the pushback stack
+                    del lookaheadstack[:]
+                    continue
+
+                # case 2: the statestack has a couple of entries on it, but we're
+                # at the end of the file. nuke the top entry and generate an error token
+
+                # Start nuking entries on the stack
+                if lookahead.type == '$end':
+                    # Whoa. We're really hosed here. Bail out
+                    return 
+
+                if lookahead.type != 'error':
+                    sym = symstack[-1]
+                    if sym.type == 'error':
+                        # Hmmm. Error is on top of stack, we'll just nuke input
+                        # symbol and continue
+                        lookahead = None
+                        continue
+                    t = YaccSymbol()
+                    t.type = 'error'
+                    if hasattr(lookahead,"lineno"):
+                        t.lineno = lookahead.lineno
+                    t.value = lookahead
+                    lookaheadstack.append(lookahead)
+                    lookahead = t
+                else:
+                    symstack.pop()
+                    statestack.pop()
+
+                continue
+
+            # Call an error function here
+            raise RuntimeError, "yacc: internal parser error!!!\n"
+
+# -----------------------------------------------------------------------------
+#                          === Parser Construction ===
+#
+# The following functions and variables are used to implement the yacc() function
+# itself.   This is pretty hairy stuff involving lots of error checking,
+# construction of LR items, kernels, and so forth.   Although a lot of
+# this work is done using global variables, the resulting Parser object
+# is completely self contained--meaning that it is safe to repeatedly
+# call yacc() with different grammars in the same application.
+# -----------------------------------------------------------------------------
+        
+# -----------------------------------------------------------------------------
+# validate_file()
+#
+# This function checks to see if there are duplicated p_rulename() functions
+# in the parser module file.  Without this function, it is really easy for
+# users to make mistakes by cutting and pasting code fragments (and it's a real
+# bugger to try and figure out why the resulting parser doesn't work).  Therefore,
+# we just do a little regular expression pattern matching of def statements
+# to try and detect duplicates.
+# -----------------------------------------------------------------------------
+
+def validate_file(filename):
+    """Scan a parser module's source for duplicated p_rulename() functions.
+
+    Returns 1 if the file looks okay (or cannot be checked); returns 0
+    after reporting each redefinition on stderr.
+    """
+    base,ext = os.path.splitext(filename)
+    if ext != '.py': return 1          # No idea. Assume it's okay.
+
+    try:
+        f = open(filename)
+        lines = f.readlines()
+        f.close()
+    except IOError:
+        return 1                       # Oh well
+
+    # Match def p_funcname(
+    fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(')
+    counthash = { }
+    linen = 1
+    noerror = 1
+    for l in lines:
+        m = fre.match(l)
+        if m:
+            name = m.group(1)
+            # Remember the first line a rule name was seen on; report any
+            # later definition of the same name as a duplicate.
+            prev = counthash.get(name)
+            if not prev:
+                counthash[name] = linen
+            else:
+                sys.stderr.write("%s:%d: Function %s redefined. Previously defined on line %d\n" % (filename,linen,name,prev))
+                noerror = 0
+        linen += 1
+    return noerror
+
+# This function looks for functions that might be grammar rules, but which don't have the proper p_ prefix.
+def validate_dict(d):
+    for n,v in d.items(): 
+        if n[0:2] == 'p_' and type(v) in (types.FunctionType, types.MethodType): continue
+        if n[0:2] == 't_': continue
+
+        if n[0:2] == 'p_':
+            sys.stderr.write("yacc: Warning. '%s' not defined as a function\n" % n)
+        if 1 and isinstance(v,types.FunctionType) and v.func_code.co_argcount == 1:
+            try:
+                doc = v.__doc__.split(" ")
+                if doc[1] == ':':
+                    sys.stderr.write("%s:%d: Warning. Possible grammar rule '%s' defined without p_ prefix.\n" % (v.func_code.co_filename, v.func_code.co_firstlineno,n))
+            except StandardError:
+                pass
+
+# -----------------------------------------------------------------------------
+#                           === GRAMMAR FUNCTIONS ===
+#
+# The following global variables and functions are used to store, manipulate,
+# and verify the grammar rules specified by the user.
+# -----------------------------------------------------------------------------
+
+# Initialize all of the global variables used during grammar construction
+def initialize_vars():
+    """(Re)create every module-level table used during grammar construction.
+
+    Must be called before add_function()/add_production() so that repeated
+    yacc() calls start from a clean slate.
+    """
+    global Productions, Prodnames, Prodmap, Terminals 
+    global Nonterminals, First, Follow, Precedence, LRitems
+    global Errorfunc, Signature, Requires
+
+    Productions  = [None]  # A list of all of the productions.  The first
+                           # entry is always reserved for the purpose of
+                           # building an augmented grammar
+                        
+    Prodnames    = { }     # A dictionary mapping the names of nonterminals to a list of all
+                           # productions of that nonterminal.
+                        
+    Prodmap      = { }     # A dictionary that is only used to detect duplicate
+                           # productions.
+
+    Terminals    = { }     # A dictionary mapping the names of terminal symbols to a
+                           # list of the rules where they are used.
+
+    Nonterminals = { }     # A dictionary mapping names of nonterminals to a list
+                           # of rule numbers where they are used.
+
+    First        = { }     # A dictionary of precomputed FIRST(x) symbols
+    
+    Follow       = { }     # A dictionary of precomputed FOLLOW(x) symbols
+
+    Precedence   = { }     # Precedence rules for each terminal. Contains tuples of the
+                           # form ('right',level) or ('nonassoc', level) or ('left',level)
+
+    LRitems      = [ ]     # A list of all LR items for the grammar.  These are the
+                           # productions with the "dot" like E -> E . PLUS E
+
+    Errorfunc    = None    # User defined error handler
+    
+    # <tm> 1 July 2008 changed to use hashlib
+    Signature    = hashlib.md5()   # Digital signature of the grammar rules, precedence
+                                   # and other information.  Used to determined when a
+                                   # parsing table needs to be regenerated.
+
+    Requires     = { }     # Requires list
+
+    # File objects used when creating the parser.out debugging file
+    global _vf, _vfc
+    _vf           = cStringIO.StringIO()
+    _vfc          = cStringIO.StringIO()
+
+# -----------------------------------------------------------------------------
+# class Production:
+#
+# This class stores the raw information about a single production or grammar rule.
+# It has a few required attributes:
+#
+#       name     - Name of the production (nonterminal)
+#       prod     - A list of symbols making up its production
+#       number   - Production number.
+#
+# In addition, a few additional attributes are used to help with debugging or
+# optimization of table generation.
+#
+#       file     - File where production action is defined.
+#       lineno   - Line number where action is defined
+#       func     - Action function
+#       prec     - Precedence level
+#       lr_next  - Next LR item. Example, if we are ' E -> E . PLUS E'
+#                  then lr_next refers to 'E -> E PLUS . E'   
+#       lr_index - LR item index (location of the ".") in the prod list.
+#       lookaheads - LALR lookahead symbols for this item
+#       len      - Length of the production (number of symbols on right hand side)
+# -----------------------------------------------------------------------------
+
+class Production:
+    """Raw information about a single grammar rule; see the comment block
+    above for the meaning of the attributes."""
+    def __init__(self,**kw):
+        # Attributes (name, prod, number, func, ...) are supplied as
+        # keyword arguments by the caller.
+        for k,v in kw.items():
+            setattr(self,k,v)
+        self.lr_index = -1
+        self.lr0_added = 0    # Flag indicating whether or not added to LR0 closure
+        self.lr1_added = 0    # Flag indicating whether or not added to LR1
+        self.usyms = [ ]
+        self.lookaheads = { }
+        self.lk_added = { }
+        self.setnumbers = [ ]
+        
+    def __str__(self):
+        # Render as "name -> sym1 sym2 ..." (or "<empty>").
+        if self.prod:
+            s = "%s -> %s" % (self.name," ".join(self.prod))
+        else:
+            s = "%s -> <empty>" % self.name
+        return s
+
+    def __repr__(self):
+        return str(self)
+
+    # Compute lr_items from the production
+    def lr_item(self,n):
+        """Return a copy of this production with the LR 'dot' inserted at
+        position n, or None if n is past the end of the right-hand side."""
+        if n > len(self.prod): return None
+        p = Production()
+        p.name = self.name
+        p.prod = list(self.prod)
+        p.number = self.number
+        p.lr_index = n
+        p.lookaheads = { }
+        p.setnumbers = self.setnumbers
+        p.prod.insert(n,".")
+        p.prod = tuple(p.prod)
+        p.len = len(p.prod)
+        p.usyms = self.usyms
+
+        # Precompute list of productions immediately following
+        try:
+            p.lrafter = Prodnames[p.prod[n+1]]
+        except (IndexError,KeyError),e:
+            # Dot is at the end, or the next symbol is a terminal.
+            p.lrafter = []
+        try:
+            p.lrbefore = p.prod[n-1]
+        except IndexError:
+            p.lrbefore = None
+
+        return p
+
+class MiniProduction:
+    # NOTE(review): empty here; attributes are presumably assigned
+    # dynamically elsewhere (not visible in this chunk) -- confirm
+    # before documenting further.
+    pass
+
+# regex matching identifiers
+_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$')
+
+# -----------------------------------------------------------------------------
+# add_production()
+#
+# Given an action function, this function assembles a production rule.
+# The production rule is assumed to be found in the function's docstring.
+# This rule has the general syntax:
+#
+#              name1 ::= production1
+#                     |  production2
+#                     |  production3
+#                    ...
+#                     |  productionn
+#              name2 ::= production1
+#                     |  production2
+#                    ... 
+# -----------------------------------------------------------------------------
+
+def add_production(f,file,line,prodname,syms):
+    """Assemble one production rule and register it in the global tables.
+
+    f        -- the rule's action function
+    file,line -- where the rule is defined (for error messages)
+    prodname -- left-hand-side nonterminal name
+    syms     -- list of right-hand-side symbol names (may contain '%prec')
+    Returns 0 on success, -1 after reporting an error on stderr.
+    """
+    
+    if Terminals.has_key(prodname):
+        sys.stderr.write("%s:%d: Illegal rule name '%s'. Already defined as a token.\n" % (file,line,prodname))
+        return -1
+    if prodname == 'error':
+        sys.stderr.write("%s:%d: Illegal rule name '%s'. error is a reserved word.\n" % (file,line,prodname))
+        return -1
+                
+    if not _is_identifier.match(prodname):
+        sys.stderr.write("%s:%d: Illegal rule name '%s'\n" % (file,line,prodname))
+        return -1
+
+    # Validate right-hand-side symbols; quoted one-character literals are
+    # registered as terminals and replaced by their character value.
+    for x in range(len(syms)):
+        s = syms[x]
+        if s[0] in "'\"":
+             try:
+                 c = eval(s)
+                 if (len(c) > 1):
+                      sys.stderr.write("%s:%d: Literal token %s in rule '%s' may only be a single character\n" % (file,line,s, prodname)) 
+                      return -1
+                 if not Terminals.has_key(c):
+                      Terminals[c] = []
+                 syms[x] = c
+                 continue
+             except SyntaxError:
+                 pass
+        if not _is_identifier.match(s) and s != '%prec':
+            sys.stderr.write("%s:%d: Illegal name '%s' in rule '%s'\n" % (file,line,s, prodname))
+            return -1
+
+    # See if the rule is already in the rulemap
+    map = "%s -> %s" % (prodname,syms)
+    if Prodmap.has_key(map):
+        m = Prodmap[map]
+        sys.stderr.write("%s:%d: Duplicate rule %s.\n" % (file,line, m))
+        sys.stderr.write("%s:%d: Previous definition at %s:%d\n" % (file,line, m.file, m.line))
+        return -1
+
+    p = Production()
+    p.name = prodname
+    p.prod = syms
+    p.file = file
+    p.line = line
+    p.func = f
+    p.number = len(Productions)
+
+            
+    Productions.append(p)
+    Prodmap[map] = p
+    if not Nonterminals.has_key(prodname):
+        Nonterminals[prodname] = [ ]
+    
+    # Add all terminals to Terminals
+    i = 0
+    while i < len(p.prod):
+        t = p.prod[i]
+        if t == '%prec':
+            try:
+                precname = p.prod[i+1]
+            except IndexError:
+                sys.stderr.write("%s:%d: Syntax error. Nothing follows %%prec.\n" % (p.file,p.line))
+                return -1
+
+            prec = Precedence.get(precname,None)
+            if not prec:
+                sys.stderr.write("%s:%d: Nothing known about the precedence of '%s'\n" % (p.file,p.line,precname))
+                return -1
+            else:
+                p.prec = prec
+            # Remove '%prec' and its following argument: two deletes at
+            # the same index are intentional.
+            del p.prod[i]
+            del p.prod[i]
+            continue
+
+        if Terminals.has_key(t):
+            Terminals[t].append(p.number)
+            # Is a terminal.  We'll assign a precedence to p based on this
+            if not hasattr(p,"prec"):
+                p.prec = Precedence.get(t,('right',0))
+        else:
+            if not Nonterminals.has_key(t):
+                Nonterminals[t] = [ ]
+            Nonterminals[t].append(p.number)
+        i += 1
+
+    if not hasattr(p,"prec"):
+        p.prec = ('right',0)
+        
+    # Set final length of productions
+    p.len  = len(p.prod)
+    p.prod = tuple(p.prod)
+
+    # Calculate unique syms in the production
+    p.usyms = [ ]
+    for s in p.prod:
+        if s not in p.usyms:
+            p.usyms.append(s)
+    
+    # Add to the global productions list
+    try:
+        Prodnames[p.name].append(p)
+    except KeyError:
+        Prodnames[p.name] = [ p ]
+    return 0
+
+# Given a raw rule function, this function rips out its doc string
+# and adds rules to the grammar
+
+def add_function(f):
+    """Extract grammar rules from the docstring of rule function f and add
+    them to the grammar via add_production().
+
+    Returns 0 on success, or a negative error count after reporting
+    problems on stderr.
+    """
+    line = f.func_code.co_firstlineno
+    file = f.func_code.co_filename
+    error = 0
+
+    # A bound method carries an implicit self argument.
+    if isinstance(f,types.MethodType):
+        reqdargs = 2
+    else:
+        reqdargs = 1
+        
+    if f.func_code.co_argcount > reqdargs:
+        sys.stderr.write("%s:%d: Rule '%s' has too many arguments.\n" % (file,line,f.__name__))
+        return -1
+
+    if f.func_code.co_argcount < reqdargs:
+        sys.stderr.write("%s:%d: Rule '%s' requires an argument.\n" % (file,line,f.__name__))
+        return -1
+          
+    if f.__doc__:
+        # Split the doc string into lines
+        pstrings = f.__doc__.splitlines()
+        lastp = None
+        dline = line
+        for ps in pstrings:
+            dline += 1
+            p = ps.split()
+            if not p: continue
+            try:
+                if p[0] == '|':
+                    # This is a continuation of a previous rule
+                    if not lastp:
+                        sys.stderr.write("%s:%d: Misplaced '|'.\n" % (file,dline))
+                        return -1
+                    prodname = lastp
+                    if len(p) > 1:
+                        syms = p[1:]
+                    else:
+                        syms = [ ]
+                else:
+                    # New rule: "name : sym1 sym2 ..." (or "name ::= ...")
+                    prodname = p[0]
+                    lastp = prodname
+                    assign = p[1]
+                    if len(p) > 2:
+                        syms = p[2:]
+                    else:
+                        syms = [ ]
+                    if assign != ':' and assign != '::=':
+                        sys.stderr.write("%s:%d: Syntax error. Expected ':'\n" % (file,dline))
+                        return -1
+                         
+ 
+                # add_production returns 0 or -1; accumulate failures.
+                e = add_production(f,file,dline,prodname,syms)
+                error += e
+
+                
+            except StandardError:
+                sys.stderr.write("%s:%d: Syntax error in rule '%s'\n" % (file,dline,ps))
+                error -= 1
+    else:
+        sys.stderr.write("%s:%d: No documentation string specified in function '%s'\n" % (file,line,f.__name__))
+    return error
+
+
+# Cycle checking code (Michael Dyck)
+
+def compute_reachable():
+    '''
+    Find each symbol that can be reached from the start symbol.
+    Print a warning for any nonterminals that can't be reached.
+    (Unused terminals have already had their warning.)
+    '''
+    Reachable = { }
+    for s in Terminals.keys() + Nonterminals.keys():
+        Reachable[s] = 0
+
+    mark_reachable_from( Productions[0].prod[0], Reachable )
+
+    for s in Nonterminals.keys():
+        if not Reachable[s]:
+            sys.stderr.write("yacc: Symbol '%s' is unreachable.\n" % s)
+
+def mark_reachable_from(s, Reachable):
+    '''
+    Mark all symbols that are reachable from symbol s.
+    '''
+    if Reachable[s]:
+        # We've already reached symbol s.
+        return
+    Reachable[s] = 1
+    for p in Prodnames.get(s,[]):
+        for r in p.prod:
+            mark_reachable_from(r, Reachable)
+
+# -----------------------------------------------------------------------------
+# compute_terminates()
+#
+# This function looks at the various parsing rules and tries to detect
+# infinite recursion cycles (grammar rules where there is no possible way
+# to derive a string of only terminals).
+# -----------------------------------------------------------------------------
+def compute_terminates():
+    '''
+    Detect grammar symbols that can never derive a string consisting
+    only of terminals (infinite recursion).  Writes a message to stderr
+    for each offending symbol and returns 1 if any were found, else 0.
+    '''
+    Terminates = {}
+
+    # Terminals:
+    for t in Terminals.keys():
+        Terminates[t] = 1
+
+    Terminates['$end'] = 1
+
+    # Nonterminals:
+
+    # Initialize to false:
+    for n in Nonterminals.keys():
+        Terminates[n] = 0
+
+    # Then propagate termination until no change (fixed-point iteration):
+    while 1:
+        some_change = 0
+        for (n,pl) in Prodnames.items():
+            # Nonterminal n terminates iff any of its productions terminates.
+            for p in pl:
+                # Production p terminates iff all of its rhs symbols terminate.
+                for s in p.prod:
+                    if not Terminates[s]:
+                        # The symbol s does not terminate,
+                        # so production p does not terminate.
+                        p_terminates = 0
+                        break
+                else:
+                    # didn't break from the loop,
+                    # so every symbol s terminates
+                    # so production p terminates.
+                    p_terminates = 1
+
+                if p_terminates:
+                    # symbol n terminates!
+                    if not Terminates[n]:
+                        Terminates[n] = 1
+                        some_change = 1
+                    # Don't need to consider any more productions for this n.
+                    break
+
+        if not some_change:
+            break
+
+    some_error = 0
+    for (s,terminates) in Terminates.items():
+        if not terminates:
+            if not Prodnames.has_key(s) and not Terminals.has_key(s) and s != 'error':
+                # s is used-but-not-defined, and we've already warned of that,
+                # so it would be overkill to say that it's also non-terminating.
+                pass
+            else:
+                sys.stderr.write("yacc: Infinite recursion detected for symbol '%s'.\n" % s)
+                some_error = 1
+
+    return some_error
+
+# -----------------------------------------------------------------------------
+# verify_productions()
+#
+# This function examines all of the supplied rules to see if they seem valid.
+# -----------------------------------------------------------------------------
+def verify_productions(cycle_check=1):
+    '''
+    Sanity-check the collected grammar: undefined symbols, unused tokens,
+    unused rules and (when cycle_check is true) unreachable and
+    non-terminating symbols.  Returns a nonzero value if errors were found.
+    '''
+    error = 0
+    for p in Productions:
+        if not p: continue
+
+        # Every RHS symbol must be a known rule, a declared token, or 'error'.
+        for s in p.prod:
+            if not Prodnames.has_key(s) and not Terminals.has_key(s) and s != 'error':
+                sys.stderr.write("%s:%d: Symbol '%s' used, but not defined as a token or a rule.\n" % (p.file,p.line,s))
+                error = 1
+                continue
+
+    unused_tok = 0 
+    # Now verify all of the tokens
+    if yaccdebug:
+        _vf.write("Unused terminals:\n\n")
+    for s,v in Terminals.items():
+        # v lists the productions using terminal s; empty means unused.
+        if s != 'error' and not v:
+            sys.stderr.write("yacc: Warning. Token '%s' defined, but not used.\n" % s)
+            if yaccdebug: _vf.write("   %s\n"% s)
+            unused_tok += 1
+
+    # Print out all of the productions
+    if yaccdebug:
+        _vf.write("\nGrammar\n\n")
+        for i in range(1,len(Productions)):
+            _vf.write("Rule %-5d %s\n" % (i, Productions[i]))
+        
+    unused_prod = 0
+    # Verify the use of all productions
+    for s,v in Nonterminals.items():
+        # v lists the productions referencing nonterminal s; empty means unused.
+        if not v:
+            p = Prodnames[s][0]
+            sys.stderr.write("%s:%d: Warning. Rule '%s' defined, but not used.\n" % (p.file,p.line, s))
+            unused_prod += 1
+
+    
+    if unused_tok == 1:
+        sys.stderr.write("yacc: Warning. There is 1 unused token.\n")
+    if unused_tok > 1:
+        sys.stderr.write("yacc: Warning. There are %d unused tokens.\n" % unused_tok)
+
+    if unused_prod == 1:
+        sys.stderr.write("yacc: Warning. There is 1 unused rule.\n")
+    if unused_prod > 1:
+        sys.stderr.write("yacc: Warning. There are %d unused rules.\n" % unused_prod)
+
+    # Cross-reference listings for the debug file.
+    if yaccdebug:
+        _vf.write("\nTerminals, with rules where they appear\n\n")
+        ks = Terminals.keys()
+        ks.sort()
+        for k in ks:
+            _vf.write("%-20s : %s\n" % (k, " ".join([str(s) for s in Terminals[k]])))
+        _vf.write("\nNonterminals, with rules where they appear\n\n")
+        ks = Nonterminals.keys()
+        ks.sort()
+        for k in ks:
+            _vf.write("%-20s : %s\n" % (k, " ".join([str(s) for s in Nonterminals[k]])))
+
+    if (cycle_check):
+        compute_reachable()
+        error += compute_terminates()
+#        error += check_cycles()
+    return error
+
+# -----------------------------------------------------------------------------
+# build_lritems()
+#
+# This function walks the list of productions and builds a complete set of the
+# LR items.  The LR items are stored in two ways:  First, they are uniquely
+# numbered and placed in the list _lritems.  Second, a linked list of LR items
+# is built for each production.  For example:
+#
+#   E -> E PLUS E
+#
+# Creates the list
+#
+#  [E -> . E PLUS E, E -> E . PLUS E, E -> E PLUS . E, E -> E PLUS E . ] 
+# -----------------------------------------------------------------------------
+
+def build_lritems():
+    # Number every LR item, collect them in the global LRitems list, and
+    # chain each production's items together via the lr_next attribute
+    # (see the banner comment above for an example).
+    for p in Productions:
+        lastlri = p
+        lri = p.lr_item(0)   # NOTE(review): this value is never used -- it is
+                             # recomputed by the first loop iteration (i = 0).
+                             # Looks redundant; confirm before removing.
+        i = 0
+        while 1:
+            lri = p.lr_item(i)
+            lastlri.lr_next = lri
+            if not lri: break
+            lri.lr_num = len(LRitems)
+            LRitems.append(lri)
+            lastlri = lri
+            i += 1
+
+    # In order for the rest of the parser generator to work, we need to
+    # guarantee that no more lritems are generated.  Therefore, we nuke
+    # the p.lr_item method.  (Only used in debugging)
+    # Production.lr_item = None
+
+# -----------------------------------------------------------------------------
+# add_precedence()
+#
+# Given a list of precedence rules, add to the precedence table.
+# -----------------------------------------------------------------------------
+
+def add_precedence(plist):
+    plevel = 0
+    error = 0
+    for p in plist:
+        plevel += 1
+        try:
+            prec = p[0]
+            terms = p[1:]
+            if prec != 'left' and prec != 'right' and prec != 'nonassoc':
+                sys.stderr.write("yacc: Invalid precedence '%s'\n" % prec)
+                return -1
+            for t in terms:
+                if Precedence.has_key(t):
+                    sys.stderr.write("yacc: Precedence already specified for terminal '%s'\n" % t)
+                    error += 1
+                    continue
+                Precedence[t] = (prec,plevel)
+        except:
+            sys.stderr.write("yacc: Invalid precedence table.\n")
+            error += 1
+
+    return error
+
+# -----------------------------------------------------------------------------
+# augment_grammar()
+#
+# Compute the augmented grammar.  This is just a rule S' -> start where start
+# is the starting symbol.
+# -----------------------------------------------------------------------------
+
+def augment_grammar(start=None):
+    if not start:
+        start = Productions[1].name
+    Productions[0] = Production(name="S'",prod=[start],number=0,len=1,prec=('right',0),func=None)
+    Productions[0].usyms = [ start ]
+    Nonterminals[start].append(0)
+
+
+# -------------------------------------------------------------------------
+# first()
+#
+# Compute the value of FIRST1(beta) where beta is a tuple of symbols.
+#
+# During execution of compute_first1, the result may be incomplete.
+# Afterward (e.g., when called from compute_follow()), it will be complete.
+# -------------------------------------------------------------------------
+def first(beta):
+
+    # We are computing First(x1,x2,x3,...,xn)
+    result = [ ]
+    for x in beta:
+        x_produces_empty = 0
+
+        # Add all the non-<empty> symbols of First[x] to the result.
+        for f in First[x]:
+            if f == '<empty>':
+                x_produces_empty = 1
+            else:
+                if f not in result: result.append(f)
+
+        if x_produces_empty:
+            # We have to consider the next x in beta,
+            # i.e. stay in the loop.
+            pass
+        else:
+            # We don't have to consider any further symbols in beta.
+            break
+    else:
+        # There was no 'break' from the loop,
+        # so x_produces_empty was true for all x in beta,
+        # so beta produces empty as well.
+        result.append('<empty>')
+
+    return result
+
+
+# FOLLOW(x)
+# Given a non-terminal.  This function computes the set of all symbols
+# that might follow it.  Dragon book, p. 189.
+
+def compute_follow(start=None):
+    '''
+    Compute FOLLOW(B) for every nonterminal B (Dragon book, p. 189) and
+    store the result in the global Follow dictionary.  When no start
+    symbol is given, the first real production's name is used.
+    '''
+    # Add '$end' to the follow list of the start symbol
+    for k in Nonterminals.keys():
+        Follow[k] = [ ]
+
+    if not start:
+        start = Productions[1].name
+        
+    Follow[start] = [ '$end' ]
+        
+    # Fixed-point iteration: keep propagating until nothing changes.
+    while 1:
+        didadd = 0
+        for p in Productions[1:]:
+            # Here is the production set
+            for i in range(len(p.prod)):
+                B = p.prod[i]
+                if Nonterminals.has_key(B):
+                    # Okay. We got a non-terminal in a production
+                    # For A -> alpha B beta: FIRST(beta) minus <empty>
+                    # belongs to FOLLOW(B).
+                    fst = first(p.prod[i+1:])
+                    hasempty = 0
+                    for f in fst:
+                        if f != '<empty>' and f not in Follow[B]:
+                            Follow[B].append(f)
+                            didadd = 1
+                        if f == '<empty>':
+                            hasempty = 1
+                    if hasempty or i == (len(p.prod)-1):
+                        # B is (effectively) rightmost, so FOLLOW(A)
+                        # also belongs to FOLLOW(B).
+                        # Add elements of follow(a) to follow(b)
+                        for f in Follow[p.name]:
+                            if f not in Follow[B]:
+                                Follow[B].append(f)
+                                didadd = 1
+        if not didadd: break
+
+    # Debug dump deliberately disabled ('if 0').
+    if 0 and yaccdebug:
+        _vf.write('\nFollow:\n')
+        for k in Nonterminals.keys():
+            _vf.write("%-20s : %s\n" % (k, " ".join([str(s) for s in Follow[k]])))
+
+# -------------------------------------------------------------------------
+# compute_first1()
+#
+# Compute the value of FIRST1(X) for all symbols
+# -------------------------------------------------------------------------
+def compute_first1():
+
+    # Terminals:
+    for t in Terminals.keys():
+        First[t] = [t]
+
+    First['$end'] = ['$end']
+    First['#'] = ['#'] # what's this for?
+
+    # Nonterminals:
+
+    # Initialize to the empty set:
+    for n in Nonterminals.keys():
+        First[n] = []
+
+    # Then propagate symbols until no change:
+    while 1:
+        some_change = 0
+        for n in Nonterminals.keys():
+            for p in Prodnames[n]:
+                for f in first(p.prod):
+                    if f not in First[n]:
+                        First[n].append( f )
+                        some_change = 1
+        if not some_change:
+            break
+
+    if 0 and yaccdebug:
+        _vf.write('\nFirst:\n')
+        for k in Nonterminals.keys():
+            _vf.write("%-20s : %s\n" %
+                (k, " ".join([str(s) for s in First[k]])))
+
+# -----------------------------------------------------------------------------
+#                           === SLR Generation ===
+#
+# The following functions are used to construct SLR (Simple LR) parsing tables
+# as described on p.221-229 of the dragon book.
+# -----------------------------------------------------------------------------
+
+# Global variables for the LR parsing engine
+def lr_init_vars():
+    global _lr_action, _lr_goto, _lr_method
+    global _lr_goto_cache, _lr0_cidhash
+    
+    _lr_action       = { }        # Action table
+    _lr_goto         = { }        # Goto table
+    _lr_method       = "Unknown"  # LR method used
+    _lr_goto_cache   = { }
+    _lr0_cidhash     = { }
+
+
+# Compute the LR(0) closure operation on I, where I is a set of LR(0) items.
+# prodlist is a list of productions.
+
+_add_count = 0       # Counter used to detect cycles
+
+def lr0_closure(I):
+    # Compute the LR(0) closure of the item set I.  Each call bumps
+    # _add_count and tags productions with it (x.lr0_added), so a
+    # production is appended at most once per closure without needing
+    # to reset any flags between calls.
+    global _add_count
+    
+    _add_count += 1
+    prodlist = Productions
+    
+    # Add everything in I to J        
+    J = I[:]
+    didadd = 1
+    while didadd:
+        didadd = 0
+        # Note: J grows while being iterated; the surrounding while-loop
+        # re-scans until no more items are added.
+        for j in J:
+            # j.lrafter: productions whose LHS follows the dot in item j.
+            for x in j.lrafter:
+                if x.lr0_added == _add_count: continue
+                # Add B --> .G to J
+                J.append(x.lr_next)
+                x.lr0_added = _add_count
+                didadd = 1
+               
+    return J
+
+# Compute the LR(0) goto function goto(I,X) where I is a set
+# of LR(0) items and X is a grammar symbol.   This function is written
+# in a way that guarantees uniqueness of the generated goto sets
+# (i.e. the same goto set will never be returned as two different Python
+# objects).  With uniqueness, we can later do fast set comparisons using
+# id(obj) instead of element-wise comparison.
+
+def lr0_goto(I,x):
+    # Compute goto(I,x).  Results are cached two ways: directly under
+    # (id(I),x), and through a trie keyed by x and the ids of the kernel
+    # items -- this guarantees equal goto sets come back as the SAME
+    # object, so later comparisons can use id() instead of element-wise
+    # equality.
+
+    # First we look for a previously cached entry
+    g = _lr_goto_cache.get((id(I),x),None)
+    if g: return g
+
+    # Now we generate the goto set in a way that guarantees uniqueness
+    # of the result
+    
+    # s walks the trie rooted at _lr_goto_cache[x], one level per kernel item.
+    s = _lr_goto_cache.get(x,None)
+    if not s:
+        s = { }
+        _lr_goto_cache[x] = s
+
+    gs = [ ]                      # kernel items of the goto set
+    for p in I:
+        n = p.lr_next             # item with the dot advanced past x
+        if n and n.lrbefore == x:
+            s1 = s.get(id(n),None)
+            if not s1:
+                s1 = { }
+                s[id(n)] = s1
+            gs.append(n)
+            s = s1
+    # '$end' marks the trie leaf holding the finished (closed) goto set.
+    g = s.get('$end',None)
+    if not g:
+        if gs:
+            g = lr0_closure(gs)
+            s['$end'] = g
+        else:
+            s['$end'] = gs
+    _lr_goto_cache[(id(I),x)] = g
+    return g
+
+_lr0_cidhash = { }       # Maps id(itemset) -> state number
+
+# Compute the LR(0) sets of item function
+def lr0_items():
+    # Build C, the canonical collection of LR(0) item sets, starting
+    # from the closure of the augmented start production.
+    
+    C = [ lr0_closure([Productions[0].lr_next]) ]
+    i = 0
+    for I in C:
+        _lr0_cidhash[id(I)] = i
+        i += 1
+
+    # Loop over the items in C and each grammar symbols
+    # (C grows while we iterate, hence the explicit index).
+    i = 0
+    while i < len(C):
+        I = C[i]
+        i += 1
+
+        # Collect all of the symbols that could possibly be in the goto(I,X) sets
+        asyms = { }
+        for ii in I:
+            for s in ii.usyms:
+                asyms[s] = None
+
+        for x in asyms.keys():
+            g = lr0_goto(I,x)
+            if not g:  continue
+            # lr0_goto() returns identical sets as the same object, so
+            # id() suffices to spot states we have already numbered.
+            if _lr0_cidhash.has_key(id(g)): continue
+            _lr0_cidhash[id(g)] = len(C)            
+            C.append(g)
+            
+    return C
+
+# -----------------------------------------------------------------------------
+#                       ==== LALR(1) Parsing ====
+#
+# LALR(1) parsing is almost exactly the same as SLR except that instead of
+# relying upon Follow() sets when performing reductions, a more selective
+# lookahead set that incorporates the state of the LR(0) machine is utilized.
+# Thus, we mainly just have to focus on calculating the lookahead sets.
+#
+# The method used here is due to DeRemer and Pennelo (1982).
+#
+# DeRemer, F. L., and T. J. Pennelo: "Efficient Computation of LALR(1)
+#     Lookahead Sets", ACM Transactions on Programming Languages and Systems,
+#     Vol. 4, No. 4, Oct. 1982, pp. 615-649
+#
+# Further details can also be found in:
+#
+#  J. Tremblay and P. Sorenson, "The Theory and Practice of Compiler Writing",
+#      McGraw-Hill Book Company, (1985).
+#
+# Note:  This implementation is a complete replacement of the LALR(1) 
+#        implementation in PLY-1.x releases.   That version was based on
+#        a less efficient algorithm and it had bugs in its implementation.
+# -----------------------------------------------------------------------------
+
+# -----------------------------------------------------------------------------
+# compute_nullable_nonterminals()
+#
+# Creates a dictionary containing all of the non-terminals that might produce
+# an empty production.   
+# -----------------------------------------------------------------------------
+
+def compute_nullable_nonterminals():
+    nullable = {}
+    num_nullable = 0
+    while 1:
+       for p in Productions[1:]:
+           if p.len == 0:
+                nullable[p.name] = 1
+                continue
+           for t in p.prod:
+                if not nullable.has_key(t): break
+           else:
+                nullable[p.name] = 1
+       if len(nullable) == num_nullable: break
+       num_nullable = len(nullable)
+    return nullable
+
+# -----------------------------------------------------------------------------
+# find_nonterminal_transitions(C)
+#
+# Given a set of LR(0) items, this function finds all of the non-terminal
+# transitions.    These are transitions in which a dot appears immediately before
+# a non-terminal.   Returns a list of tuples of the form (state,N) where state
+# is the state number and N is the nonterminal symbol.
+#
+# The input C is the set of LR(0) items.
+# -----------------------------------------------------------------------------
+
+def find_nonterminal_transitions(C):
+     trans = []
+     for state in range(len(C)):
+         for p in C[state]:
+             if p.lr_index < p.len - 1:
+                  t = (state,p.prod[p.lr_index+1])
+                  if Nonterminals.has_key(t[1]):
+                        if t not in trans: trans.append(t)
+         state = state + 1
+     return trans
+
+# -----------------------------------------------------------------------------
+# dr_relation()
+#
+# Computes the DR(p,A) relationships for non-terminal transitions.  The input
+# is a tuple (state,N) where state is a number and N is a nonterminal symbol.
+#
+# Returns a list of terminals.
+# -----------------------------------------------------------------------------
+
+def dr_relation(C,trans,nullable):
+    dr_set = { }
+    state,N = trans
+    terms = []
+
+    g = lr0_goto(C[state],N)
+    for p in g:
+       if p.lr_index < p.len - 1:
+           a = p.prod[p.lr_index+1]
+           if Terminals.has_key(a):
+               if a not in terms: terms.append(a)
+
+    # This extra bit is to handle the start state
+    if state == 0 and N == Productions[0].prod[0]:
+       terms.append('$end')
+ 
+    return terms
+
+# -----------------------------------------------------------------------------
+# reads_relation()
+#
+# Computes the READS() relation (p,A) READS (t,C).
+# -----------------------------------------------------------------------------
+
+def reads_relation(C, trans, empty):
+    # Look for empty transitions
+    rel = []
+    state, N = trans
+
+    g = lr0_goto(C[state],N)
+    j = _lr0_cidhash.get(id(g),-1)
+    for p in g:
+        if p.lr_index < p.len - 1:
+             a = p.prod[p.lr_index + 1]
+             if empty.has_key(a):
+                  rel.append((j,a))
+
+    return rel
+
+# -----------------------------------------------------------------------------
+# compute_lookback_includes()
+#
+# Determines the lookback and includes relations
+#
+# LOOKBACK:
+# 
+# This relation is determined by running the LR(0) state machine forward.
+# For example, starting with a production "N : . A B C", we run it forward
+# to obtain "N : A B C ."   We then build a relationship between this final
+# state and the starting state.   These relationships are stored in a dictionary
+# lookdict.   
+#
+# INCLUDES:
+#
+# Computes the INCLUDE() relation (p,A) INCLUDES (p',B).   
+#
+# This relation is used to determine non-terminal transitions that occur
+# inside of other non-terminal transition states.   (p,A) INCLUDES (p', B)
+# if the following holds:
+#
+#       B -> LAT, where T -> epsilon and p' -L-> p 
+#
+# L is essentially a prefix (which may be empty), T is a suffix that must be
+# able to derive an empty string.  State p' must lead to state p with the string L.
+# 
+# -----------------------------------------------------------------------------
+
+def compute_lookback_includes(C,trans,nullable):
+    # Returns (lookdict, includedict) for the nonterminal transitions
+    # 'trans' of the LR(0) machine C, with 'nullable' the set of nullable
+    # nonterminals.  See the banner comment above for the relation
+    # definitions.
+    
+    lookdict = {}          # Dictionary of lookback relations
+    includedict = {}       # Dictionary of include relations
+
+    # Make a dictionary of non-terminal transitions
+    dtrans = {}
+    for t in trans:
+        dtrans[t] = 1
+    
+    # Loop over all transitions and compute lookbacks and includes
+    for state,N in trans:
+        lookb = []
+        includes = []
+        for p in C[state]:
+            if p.name != N: continue
+        
+            # Okay, we have a name match.  We now follow the production all the way
+            # through the state machine until we get the . on the right hand side
+
+            lr_index = p.lr_index
+            j = state
+            while lr_index < p.len - 1:
+                 lr_index = lr_index + 1
+                 t = p.prod[lr_index]
+
+                 # Check to see if this symbol and state are a non-terminal transition
+                 if dtrans.has_key((j,t)):
+                       # Yes.  Okay, there is some chance that this is an includes relation
+                       # the only way to know for certain is whether the rest of the 
+                       # production derives empty
+
+                       li = lr_index + 1
+                       while li < p.len:
+                            if Terminals.has_key(p.prod[li]): break      # No forget it
+                            if not nullable.has_key(p.prod[li]): break
+                            li = li + 1
+                       else:
+                            # while-loop ran off the end without breaking:
+                            # everything after t is nullable, so this IS
+                            # an includes relation.
+                            # Appears to be a relation between (j,t) and (state,N)
+                            includes.append((j,t))
+
+                 g = lr0_goto(C[j],t)               # Go to next set             
+                 j = _lr0_cidhash.get(id(g),-1)     # Go to next state
+             
+            # When we get here, j is the final state, now we have to locate the production
+            for r in C[j]:
+                 if r.name != p.name: continue
+                 if r.len != p.len:   continue
+                 i = 0
+                 # This loop is comparing a production ". A B C" with "A B C ."
+                 while i < r.lr_index:
+                      if r.prod[i] != p.prod[i+1]: break
+                      i = i + 1
+                 else:
+                      lookb.append((j,r))
+        for i in includes:
+             if not includedict.has_key(i): includedict[i] = []
+             includedict[i].append((state,N))
+        lookdict[(state,N)] = lookb
+
+    return lookdict,includedict
+
+# -----------------------------------------------------------------------------
+# digraph()
+# traverse()
+#
+# The following two functions are used to compute set valued functions
+# of the form:
+#
+#     F(x) = F'(x) U U{F(y) | x R y}
+#
+# This is used to compute the values of Read() sets as well as FOLLOW sets
+# in LALR(1) generation.
+#
+# Inputs:  X    - An input set
+#          R    - A relation
+#          FP   - Set-valued function
+# ------------------------------------------------------------------------------
+
+def digraph(X,R,FP):
+    N = { }
+    for x in X:
+       N[x] = 0
+    stack = []
+    F = { }
+    for x in X:
+        if N[x] == 0: traverse(x,N,stack,F,X,R,FP)
+    return F
+
+def traverse(x,N,stack,F,X,R,FP):
+    # One depth-first visit of the digraph() computation.
+    # N[x]: 0 = unvisited, stack depth while active, sys.maxint when done.
+    stack.append(x)
+    d = len(stack)
+    N[x] = d
+    F[x] = FP(x)             # F(X) <- F'(x)
+    
+    rel = R(x)               # Get y's related to x
+    for y in rel:
+        if N[y] == 0:
+             traverse(y,N,stack,F,X,R,FP)
+        N[x] = min(N[x],N[y])
+        for a in F.get(y,[]):
+            if a not in F[x]: F[x].append(a)
+    # If x is the root of a strongly connected component, pop the whole
+    # component off the stack and give every member x's final F value.
+    if N[x] == d:
+       N[stack[-1]] = sys.maxint
+       F[stack[-1]] = F[x]
+       element = stack.pop()
+       while element != x:
+           N[stack[-1]] = sys.maxint
+           F[stack[-1]] = F[x]
+           element = stack.pop()
+
+# -----------------------------------------------------------------------------
+# compute_read_sets()
+#
+# Given a set of LR(0) items, this function computes the read sets.
+#
+# Inputs:  C        =  Set of LR(0) items
+#          ntrans   = Set of nonterminal transitions
+#          nullable = Set of empty transitions
+#
+# Returns a set containing the read sets
+# -----------------------------------------------------------------------------
+
+def compute_read_sets(C, ntrans, nullable):
+    FP = lambda x: dr_relation(C,x,nullable)
+    R =  lambda x: reads_relation(C,x,nullable)
+    F = digraph(ntrans,R,FP)
+    return F
+
+# -----------------------------------------------------------------------------
+# compute_follow_sets()
+#
+# Given a set of LR(0) items, a set of non-terminal transitions, a readset, 
+# and an include set, this function computes the follow sets
+#
+# Follow(p,A) = Read(p,A) U U {Follow(p',B) | (p,A) INCLUDES (p',B)}
+#
+# Inputs:    
+#            ntrans     = Set of nonterminal transitions
+#            readsets   = Readset (previously computed)
+#            inclsets   = Include sets (previously computed)
+#
+# Returns a set containing the follow sets      
+# -----------------------------------------------------------------------------
+
+def compute_follow_sets(ntrans,readsets,inclsets):
+     FP = lambda x: readsets[x]
+     R  = lambda x: inclsets.get(x,[])
+     F = digraph(ntrans,R,FP)
+     return F
+
+# -----------------------------------------------------------------------------
+# add_lookaheads()
+#
+# Attaches the lookahead symbols to grammar rules. 
+#
+# Inputs:    lookbacks         -  Set of lookback relations
+#            followset         -  Computed follow set
+#
+# This function directly attaches the lookaheads to productions contained
+# in the lookbacks set
+# -----------------------------------------------------------------------------
+
+def add_lookaheads(lookbacks,followset):
+    for trans,lb in lookbacks.items():
+        # Loop over productions in lookback
+        for state,p in lb:
+             if not p.lookaheads.has_key(state):
+                  p.lookaheads[state] = []
+             f = followset.get(trans,[])
+             for a in f:
+                  if a not in p.lookaheads[state]: p.lookaheads[state].append(a)
+
+# -----------------------------------------------------------------------------
+# add_lalr_lookaheads()
+#
+# This function does all of the work of adding lookahead information for use
+# with LALR parsing
+# -----------------------------------------------------------------------------
+
+def add_lalr_lookaheads(C):
+    # Determine all of the nullable nonterminals
+    nullable = compute_nullable_nonterminals()
+
+    # Find all non-terminal transitions
+    trans = find_nonterminal_transitions(C)
+
+    # Compute read sets
+    readsets = compute_read_sets(C,trans,nullable)
+
+    # Compute lookback/includes relations
+    lookd, included = compute_lookback_includes(C,trans,nullable)
+
+    # Compute LALR FOLLOW sets
+    followsets = compute_follow_sets(trans,readsets,included)
+    
+    # Add all of the lookaheads
+    add_lookaheads(lookd,followsets)
+
+# -----------------------------------------------------------------------------
+# lr_parse_table()
+#
+# This function constructs the parse tables for SLR or LALR
+# -----------------------------------------------------------------------------
+def lr_parse_table(method):
+    global _lr_method
+    goto = _lr_goto           # Goto array
+    action = _lr_action       # Action array
+    actionp = { }             # Action production array (temporary)
+
+    _lr_method = method
+    
+    n_srconflict = 0
+    n_rrconflict = 0
+
+    if yaccdebug:
+        sys.stderr.write("yacc: Generating %s parsing table...\n" % method)        
+        _vf.write("\n\nParsing method: %s\n\n" % method)
+        
+    # Step 1: Construct C = { I0, I1, ... IN}, collection of LR(0) items
+    # This determines the number of states
+    
+    C = lr0_items()
+
+    if method == 'LALR':
+        add_lalr_lookaheads(C)
+
+    # Build the parser table, state by state
+    st = 0
+    for I in C:
+        # Loop over each production in I
+        actlist = [ ]              # List of actions
+        
+        if yaccdebug:
+            _vf.write("\nstate %d\n\n" % st)
+            for p in I:
+                _vf.write("    (%d) %s\n" % (p.number, str(p)))
+            _vf.write("\n")
+
+        for p in I:
+            try:
+                if p.prod[-1] == ".":
+                    if p.name == "S'":
+                        # Start symbol. Accept!
+                        action[st,"$end"] = 0
+                        actionp[st,"$end"] = p
+                    else:
+                        # We are at the end of a production.  Reduce!
+                        if method == 'LALR':
+                            laheads = p.lookaheads[st]
+                        else:
+                            laheads = Follow[p.name]
+                        for a in laheads:
+                            actlist.append((a,p,"reduce using rule %d (%s)" % (p.number,p)))
+                            r = action.get((st,a),None)
+                            if r is not None:
+                                # Whoa. Have a shift/reduce or reduce/reduce conflict
+                                if r > 0:
+                                    # Need to decide on shift or reduce here
+                                    # By default we favor shifting. Need to add
+                                    # some precedence rules here.
+                                    sprec,slevel = Productions[actionp[st,a].number].prec                                    
+                                    rprec,rlevel = Precedence.get(a,('right',0))
+                                    if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')):
+                                        # We really need to reduce here.  
+                                        action[st,a] = -p.number
+                                        actionp[st,a] = p
+                                        if not slevel and not rlevel:
+                                            _vfc.write("shift/reduce conflict in state %d resolved as reduce.\n" % st)
+                                            _vf.write("  ! shift/reduce conflict for %s resolved as reduce.\n" % a)
+                                            n_srconflict += 1
+                                    elif (slevel == rlevel) and (rprec == 'nonassoc'):
+                                        action[st,a] = None
+                                    else:
+                                        # Hmmm. Guess we'll keep the shift
+                                        if not rlevel:
+                                            _vfc.write("shift/reduce conflict in state %d resolved as shift.\n" % st)
+                                            _vf.write("  ! shift/reduce conflict for %s resolved as shift.\n" % a)
+                                            n_srconflict +=1                                    
+                                elif r < 0:
+                                    # Reduce/reduce conflict.   In this case, we favor the rule
+                                    # that was defined first in the grammar file
+                                    oldp = Productions[-r]
+                                    pp = Productions[p.number]
+                                    if oldp.line > pp.line:
+                                        action[st,a] = -p.number
+                                        actionp[st,a] = p
+                                    # sys.stderr.write("Reduce/reduce conflict in state %d\n" % st)
+                                    n_rrconflict += 1
+                                    _vfc.write("reduce/reduce conflict in state %d resolved using rule %d (%s).\n" % (st, actionp[st,a].number, actionp[st,a]))
+                                    _vf.write("  ! reduce/reduce conflict for %s resolved using rule %d (%s).\n" % (a,actionp[st,a].number, actionp[st,a]))
+                                else:
+                                    sys.stderr.write("Unknown conflict in state %d\n" % st)
+                            else:
+                                action[st,a] = -p.number
+                                actionp[st,a] = p
+                else:
+                    i = p.lr_index
+                    a = p.prod[i+1]       # Get symbol right after the "."
+                    if Terminals.has_key(a):
+                        g = lr0_goto(I,a)
+                        j = _lr0_cidhash.get(id(g),-1)
+                        if j >= 0:
+                            # We are in a shift state
+                            actlist.append((a,p,"shift and go to state %d" % j))
+                            r = action.get((st,a),None)
+                            if r is not None:
+                                # Whoa have a shift/reduce or shift/shift conflict
+                                if r > 0:
+                                    if r != j:
+                                        sys.stderr.write("Shift/shift conflict in state %d\n" % st)
+                                elif r < 0:
+                                    # Do a precedence check.
+                                    #   -  if precedence of reduce rule is higher, we reduce.
+                                    #   -  if precedence of reduce is same and left assoc, we reduce.
+                                    #   -  otherwise we shift
+                                    rprec,rlevel = Productions[actionp[st,a].number].prec
+                                    sprec,slevel = Precedence.get(a,('right',0))
+                                    if (slevel > rlevel) or ((slevel == rlevel) and (rprec != 'left')):
+                                        # We decide to shift here... highest precedence to shift
+                                        action[st,a] = j
+                                        actionp[st,a] = p
+                                        if not rlevel:
+                                            n_srconflict += 1
+                                            _vfc.write("shift/reduce conflict in state %d resolved as shift.\n" % st)
+                                            _vf.write("  ! shift/reduce conflict for %s resolved as shift.\n" % a)
+                                    elif (slevel == rlevel) and (rprec == 'nonassoc'):
+                                        action[st,a] = None
+                                    else:                                            
+                                        # Hmmm. Guess we'll keep the reduce
+                                        if not slevel and not rlevel:
+                                            n_srconflict +=1
+                                            _vfc.write("shift/reduce conflict in state %d resolved as reduce.\n" % st)
+                                            _vf.write("  ! shift/reduce conflict for %s resolved as reduce.\n" % a)
+                                            
+                                else:
+                                    sys.stderr.write("Unknown conflict in state %d\n" % st)
+                            else:
+                                action[st,a] = j
+                                actionp[st,a] = p
+                                
+            except StandardError,e:
+                raise YaccError, "Hosed in lr_parse_table", e
+
+        # Print the actions associated with each terminal
+        if yaccdebug:
+          _actprint = { }
+          for a,p,m in actlist:
+            if action.has_key((st,a)):
+                if p is actionp[st,a]:
+                    _vf.write("    %-15s %s\n" % (a,m))
+                    _actprint[(a,m)] = 1
+          _vf.write("\n")
+          for a,p,m in actlist:
+            if action.has_key((st,a)):
+                if p is not actionp[st,a]:
+                    if not _actprint.has_key((a,m)):
+                        _vf.write("  ! %-15s [ %s ]\n" % (a,m))
+                        _actprint[(a,m)] = 1
+            
+        # Construct the goto table for this state
+        if yaccdebug:
+            _vf.write("\n")
+        nkeys = { }
+        for ii in I:
+            for s in ii.usyms:
+                if Nonterminals.has_key(s):
+                    nkeys[s] = None
+        for n in nkeys.keys():
+            g = lr0_goto(I,n)
+            j = _lr0_cidhash.get(id(g),-1)            
+            if j >= 0:
+                goto[st,n] = j
+                if yaccdebug:
+                    _vf.write("    %-30s shift and go to state %d\n" % (n,j))
+
+        st += 1
+
+    if yaccdebug:
+        if n_srconflict == 1:
+            sys.stderr.write("yacc: %d shift/reduce conflict\n" % n_srconflict)
+        if n_srconflict > 1:
+            sys.stderr.write("yacc: %d shift/reduce conflicts\n" % n_srconflict)
+        if n_rrconflict == 1:
+            sys.stderr.write("yacc: %d reduce/reduce conflict\n" % n_rrconflict)
+        if n_rrconflict > 1:
+            sys.stderr.write("yacc: %d reduce/reduce conflicts\n" % n_rrconflict)
+
+# -----------------------------------------------------------------------------
+#                          ==== LR Utility functions ====
+# -----------------------------------------------------------------------------
+
+# -----------------------------------------------------------------------------
+# _lr_write_tables()
+#
+# This function writes the LR parsing tables to a file
+# -----------------------------------------------------------------------------
+
+def lr_write_tables(modulename=tab_module,outputdir=''):
+    """Write the computed LR parsing tables to <outputdir>/<modulename>.py.
+
+    The generated module records the parsing method (_lr_method), a digest
+    of the grammar signature (_lr_signature, used by lr_read_tables to
+    detect a stale cache), the action and goto tables, and the production
+    list.  On IOError a message is printed and the function returns
+    without raising.
+    """
+    filename = os.path.join(outputdir,modulename) + ".py"
+    try:
+        f = open(filename,"w")
+
+        f.write("""
+# %s
+# This file is automatically generated. Do not edit.
+
+_lr_method = %s
+
+_lr_signature = %s
+""" % (filename, repr(_lr_method), repr(Signature.digest())))
+
+        # Change smaller to 0 to go back to original tables
+        smaller = 1
+                
+        # Factor out names to try and make smaller
+        if smaller:
+            items = { }
+        
+            # Group the (state, symbol) -> action entries by symbol:
+            # items[symbol] = ([state, ...], [action, ...]) so each symbol
+            # name is written out only once.
+            for k,v in _lr_action.items():
+                i = items.get(k[1])
+                if not i:
+                    i = ([],[])
+                    items[k[1]] = i
+                i[0].append(k[0])
+                i[1].append(v)
+
+            f.write("\n_lr_action_items = {")
+            for k,v in items.items():
+                f.write("%r:([" % k)
+                for i in v[0]:
+                    f.write("%r," % i)
+                f.write("],[")
+                for i in v[1]:
+                    f.write("%r," % i)
+                           
+                f.write("]),")
+            f.write("}\n")
+
+            # Emitted code rebuilds the flat (state, symbol) -> action dict
+            # when the table module is imported.
+            f.write("""
+_lr_action = { }
+for _k, _v in _lr_action_items.items():
+   for _x,_y in zip(_v[0],_v[1]):
+       _lr_action[(_x,_k)] = _y
+del _lr_action_items
+""")
+            
+        else:
+            f.write("\n_lr_action = { ");
+            for k,v in _lr_action.items():
+                f.write("(%r,%r):%r," % (k[0],k[1],v))
+            f.write("}\n");
+
+        if smaller:
+            # Factor out names to try and make smaller
+            # (same symbol-grouping scheme as for the action table above)
+            items = { }
+        
+            for k,v in _lr_goto.items():
+                i = items.get(k[1])
+                if not i:
+                    i = ([],[])
+                    items[k[1]] = i
+                i[0].append(k[0])
+                i[1].append(v)
+
+            f.write("\n_lr_goto_items = {")
+            for k,v in items.items():
+                f.write("%r:([" % k)
+                for i in v[0]:
+                    f.write("%r," % i)
+                f.write("],[")
+                for i in v[1]:
+                    f.write("%r," % i)
+                           
+                f.write("]),")
+            f.write("}\n")
+
+            f.write("""
+_lr_goto = { }
+for _k, _v in _lr_goto_items.items():
+   for _x,_y in zip(_v[0],_v[1]):
+       _lr_goto[(_x,_k)] = _y
+del _lr_goto_items
+""")
+        else:
+            f.write("\n_lr_goto = { ");
+            for k,v in _lr_goto.items():
+                f.write("(%r,%r):%r," % (k[0],k[1],v))                    
+            f.write("}\n");
+
+        # Write production table
+        # Each entry is (name, len, funcname, file, line) or None; the
+        # reader in yacc() rebuilds MiniProduction objects from these.
+        f.write("_lr_productions = [\n")
+        for p in Productions:
+            if p:
+                if (p.func):
+                    f.write("  (%r,%d,%r,%r,%d),\n" % (p.name, p.len, p.func.__name__,p.file,p.line))
+                else:
+                    f.write("  (%r,%d,None,None,None),\n" % (p.name, p.len))
+            else:
+                f.write("  None,\n")
+        f.write("]\n")
+        
+        f.close()
+
+    except IOError,e:
+        print "Unable to create '%s'" % filename
+        print e
+        return
+
+def lr_read_tables(module=tab_module,optimize=0):
+    """Attempt to load previously generated parser tables.
+
+    Imports 'module' and, when optimize is true or its stored
+    _lr_signature matches the current grammar's Signature digest,
+    installs the cached tables into the module-level globals and
+    returns 1.  Returns 0 on a signature mismatch, or when the module
+    cannot be imported or lacks the expected attributes.
+
+    NOTE(review): the module name is interpolated into an exec'd import
+    statement, so callers must pass a trusted name.
+    """
+    global _lr_action, _lr_goto, _lr_productions, _lr_method
+    try:
+        exec "import %s as parsetab" % module
+        
+        if (optimize) or (Signature.digest() == parsetab._lr_signature):
+            _lr_action = parsetab._lr_action
+            _lr_goto   = parsetab._lr_goto
+            _lr_productions = parsetab._lr_productions
+            _lr_method = parsetab._lr_method
+            return 1
+        else:
+            return 0
+        
+    except (ImportError,AttributeError):
+        return 0
+
+
+# Available instance types.  This is used when parsers are defined by a class.
+# it's a little funky because I want to preserve backwards compatibility
+# with Python 2.0 where types.ObjectType is undefined.
+
+# types.ObjectType (i.e. 'object') exists on Python >= 2.2; including it in
+# the tuple lets isinstance() accept new-style instances as well as classic
+# ones.  On 2.0/2.1 the attribute lookup raises AttributeError.
+try:
+   _INSTANCETYPE = (types.InstanceType, types.ObjectType)
+except AttributeError:
+   _INSTANCETYPE = types.InstanceType
+
+# -----------------------------------------------------------------------------
+# yacc(module)
+#
+# Build the parser module
+# -----------------------------------------------------------------------------
+
+# <ah> Add parserclass parameter.
+def yacc(method=default_lr, debug=yaccdebug, module=None, tabmodule=tab_module, start=None, check_recursion=1, optimize=0,write_tables=1,debugfile=debug_file,outputdir='', parserclass=Parser):
+    """Construct an LR parser from the grammar rules found in 'module'.
+
+    The grammar (tokens list, p_* rule functions, optional 'start',
+    'precedence', 'require' and 'p_error') is taken from 'module' -- a
+    module object or an instance -- or, when module is None, from the
+    caller's global namespace.  Unless a previously written table module
+    with a matching signature can be reused, the tables are computed
+    ('SLR' or 'LALR'), optionally written to 'tabmodule' and to a debug
+    file.  Returns a ParserPrototype; as a convenience it also rebinds
+    the module-level 'parser' and 'parse' globals.
+
+    NOTE(review): the 'parserclass' parameter (added by <ah>) is never
+    referenced in this body; confirm whether it is still needed.
+    """
+    global yaccdebug
+    yaccdebug = debug
+    
+    initialize_vars()
+    files = { }
+    error = 0
+
+
+    # Add parsing method to signature
+    Signature.update(method)
+    
+    # If a "module" parameter was supplied, extract its dictionary.
+    # Note: a module may in fact be an instance as well.
+    
+    if module:
+        # User supplied a module object.
+        if isinstance(module, types.ModuleType):
+            ldict = module.__dict__
+        elif isinstance(module, _INSTANCETYPE):
+            # Instance: collect its attributes into a plain dictionary.
+            _items = [(k,getattr(module,k)) for k in dir(module)]
+            ldict = { }
+            for i in _items:
+                ldict[i[0]] = i[1]
+        else:
+            raise ValueError,"Expected a module"
+        
+    else:
+        # No module given.  We might be able to get information from the caller.
+        # Throw an exception and unwind the traceback to get the globals
+        
+        try:
+            raise RuntimeError
+        except RuntimeError:
+            e,b,t = sys.exc_info()
+            f = t.tb_frame
+            f = f.f_back           # Walk out to our calling function
+            ldict = f.f_globals    # Grab its globals dictionary
+
+    # Add starting symbol to signature
+    if not start:
+        start = ldict.get("start",None)
+    if start:
+        Signature.update(start)
+
+    # If running in optimized mode.  We're going to
+    # trust the cached tables and rebuild only lightweight
+    # MiniProduction records from the stored production tuples.
+
+    if (optimize and lr_read_tables(tabmodule,1)):
+        # Read parse table
+        del Productions[:]
+        for p in _lr_productions:
+            if not p:
+                Productions.append(None)
+            else:
+                # Stored tuple layout: (name, len, funcname, file, line)
+                m = MiniProduction()
+                m.name = p[0]
+                m.len  = p[1]
+                m.file = p[3]
+                m.line = p[4]
+                if p[2]:
+                    m.func = ldict[p[2]]
+                Productions.append(m)
+        
+    else:
+        # Get the tokens map
+        if (module and isinstance(module,_INSTANCETYPE)):
+            tokens = getattr(module,"tokens",None)
+        else:
+            tokens = ldict.get("tokens",None)
+    
+        if not tokens:
+            raise YaccError,"module does not define a list 'tokens'"
+        if not (isinstance(tokens,types.ListType) or isinstance(tokens,types.TupleType)):
+            raise YaccError,"tokens must be a list or tuple."
+
+        # Check to see if a requires dictionary is defined.
+        requires = ldict.get("require",None)
+        if requires:
+            if not (isinstance(requires,types.DictType)):
+                raise YaccError,"require must be a dictionary."
+
+            for r,v in requires.items():
+                try:
+                    if not (isinstance(v,types.ListType)):
+                        raise TypeError
+                    # Split dotted specifications into path components.
+                    v1 = [x.split(".") for x in v]
+                    Requires[r] = v1
+                except StandardError:
+                    print "Invalid specification for rule '%s' in require. Expected a list of strings" % r            
+
+        
+        # Build the dictionary of terminals.  We a record a 0 in the
+        # dictionary to track whether or not a terminal is actually
+        # used in the grammar
+
+        if 'error' in tokens:
+            print "yacc: Illegal token 'error'.  Is a reserved word."
+            raise YaccError,"Illegal token name"
+
+        for n in tokens:
+            if Terminals.has_key(n):
+                print "yacc: Warning. Token '%s' multiply defined." % n
+            Terminals[n] = [ ]
+
+        Terminals['error'] = [ ]
+
+        # Get the precedence map (if any)
+        prec = ldict.get("precedence",None)
+        if prec:
+            if not (isinstance(prec,types.ListType) or isinstance(prec,types.TupleType)):
+                raise YaccError,"precedence must be a list or tuple."
+            add_precedence(prec)
+            Signature.update(repr(prec))
+
+        for n in tokens:
+            if not Precedence.has_key(n):
+                Precedence[n] = ('right',0)         # Default, right associative, 0 precedence
+
+        # Look for error handler
+        ef = ldict.get('p_error',None)
+        if ef:
+            if isinstance(ef,types.FunctionType):
+                ismethod = 0
+            elif isinstance(ef, types.MethodType):
+                ismethod = 1
+            else:
+                raise YaccError,"'p_error' defined, but is not a function or method."                
+            eline = ef.func_code.co_firstlineno
+            efile = ef.func_code.co_filename
+            files[efile] = None
+
+            # A bound method carries an implicit 'self', hence 1+ismethod.
+            if (ef.func_code.co_argcount != 1+ismethod):
+                raise YaccError,"%s:%d: p_error() requires 1 argument." % (efile,eline)
+            global Errorfunc
+            Errorfunc = ef
+        else:
+            print "yacc: Warning. no p_error() function is defined."
+            
+        # Get the list of built-in functions with p_ prefix
+        symbols = [ldict[f] for f in ldict.keys()
+               if (type(ldict[f]) in (types.FunctionType, types.MethodType) and ldict[f].__name__[:2] == 'p_'
+                   and ldict[f].__name__ != 'p_error')]
+
+        # Check for non-empty symbols
+        if len(symbols) == 0:
+            raise YaccError,"no rules of the form p_rulename are defined."
+    
+        # Sort the symbols by line number
+        symbols.sort(lambda x,y: cmp(x.func_code.co_firstlineno,y.func_code.co_firstlineno))
+
+        # Add all of the symbols to the grammar
+        for f in symbols:
+            if (add_function(f)) < 0:
+                error += 1
+            else:
+                files[f.func_code.co_filename] = None
+
+        # Make a signature of the docstrings
+        for f in symbols:
+            if f.__doc__:
+                Signature.update(f.__doc__)
+    
+        lr_init_vars()
+
+        if error:
+            raise YaccError,"Unable to construct parser."
+
+        if not lr_read_tables(tabmodule):
+
+            # Validate files
+            for filename in files.keys():
+                if not validate_file(filename):
+                    error = 1
+
+            # Validate dictionary
+            validate_dict(ldict)
+
+            if start and not Prodnames.has_key(start):
+                raise YaccError,"Bad starting symbol '%s'" % start
+        
+            augment_grammar(start)    
+            error = verify_productions(cycle_check=check_recursion)
+            # NOTE(review): 'type(f)' below inspects the dictionary *key*
+            # (a string), never a function, so 'otherfunc' is always empty
+            # -- and it is never used afterwards.  'type(ldict[f])' was
+            # presumably intended; harmless as written.
+            otherfunc = [ldict[f] for f in ldict.keys()
+               if (type(f) in (types.FunctionType,types.MethodType) and ldict[f].__name__[:2] != 'p_')]
+
+            if error:
+                raise YaccError,"Unable to construct parser."
+            
+            build_lritems()
+            compute_first1()
+            compute_follow(start)
+        
+            if method in ['SLR','LALR']:
+                lr_parse_table(method)
+            else:
+                raise YaccError, "Unknown parsing method '%s'" % method
+
+            if write_tables:
+                lr_write_tables(tabmodule,outputdir)        
+    
+            if yaccdebug:
+                try:
+                    f = open(os.path.join(outputdir,debugfile),"w")
+                    f.write(_vfc.getvalue())
+                    f.write("\n\n")
+                    f.write(_vf.getvalue())
+                    f.close()
+                except IOError,e:
+                    print "yacc: can't create '%s'" % debugfile,e
+        
+    # Made it here.   Create a parser object and set up its internal state.
+    # Set global parse() method to bound method of parser object.
+
+    g = ParserPrototype("xyzzy")
+    g.productions = Productions
+    g.errorfunc = Errorfunc
+    g.action = _lr_action
+    g.goto   = _lr_goto
+    g.method = _lr_method
+    g.require = Requires
+
+    global parser
+    parser = g.init_parser()
+
+    global parse
+    parse = parser.parse
+
+    # Clean up all of the globals we created
+    if (not optimize):
+        yacc_cleanup()
+    return g
+
+# <ah> Allow multiple instances of parser
+class ParserPrototype(object):
+    """Snapshot of the tables produced by yacc().
+
+    yacc() assigns productions, errorfunc, action, goto, method and
+    require onto the prototype; init_parser() copies them onto a Parser
+    so several independent parser instances can be created from one
+    grammar build (see the "allow multiple instances" note above).
+    """
+    def __init__(self, magic=None):
+        # The magic token discourages direct construction; use yacc().
+        if magic != "xyzzy":
+            raise YaccError, 'Use yacc()'
+
+    def init_parser(self, parser=None):
+        # Populate 'parser' (a fresh Parser if none given) with the
+        # captured tables and return it.
+        if not parser:
+            parser = Parser()
+        parser.productions = self.productions
+        parser.errorfunc = self.errorfunc
+        parser.action = self.action
+        parser.goto   = self.goto
+        parser.method = self.method
+        parser.require = self.require
+        return parser
+
+# yacc_cleanup function.  Delete all of the global variables
+# used during table construction
+
+def yacc_cleanup():
+    """Delete the module-level globals used during table construction.
+
+    Called from yacc() after a successful non-optimized build so the
+    large intermediate structures can be garbage collected.  The built
+    parser keeps its own references via ParserPrototype.
+    """
+    global _lr_action, _lr_goto, _lr_method, _lr_goto_cache
+    del _lr_action, _lr_goto, _lr_method, _lr_goto_cache
+
+    global Productions, Prodnames, Prodmap, Terminals 
+    global Nonterminals, First, Follow, Precedence, LRitems
+    global Errorfunc, Signature, Requires
+    
+    del Productions, Prodnames, Prodmap, Terminals
+    del Nonterminals, First, Follow, Precedence, LRitems
+    del Errorfunc, Signature, Requires
+    
+    global _vf, _vfc
+    del _vf, _vfc
+    
+    
+# Stub that raises an error if parsing is attempted without first calling yacc()
+def parse(*args,**kwargs):
+    # yacc() rebinds the global 'parse' to the real bound method on success.
+    raise YaccError, "yacc: No parser built with yacc()"
+

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/__init__.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/__init__.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/__init__.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+
+"""
+This module is the backend to ctypesgen; it contains classes to
+produce the final .py output files.
+"""
+
+from printer import WrapperPrinter
+
+__all__ = ["WrapperPrinter"]
\ No newline at end of file

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/defaultheader.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/defaultheader.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/defaultheader.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,9 @@
+'''Wrapper for %(name)s
+
+Generated with:
+%(argv)s
+
+Do not modify this file.
+'''
+
+# Declare the docstring markup so doc tools render it as reStructuredText.
+__docformat__ =  'restructuredtext'

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/preamble.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/preamble.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/preamble.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,290 @@
+import ctypes, os, sys
+from ctypes import *
+
+# Select a signed integer type whose size matches size_t and expose it
+# as c_ptrdiff_t.
+_int_types = (c_int16, c_int32)
+if hasattr(ctypes, 'c_int64'):
+    # Some builds of ctypes apparently do not have c_int64
+    # defined; it's a pretty good bet that these builds do not
+    # have 64-bit pointers.
+    _int_types += (c_int64,)
+for t in _int_types:
+    if sizeof(t) == sizeof(c_size_t):
+        c_ptrdiff_t = t
+# NOTE(review): if no candidate matched sizeof(c_size_t), c_ptrdiff_t is
+# left undefined and its first use would raise NameError.
+del t
+del _int_types
+
+class c_void(Structure):
+    """Stand-in structure used so void pointers can be POINTER(c_void)."""
+    # c_void_p is a buggy return type, converting to int, so
+    # POINTER(None) == c_void_p is actually written as
+    # POINTER(c_void), so it can be treated as a real pointer.
+    _fields_ = [('dummy', c_int)]
+
+def POINTER(obj):
+    """Wrap ctypes.POINTER so the resulting pointer type accepts None.
+
+    Note this deliberately shadows the POINTER pulled in by the
+    'from ctypes import *' above; the patched from_param converts None
+    into a default-constructed (NULL) pointer instance.
+    """
+    p = ctypes.POINTER(obj)
+
+    # Convert None to a real NULL pointer to work around bugs
+    # in how ctypes handles None on 64-bit platforms
+    if not isinstance(p.from_param, classmethod):
+        def from_param(cls, x):
+            if x is None:
+                return cls()
+            else:
+                return x
+        p.from_param = classmethod(from_param)
+
+    return p
+
+class UserString:
+    """String-like wrapper class: the value lives in self.data and all
+    standard string operations are forwarded to it, re-wrapping results
+    in self.__class__ where a string is returned.  (Bundled here so the
+    generated wrappers have no external dependencies -- appears to be a
+    copy of the classic UserString; TODO confirm provenance.)
+    """
+    def __init__(self, seq):
+        # Accept a plain string, another UserString (copied), or any
+        # object convertible via str().
+        if isinstance(seq, basestring):
+            self.data = seq
+        elif isinstance(seq, UserString):
+            self.data = seq.data[:]
+        else:
+            self.data = str(seq)
+    def __str__(self): return str(self.data)
+    def __repr__(self): return repr(self.data)
+    def __int__(self): return int(self.data)
+    def __long__(self): return long(self.data)
+    def __float__(self): return float(self.data)
+    def __complex__(self): return complex(self.data)
+    def __hash__(self): return hash(self.data)
+
+    def __cmp__(self, string):
+        if isinstance(string, UserString):
+            return cmp(self.data, string.data)
+        else:
+            return cmp(self.data, string)
+    def __contains__(self, char):
+        return char in self.data
+
+    def __len__(self): return len(self.data)
+    def __getitem__(self, index): return self.__class__(self.data[index])
+    def __getslice__(self, start, end):
+        start = max(start, 0); end = max(end, 0)
+        return self.__class__(self.data[start:end])
+
+    def __add__(self, other):
+        if isinstance(other, UserString):
+            return self.__class__(self.data + other.data)
+        elif isinstance(other, basestring):
+            return self.__class__(self.data + other)
+        else:
+            return self.__class__(self.data + str(other))
+    def __radd__(self, other):
+        if isinstance(other, basestring):
+            return self.__class__(other + self.data)
+        else:
+            return self.__class__(str(other) + self.data)
+    def __mul__(self, n):
+        return self.__class__(self.data*n)
+    __rmul__ = __mul__
+    def __mod__(self, args):
+        return self.__class__(self.data % args)
+
+    # the following methods are defined in alphabetical order:
+    def capitalize(self): return self.__class__(self.data.capitalize())
+    def center(self, width, *args):
+        return self.__class__(self.data.center(width, *args))
+    def count(self, sub, start=0, end=sys.maxint):
+        return self.data.count(sub, start, end)
+    def decode(self, encoding=None, errors=None): # XXX improve this?
+        if encoding:
+            if errors:
+                return self.__class__(self.data.decode(encoding, errors))
+            else:
+                return self.__class__(self.data.decode(encoding))
+        else:
+            return self.__class__(self.data.decode())
+    def encode(self, encoding=None, errors=None): # XXX improve this?
+        if encoding:
+            if errors:
+                return self.__class__(self.data.encode(encoding, errors))
+            else:
+                return self.__class__(self.data.encode(encoding))
+        else:
+            return self.__class__(self.data.encode())
+    def endswith(self, suffix, start=0, end=sys.maxint):
+        return self.data.endswith(suffix, start, end)
+    def expandtabs(self, tabsize=8):
+        return self.__class__(self.data.expandtabs(tabsize))
+    def find(self, sub, start=0, end=sys.maxint):
+        return self.data.find(sub, start, end)
+    def index(self, sub, start=0, end=sys.maxint):
+        return self.data.index(sub, start, end)
+    def isalpha(self): return self.data.isalpha()
+    def isalnum(self): return self.data.isalnum()
+    def isdecimal(self): return self.data.isdecimal()
+    def isdigit(self): return self.data.isdigit()
+    def islower(self): return self.data.islower()
+    def isnumeric(self): return self.data.isnumeric()
+    def isspace(self): return self.data.isspace()
+    def istitle(self): return self.data.istitle()
+    def isupper(self): return self.data.isupper()
+    def join(self, seq): return self.data.join(seq)
+    def ljust(self, width, *args):
+        return self.__class__(self.data.ljust(width, *args))
+    def lower(self): return self.__class__(self.data.lower())
+    def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars))
+    def partition(self, sep):
+        return self.data.partition(sep)
+    def replace(self, old, new, maxsplit=-1):
+        return self.__class__(self.data.replace(old, new, maxsplit))
+    def rfind(self, sub, start=0, end=sys.maxint):
+        return self.data.rfind(sub, start, end)
+    def rindex(self, sub, start=0, end=sys.maxint):
+        return self.data.rindex(sub, start, end)
+    def rjust(self, width, *args):
+        return self.__class__(self.data.rjust(width, *args))
+    def rpartition(self, sep):
+        return self.data.rpartition(sep)
+    def rstrip(self, chars=None): return self.__class__(self.data.rstrip(chars))
+    def split(self, sep=None, maxsplit=-1):
+        return self.data.split(sep, maxsplit)
+    def rsplit(self, sep=None, maxsplit=-1):
+        return self.data.rsplit(sep, maxsplit)
+    def splitlines(self, keepends=0): return self.data.splitlines(keepends)
+    def startswith(self, prefix, start=0, end=sys.maxint):
+        return self.data.startswith(prefix, start, end)
+    def strip(self, chars=None): return self.__class__(self.data.strip(chars))
+    def swapcase(self): return self.__class__(self.data.swapcase())
+    def title(self): return self.__class__(self.data.title())
+    def translate(self, *args):
+        return self.__class__(self.data.translate(*args))
+    def upper(self): return self.__class__(self.data.upper())
+    def zfill(self, width): return self.__class__(self.data.zfill(width))
+
+class MutableString(UserString):
+    """mutable string objects
+
+    Python strings are immutable objects.  This has the advantage, that
+    strings may be used as dictionary keys.  If this property isn't needed
+    and you insist on changing string values in place instead, you may cheat
+    and use MutableString.
+
+    But the purpose of this class is an educational one: to prevent
+    people from inventing their own mutable string class derived
+    from UserString and than forget thereby to remove (override) the
+    __hash__ method inherited from UserString.  This would lead to
+    errors that would be very hard to track down.
+
+    A faster and better solution is to rewrite your program using lists."""
+    def __init__(self, string=""):
+        self.data = string
+    def __hash__(self):
+        # Deliberately unhashable: mutation would invalidate any hash.
+        raise TypeError, "unhashable type (it is mutable)"
+    def __setitem__(self, index, sub):
+        # Supports negative indices like a real sequence; rebuilds the
+        # string around the replaced character.
+        if index < 0:
+            index += len(self.data)
+        if index < 0 or index >= len(self.data): raise IndexError
+        self.data = self.data[:index] + sub + self.data[index+1:]
+    def __delitem__(self, index):
+        if index < 0:
+            index += len(self.data)
+        if index < 0 or index >= len(self.data): raise IndexError
+        self.data = self.data[:index] + self.data[index+1:]
+    def __setslice__(self, start, end, sub):
+        start = max(start, 0); end = max(end, 0)
+        if isinstance(sub, UserString):
+            self.data = self.data[:start]+sub.data+self.data[end:]
+        elif isinstance(sub, basestring):
+            self.data = self.data[:start]+sub+self.data[end:]
+        else:
+            self.data =  self.data[:start]+str(sub)+self.data[end:]
+    def __delslice__(self, start, end):
+        start = max(start, 0); end = max(end, 0)
+        self.data = self.data[:start] + self.data[end:]
+    def immutable(self):
+        # Return an immutable (hashable) UserString copy of the value.
+        return UserString(self.data)
+    def __iadd__(self, other):
+        if isinstance(other, UserString):
+            self.data += other.data
+        elif isinstance(other, basestring):
+            self.data += other
+        else:
+            self.data += str(other)
+        return self
+    def __imul__(self, n):
+        self.data *= n
+        return self
+
+class String(MutableString, Union):
+    """Mutable string / pointer hybrid used for C 'char *' parameters.
+
+    As a ctypes Union it overlays two views of the same pointer: 'raw'
+    (POINTER(c_char)) and 'data' (c_char_p).  from_param accepts None,
+    strings, existing pointers, or integers (treated as raw addresses).
+    """
+
+    _fields_ = [('raw', POINTER(c_char)),
+                ('data', c_char_p)]
+
+    def __init__(self, obj=""):
+        # Strings go through the c_char_p view; anything else is assumed
+        # to already be a pointer and assigned to the raw view.
+        if isinstance(obj, (str, unicode, UserString)):
+            self.data = str(obj)
+        else:
+            self.raw = obj
+
+    def __len__(self):
+        # A NULL 'data' pointer reads as None, giving length 0.
+        return self.data and len(self.data) or 0
+    
+    def from_param(cls, obj):
+        # Convert None or 0
+        if obj is None or obj == 0:
+            return cls(POINTER(c_char)())
+
+        # Convert from String
+        elif isinstance(obj, String):
+            return obj
+
+        # Convert from str
+        elif isinstance(obj, str):
+            return cls(obj)
+        
+        # Convert from c_char_p
+        elif isinstance(obj, c_char_p):
+            return obj
+        
+        # Convert from POINTER(c_char)
+        elif isinstance(obj, POINTER(c_char)):
+            return obj
+        
+        # Convert from raw pointer
+        elif isinstance(obj, int):
+            return cls(cast(obj, POINTER(c_char)))
+
+        # Convert from object
+        else:
+            return String.from_param(obj._as_parameter_)
+    from_param = classmethod(from_param)
+
+def ReturnString(obj):
+    # Convert a value returned from C into a String via String.from_param.
+    return String.from_param(obj)
+
+# As of ctypes 1.0, ctypes does not support custom error-checking
+# functions on callbacks, nor does it support custom datatypes on
+# callbacks, so we must ensure that all callbacks return
+# primitive datatypes.
+#
+# Non-primitive return values wrapped with UNCHECKED won't be
+# typechecked, and will be converted to c_void_p.
+def UNCHECKED(type):
+    # Primitive ctypes types expose a one-character '_type_' code;
+    # anything non-primitive -- or a pointer (code "P") -- is demoted
+    # to c_void_p.
+    if (hasattr(type, "_type_") and isinstance(type._type_, str)
+        and type._type_ != "P"):
+        return type
+    else:
+        return c_void_p
+
+# ctypes doesn't have direct support for variadic functions, so we have to write
+# our own wrapper class
+class _variadic_function(object):
+    def __init__(self,func,restype,argtypes):
+        # 'argtypes' describes only the fixed parameters; variadic
+        # trailing arguments are passed through unconverted (see __call__).
+        self.func=func
+        self.func.restype=restype
+        self.argtypes=argtypes
+    def _as_parameter_(self):
+        # So we can pass this variadic function as a function pointer
+        # NOTE(review): ctypes looks up '_as_parameter_' as an attribute,
+        # not a method -- verify whether callers invoke this explicitly.
+        return self.func
+    def __call__(self,*args):
+        fixed_args=[]
+        i=0
+        for argtype in self.argtypes:
+            # Typecheck what we can
+            fixed_args.append(argtype.from_param(args[i]))
+            i+=1
+        # Remaining (variadic) arguments are appended without conversion.
+        return self.func(*fixed_args+list(args[i:]))
+

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/printer.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/printer.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/printer.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,298 @@
+#!/usr/bin/env python
+
+import os, sys, time
+from ctypesgencore.descriptions import *
+from ctypesgencore.ctypedescs import *
+from ctypesgencore.messages import *
+
+import ctypesgencore.libraryloader # So we can get the path to it
+import test # So we can find the path to local files in the printer package
+
def path_to_local_file(name, known_local_module=test):
    """Return the path of data file `name` located in the same directory
    as `known_local_module` (the local `test` module by default)."""
    module_dir = os.path.dirname(known_local_module.__file__)
    return os.path.join(module_dir, name)
+
class WrapperPrinter:
    """Write the generated ctypes wrapper module to an output file.

    Construction does all the work: it emits the header template, the
    preamble (helper classes such as String and _variadic_function), the
    library loader, one section per library/module, every included
    description in processor-determined order, and finally any files the
    user asked to insert verbatim.
    """

    def __init__(self, outpath, options, data):
        status_message("Writing to %s." % outpath)

        self.file = file(outpath, "w")
        self.options = options

        # Normalize the build path with a trailing separator so that the
        # startswith() test in srcinfo() cannot match a partial directory
        # name prefix.
        if self.options.strip_build_path and \
          self.options.strip_build_path[-1] != os.path.sep:
            self.options.strip_build_path += os.path.sep

        self.print_header()
        print >>self.file

        self.print_preamble()
        print >>self.file

        self.print_loader()
        print >>self.file

        self.print_group(self.options.libraries, "libraries", self.print_library)
        self.print_group(self.options.modules, "modules", self.print_module)

        # Dispatch table: description kind -> printing method.
        method_table = {
            'function': self.print_function,
            'macro': self.print_macro,
            'struct': self.print_struct,
            'struct-body': self.print_struct_members,
            'typedef': self.print_typedef,
            'variable': self.print_variable,
            'enum': self.print_enum,
            'constant': self.print_constant
        }

        for kind, desc in data.output_order:
            if desc.included:
                method_table[kind](desc)
                print >>self.file

        self.print_group(self.options.inserted_files, "inserted files",
                         self.insert_file)

    def print_group(self, list, name, function):
        """Print one comment-bracketed section by applying `function` to
        every element of `list` (note: parameter shadows the builtin).
        An empty sequence yields a single "# No <name>" marker."""
        if list:
            print >>self.file, "# Begin %s" % name
            print >>self.file
            for obj in list:
                function(obj)
            print >>self.file
            print >>self.file, "# %d %s" % (len(list), name)
            print >>self.file, "# End %s" % name
        else:
            print >>self.file, "# No %s" % name
        print >>self.file

    def srcinfo(self, src):
        """Print a source-location comment ("# file: lineno") for `src`, a
        (filename, lineno) pair or None. Strips the configured build-path
        prefix from real filenames."""
        if src is None:
            print >>self.file
        else:
            filename, lineno = src
            if filename in ("<built-in>", "<command line>"):
                print >>self.file, "# %s" % filename
            else:
                if self.options.strip_build_path and \
                  filename.startswith(self.options.strip_build_path):
                    filename = filename[len(self.options.strip_build_path):]
                print >>self.file, "# %s: %s" % (filename, lineno)

    def template_subs(self):
        """Build the substitution dictionary for the header template:
        date, command line, first header's basename, plus every option
        (joined for sequences, repr() for everything else)."""
        template_subs = {
            'date': time.ctime(),
            'argv': ' '.join([x for x in sys.argv if not x.startswith("--strip-build-path")]),
            'name': os.path.basename(self.options.headers[0])
        }

        for opt, value in self.options.__dict__.iteritems():
            if type(value) == str:
                template_subs[opt] = value
            elif isinstance(value, (list, tuple)):
                template_subs[opt] = (os.path.sep).join(value)
            else:
                template_subs[opt] = repr(value)

        return template_subs

    def print_header(self):
        """Write the file header, filling the user-supplied template (or
        the bundled defaultheader.py) with template_subs()."""
        template_file = None

        if self.options.header_template:
            path = self.options.header_template
            try:
                template_file = file(path, "r")
            except IOError:
                error_message("Cannot load header template from file \"%s\" " \
                    " - using default template." % path, cls = 'missing-file')

        if not template_file:
            path = path_to_local_file("defaultheader.py")
            template_file = file(path, "r")

        template_subs = self.template_subs()
        self.file.write(template_file.read() % template_subs)

        template_file.close()

    def print_preamble(self):
        """Copy the bundled preamble (String, UNCHECKED, etc.) verbatim
        into the output."""
        path = path_to_local_file("preamble.py")

        print >>self.file, "# Begin preamble"
        print >>self.file
        preamble_file = file(path, "r")
        self.file.write(preamble_file.read())
        preamble_file.close()
        print >>self.file
        print >>self.file, "# End preamble"

    def print_loader(self):
        """Copy the library loader into the output and emit the runtime
        library-search-directory setup."""
        print >>self.file, "_libs = {}"
        print >>self.file, "_libdirs = %s" % self.options.compile_libdirs
        print >>self.file
        print >>self.file, "# Begin loader"
        print >>self.file
        path = path_to_local_file("libraryloader.py",
                                  ctypesgencore.libraryloader)
        loader_file = file(path, "r")
        self.file.write(loader_file.read())
        loader_file.close()
        print >>self.file
        print >>self.file, "# End loader"
        print >>self.file
        print >>self.file, "add_library_search_dirs([%s])" % \
                ", ".join([repr(d) for d in self.options.runtime_libdirs])

    def print_library(self, library):
        """Emit the load_library() call for one shared library."""
        print >>self.file, '_libs["%s"] = load_library("%s")' % (library, library)

    def print_module(self, module):
        """Emit a star-import of another generated module.

        Bug fix: the original interpolated an undefined variable `name`,
        which raised NameError whenever modules were configured.
        """
        print >>self.file, 'from %s import *' % module

    def print_constant(self, constant):
        """Emit an assignment for one constant, then its source location."""
        print >>self.file, '%s = %s' % \
            (constant.name, constant.value.py_string(False)),
        self.srcinfo(constant.src)

    def print_typedef(self, typedef):
        """Emit an assignment aliasing the typedef name to its ctype."""
        print >>self.file, '%s = %s' % \
            (typedef.name, typedef.ctype.py_string()),
        self.srcinfo(typedef.src)

    def print_struct(self, struct):
        """Emit the (initially empty) class declaration for a struct or
        union; the members are filled in later by print_struct_members()."""
        self.srcinfo(struct.src)
        base = {'union': 'Union', 'struct': 'Structure'}[struct.variety]
        print >>self.file, 'class %s_%s(%s):' % \
            (struct.variety, struct.tag, base)
        print >>self.file, '    pass'

    def print_struct_members(self, struct):
        """Emit __slots__ and _fields_ for a non-opaque struct/union.
        Assigning _fields_ after the class definition allows
        self-referential structures."""
        if struct.opaque: return
        print >>self.file, '%s_%s.__slots__ = [' % (struct.variety, struct.tag)
        for name, ctype in struct.members:
            print >>self.file, "    '%s'," % name
        print >>self.file, ']'
        print >>self.file, '%s_%s._fields_ = [' % (struct.variety, struct.tag)
        for name, ctype in struct.members:
            if isinstance(ctype, CtypesBitfield):
                # Bitfield members carry a third tuple element: the width.
                print >>self.file, "    ('%s', %s, %s)," % \
                    (name, ctype.py_string(), ctype.bitfield.py_string(False))
            else:
                print >>self.file, "    ('%s', %s)," % (name, ctype.py_string())
        print >>self.file, ']'

    def print_enum(self, enum):
        """Emit the enum type itself as an alias of c_int."""
        print >>self.file, 'enum_%s = c_int' % enum.tag,
        self.srcinfo(enum.src)
        # Values of enumerator are output as constants.

    def print_function(self, function):
        """Dispatch to the fixed-arity or variadic function printer."""
        if function.variadic:
            self.print_variadic_function(function)
        else:
            self.print_fixed_function(function)

    def print_fixed_function(self, function):
        """Emit the symbol lookup, restype and argtypes for a fixed-arity
        function. When the defining library is unknown, search all loaded
        libraries at runtime instead."""
        self.srcinfo(function.src)
        if function.source_library:
            print >>self.file, "if hasattr(_libs[%r], %r):" % \
                (function.source_library, function.c_name())
            print >>self.file, "    %s = _libs[%r].%s" % \
                (function.py_name(), function.source_library, function.c_name())
            print >>self.file, "    %s.restype = %s" % \
                (function.py_name(), function.restype.py_string())
            print >>self.file, "    %s.argtypes = [%s]" % (function.py_name(),
                ', '.join([a.py_string() for a in function.argtypes]))
        else:
            print >>self.file, "for _lib in _libs.values():"
            print >>self.file, "    if hasattr(_lib, %r):" % function.c_name()
            print >>self.file, "        %s = _lib.%s" % (function.py_name(), function.c_name())
            print >>self.file, "        %s.restype = %s" % (function.py_name(), function.restype.py_string())
            print >>self.file, "        %s.argtypes = [%s]" % (function.py_name(),
                ', '.join([a.py_string() for a in function.argtypes]))
            print >>self.file, "        break"

    def print_variadic_function(self, function):
        """Emit a _variadic_function wrapper for a variadic function; the
        declared argtypes are typechecked, extra arguments pass through."""
        self.srcinfo(function.src)
        if function.source_library:
            print >>self.file, "if hasattr(_libs[%r], %r):" % \
                (function.source_library, function.c_name())
            print >>self.file, "    _func = _libs[%r].%s" % \
                (function.source_library, function.c_name())
            print >>self.file, "    _restype = %s" % function.restype.py_string()
            print >>self.file, "    _argtypes = [%s]" % \
                ', '.join([a.py_string() for a in function.argtypes])
            print >>self.file, "    %s = _variadic_function(_func,_restype,_argtypes)" % \
                function.py_name()
        else:
            print >>self.file, "for _lib in _libs.values():"
            print >>self.file, "    if hasattr(_lib, %r):" % function.c_name()
            print >>self.file, "        _func = _lib.%s" % \
                (function.c_name())
            print >>self.file, "        _restype = %s" % function.restype.py_string()
            print >>self.file, "        _argtypes = [%s]" % \
                ', '.join([a.py_string() for a in function.argtypes])
            print >>self.file, "        %s = _variadic_function(_func,_restype,_argtypes)" % \
                function.py_name()

    def print_variable(self, variable):
        """Emit an in_dll() lookup for an exported variable, wrapped in
        try/except because the symbol may be absent at runtime."""
        self.srcinfo(variable.src)
        if variable.source_library:
            print >>self.file, 'try:'
            print >>self.file, '    %s = (%s).in_dll(_libs[%r], %r)' % \
                (variable.py_name(),
                 variable.ctype.py_string(),
                 variable.source_library,
                 variable.c_name())
            print >>self.file, 'except:'
            print >>self.file, '    pass'
        else:
            print >>self.file, "for _lib in _libs.values():"
            print >>self.file, '    try:'
            print >>self.file, '        %s = (%s).in_dll(_lib, %r)' % \
                (variable.py_name(),
                 variable.ctype.py_string(),
                 variable.c_name())
            print >>self.file, "        break"
            print >>self.file, '    except:'
            print >>self.file, '        pass'

    def print_macro(self, macro):
        """Dispatch to the function-like or simple macro printer."""
        if macro.params:
            self.print_func_macro(macro)
        else:
            self.print_simple_macro(macro)

    def print_simple_macro(self, macro):
        """Emit an object-like macro as a guarded assignment."""
        # The macro translator makes heroic efforts but it occasionally fails.
        # We want to contain the failures as much as possible.
        # Hence the try statement.
        self.srcinfo(macro.src)
        print >>self.file, "try:"
        print >>self.file, "    %s = %s" % (macro.name, macro.expr.py_string(True))
        print >>self.file, "except:"
        print >>self.file, "    pass"

    def print_func_macro(self, macro):
        """Emit a function-like macro as a Python function."""
        self.srcinfo(macro.src)
        print >>self.file, "def %s(%s):" % \
            (macro.name, ", ".join(macro.params))
        print >>self.file, "    return %s" % macro.expr.py_string(True)

    def insert_file(self, filename):
        """Copy `filename` verbatim into the output, bracketed by
        comments. A file that cannot be opened is reported and skipped.

        Bug fix: after a failed open the original fell through and raised
        NameError on `inserted_file`; it now returns as advertised.
        """
        try:
            inserted_file = file(filename, "r")
        except IOError:
            error_message("Cannot open file \"%s\". Skipped it." % filename,
                          cls = 'missing-file')
            return

        print >>self.file, "# Begin \"%s\"" % filename
        print >>self.file
        self.file.write(inserted_file.read())
        print >>self.file
        print >>self.file, "# End \"%s\"" % filename

        inserted_file.close()

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/test.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/test.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/printer/test.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,6 @@
+"""
+ctypesgencore.printer.printer imports this module so that it can find the path
+to local data files such as defaultheader.py and preamble.py.
+"""
+
+pass
\ No newline at end of file

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/__init__.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/__init__.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/__init__.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+
+"""
+This module contains functions to operate on the DeclarationCollection produced
+by the parser module and prepare it for output.
+
+A convenience function, process(), calls everything else.
+"""
+
+__all__ = ["process"]
+
+from pipeline import process
\ No newline at end of file

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/dependencies.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/dependencies.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/dependencies.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+
+"""
+The dependencies module determines which descriptions depend on which other
+descriptions.
+"""
+
+from ctypesgencore.descriptions import *
+from ctypesgencore.ctypedescs import *
+from ctypesgencore.messages import *
+
def find_dependencies(data, opts):
    """Visit each description in `data` and figure out which other descriptions
it depends on, putting the results in desc.requirements. Also find errors in
ctypedecls or expressions attached to the description and transfer them to the
description."""
    
    # Lookup tables mapping a name (or a (variety, tag) pair for structs)
    # to the defining description, or None for names supplied by imported
    # modules (which need no requirement edge).
    struct_names = {}
    enum_names = {}
    typedef_names = {}
    ident_names = {}
    
    # Start the lookup tables with names from imported modules
    
    for name in opts.other_known_names:
        typedef_names[name] = None
        ident_names[name] = None
        if name.startswith("struct_") or name.startswith("enum_"):
            variety = name.split("_")[0]
            tag = "_".join(name.split("_")[1:])
            struct_names[(variety,tag)] = None
        if name.startswith("enum_"):
            # NOTE(review): this keys enum_names by the full "enum_<tag>"
            # string, while add_to_lookup_table() below keys it by the bare
            # tag and depend() looks it up via cenum.tag - verify that
            # imported enums are actually found here.
            enum_names[name] = None
    
    def depend(desc, nametable, name):
        """Try to add `name` as a requirement for `desc`, looking `name` up in
`nametable`. Returns True if found."""

        if name in nametable:
            requirement = nametable[name]
            # A None entry means the name comes from an imported module:
            # known, but no requirement edge to record.
            if requirement: desc.add_requirements([requirement])
            return True
        else:
            return False
    
    def find_dependencies_for(desc, kind):
        """Find all the descriptions that `desc` depends on and add them as
dependencies for `desc`. Also collect error messages regarding `desc` and 
convert unlocateable descriptions into error messages."""

        # NOTE(review): `roots` is only bound for the kinds listed below;
        # an unexpected `kind` would raise NameError at the loop.
        if kind == "constant": roots = [desc.value]
        if kind == "struct": roots = []
        if kind == "struct-body": roots = [desc.ctype]
        if kind == "enum": roots = []
        if kind == "typedef": roots = [desc.ctype]
        if kind == "function": roots = desc.argtypes + [desc.restype]
        if kind == "variable": roots = [desc.ctype]
        if kind == "macro":
            if desc.expr: roots = [desc.expr]
            else: roots = []
        
        cstructs,cenums,ctypedefs,errors,identifiers = [], [], [], [], []
        
        # Collect every struct/enum/typedef/identifier referenced by the
        # types or expressions attached to this description.
        for root in roots:
            s, e, t, errs, i = visit_type_and_collect_info(root)
            cstructs.extend(s)
            cenums.extend(e)
            ctypedefs.extend(t)
            errors.extend(errs)
            identifiers.extend(i)
        
        unresolvables = []
        
        for cstruct in cstructs:
            # A struct may legitimately reference itself; skip that case.
            if kind == "struct" and desc.variety == cstruct.variety and \
                desc.tag == cstruct.tag:
                continue
            if not depend(desc, struct_names, (cstruct.variety, cstruct.tag)):
                unresolvables.append("%s \"%s\"" % \
                    (cstruct.variety, cstruct.tag))
        
        for cenum in cenums:
            if kind == "enum" and desc.tag == cenum.tag:
                continue
            if not depend(desc, enum_names, cenum.tag):
                unresolvables.append("enum \"%s\"" % cenum.tag)
        
        for ctypedef in ctypedefs:
            if not depend(desc, typedef_names, ctypedef):
                unresolvables.append("typedef \"%s\"" % ctypedef)
        
        for ident in identifiers:
            # A macro's own parameters are not external dependencies.
            if isinstance(desc, MacroDescription) and \
                desc.params and ident in desc.params:
                continue
            if not depend(desc, ident_names, ident):
                unresolvables.append("identifier \"%s\"" % ident)
        
        for u in unresolvables:
            errors.append(("%s depends on an unknown %s." % \
                          (desc.casual_name(), u), None))
        
        # Transfer collected errors onto the description itself.
        for err, cls in errors:
            err += " %s will not be output" % desc.casual_name()
            desc.error(err, cls = cls)
        
    def add_to_lookup_table(desc, kind):
        """Add `desc` to the lookup table so that other descriptions that use
it can find it."""
        if kind == "struct":
            if (desc.variety, desc.tag) not in struct_names:
                struct_names[(desc.variety, desc.tag)] = desc
        if kind == "enum":
            if desc.tag not in enum_names:
                enum_names[desc.tag] = desc
        if kind == "typedef":
            if desc.name not in typedef_names:
                typedef_names[desc.name] = desc
        if kind in ("function", "constant", "variable", "macro"):
            if desc.name not in ident_names:
                ident_names[desc.name] = desc

    # Macros are handled differently from everything else because macros can
    # call other macros that are referenced after them in the input file, but
    # no other type of description can look ahead like that.

    for kind, desc in data.output_order:
        if kind!="macro":
            find_dependencies_for(desc, kind)
            add_to_lookup_table(desc, kind)

    # Two passes for macros: register every macro name first, then resolve
    # dependencies, so forward references between macros work.
    for kind, desc in data.output_order:
        if kind=="macro":
            add_to_lookup_table(desc, kind)
    for kind, desc in data.output_order:
        if kind=="macro":
            find_dependencies_for(desc, kind)

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/operations.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/operations.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/operations.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,200 @@
+#!/usr/bin/env python
+
+"""
+The operations module contains various functions to process the
+DescriptionCollection and prepare it for output.
+ctypesgencore.processor.pipeline calls the operations module.
+"""
+
+import ctypes, re, os, sys, keyword
+from ctypesgencore.descriptions import *
+from ctypesgencore.messages import *
+import ctypesgencore.libraryloader
+
+# Processor functions
+
def automatically_typedef_structs(data, options):
    """automatically_typedef_structs() aliases "struct_<tag>" to "<tag>" for
    every struct and union, by synthesizing a typedef description for each
    named struct."""
    # XXX Check if it has already been aliased in the C code.

    named_structs = [s for s in data.structs if not s.ctype.anonymous]
    for struct in named_structs:
        alias = TypedefDescription(struct.tag,
                                   struct.ctype,
                                   src=struct.src)
        alias.add_requirements(set([struct]))

        data.typedefs.append(alias)
        # Keep the typedef immediately after its struct in the master list.
        data.all.insert(data.all.index(struct) + 1, alias)
        data.output_order.append(("typedef", alias))
+
def remove_NULL(data, options):
    """remove_NULL() suppresses any NULL definition found in the C headers,
    because ctypesgen supplies its own NULL definition."""

    null_macros = [m for m in data.macros if m.name == "NULL"]
    for null_macro in null_macros:
        null_macro.include_rule = "never"
+
def remove_descriptions_in_system_headers(data, opts):
    """remove_descriptions_in_system_headers() demotes descriptions that did
    not come from the header files named on the command line: they are only
    output if some other description needs them."""

    known_headers = [os.path.basename(header) for header in opts.headers]

    for description in data.all:
        if description.src is None:
            continue
        source_file = description.src[0]
        if source_file == "<command line>":
            description.include_rule = "if_needed"
        elif source_file == "<built-in>":
            if not opts.builtin_symbols:
                description.include_rule = "if_needed"
        elif os.path.basename(source_file) not in known_headers:
            if not opts.all_headers:
                # If something else requires this, include it even though
                # it is in a system header file.
                description.include_rule = "if_needed"
+
def remove_macros(data, opts):
    """remove_macros() suppresses all macros when --no-macros is set."""
    if opts.include_macros:
        return
    for macro in data.macros:
        macro.include_rule = "never"
+
def filter_by_regexes_exclude(data, opts):
    """filter_by_regexes_exclude() suppresses every symbol whose Python name
    matches the user-supplied exclusion regular expression."""
    if not opts.exclude_symbols:
        return
    pattern = re.compile(opts.exclude_symbols)
    for description in data.all:
        if pattern.match(description.py_name()):
            description.include_rule = "never"
+
def filter_by_regexes_include(data, opts):
    """filter_by_regexes_include() forces inclusion of symbols matching the
    user-supplied inclusion regular expression, unless another operation has
    already marked them "never"."""
    if not opts.include_symbols:
        return
    pattern = re.compile(opts.include_symbols)
    for description in data.all:
        if description.include_rule != "never" and \
                pattern.match(description.py_name()):
            description.include_rule = "yes"
+
def fix_conflicting_names(data,opts):
    """If any descriptions from the C code would overwrite Python builtins or
    other important names, fix_conflicting_names() adds underscores to resolve
    the name conflict."""
    
    # This is the order of priority for names
    descriptions = data.functions + data.variables + data.structs + \
        data.typedefs + data.enums + data.constants + data.macros
    
    # This dictionary maps names to a string representing where the name
    # came from.
    important_names={}
    
    # Names defined by the loader portion of the generated preamble.
    preamble_names=set()
    preamble_names=preamble_names.union(['DarwinLibraryLoader',
        'LibraryLoader', 'LinuxLibraryLoader', 'WindowsLibraryLoader',
        '_WindowsLibrary', 'add_library_search_dirs', '_environ_path', 'ctypes',
        'load_library', 'loader', 'os', 're', 'sys'])
    # Names defined by the ctypes-helper portion of the preamble.
    preamble_names=preamble_names.union(['ArgumentError', 'CFUNCTYPE',
        'POINTER', 'ReturnString', 'String', 'Structure', 'UNCHECKED', 'Union',
        'UserString', '_variadic_function', 'addressof', 'c_buffer', 'c_byte',
        'c_char', 'c_char_p', 'c_double', 'c_float', 'c_int', 'c_int16',
        'c_int32', 'c_int64', 'c_int8', 'c_long', 'c_longlong', 'c_ptrdiff_t',
        'c_short', 'c_size_t', 'c_ubyte', 'c_uint', 'c_uint16', 'c_uint32',
        'c_uint64', 'c_uint8', 'c_ulong', 'c_ulonglong', 'c_ushort', 'c_void',
        'c_void_p', 'c_voidp', 'c_wchar', 'c_wchar_p', 'cast', 'ctypes', 'os',
        'pointer', 'sizeof'])
    for name in preamble_names:
        important_names[name] = "a name needed by ctypes or ctypesgen"
    # NOTE(review): when this module is imported (rather than run as a
    # script), __builtins__ is a dict, so dir(__builtins__) yields dict
    # method names instead of builtin names - TODO confirm this is intended.
    for name in dir(__builtins__): important_names[name] = "a Python builtin"
    for name in opts.other_known_names:
        important_names[name] = "a name from an included Python module"
    for name in keyword.kwlist: important_names[name] = "a Python keyword"
    
    for description in descriptions:
        if description.py_name() in important_names:
            conflict_name = important_names[description.py_name()]
            
            original_name=description.casual_name()
            # Structs/enums are renamed by appending "_" to the tag; all
            # other descriptions get a leading "_" on the name. Repeat
            # until the adjusted name is free.
            while description.py_name() in important_names:
                if isinstance(description,
                                (StructDescription, EnumDescription)):
                    description.tag+="_"
                else:
                    description.name="_"+description.name
            
            if not description.dependents:
                description.warning("%s has been renamed to %s due to a name " \
                    "conflict with %s." % \
                    (original_name,
                    description.casual_name(),
                    conflict_name),
                    cls = 'rename')
            else:
                # Dependents recorded a requirement on the old name, so
                # they are dropped rather than silently broken.
                description.warning("%s has been renamed to %s due to a name " \
                    "conflict with %s. Other objects depend on %s - those " \
                    "objects will be skipped." % \
                    (original_name, description.casual_name(),
                    conflict_name, original_name),
                    cls = 'rename')
                
                for dependent in description.dependents:
                    dependent.include_rule = "never"
            
            # Later descriptions must not collide with the new name either.
            if description.include_rule=="yes":
                important_names[description.py_name()] = \
                    description.casual_name()
    
    # Names of struct members don't conflict with much, but they can conflict
    # with Python keywords.
    
    for struct in data.structs:
        if not struct.opaque:
            for i,(name,type) in enumerate(struct.members):
                if name in keyword.kwlist:
                    struct.members[i] = ("_"+name,type)
                    struct.warning("Member \"%s\" of %s has been renamed to " \
                        "\"%s\" because it has the same name as a Python " \
                        "keyword." % (name, struct.casual_name(), "_"+name),
                        cls = 'rename')
    
    # Macro arguments may be have names that conflict with Python keywords.
    # In a perfect world, this would simply rename the parameter instead
    # of throwing an error message.
    
    for macro in data.macros:
        if macro.params:
            for param in macro.params:
                if param in keyword.kwlist:
                    macro.error("One of the parameters to %s, \"%s\" has the " \
                        "same name as a Python keyword. %s will be skipped." % \
                        (macro.casual_name(), param, macro.casual_name()),
                        cls = 'name-conflict')
+
def find_source_libraries(data, opts):
    """find_source_libraries() determines which library contains each function
    and variable.

    Each function/variable description gets a `source_library` attribute:
    the name of the first loadable library that exports its C symbol, or
    None if no loaded library does (the lookup then happens at runtime in
    the generated module).
    """

    all_symbols = data.functions + data.variables

    for symbol in all_symbols:
        symbol.source_library = None

    ctypesgencore.libraryloader.add_library_search_dirs(opts.compile_libdirs)

    for library_name in opts.libraries:
        try:
            library = ctypesgencore.libraryloader.load_library(library_name)
        except ImportError:
            # The exception object was never used; the old binding also
            # relied on the Python-2-only "except ImportError, e" syntax.
            warning_message("Could not load library \"%s\". Okay, I'll " \
                "try to load it at runtime instead. " % (library_name),
                cls = 'missing-library')
            continue
        for symbol in all_symbols:
            if symbol.source_library is None:
                if hasattr(library, symbol.c_name()):
                    symbol.source_library = library_name

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/pipeline.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/pipeline.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesgencore/processor/pipeline.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+
+import ctypes, re, os
+from ctypesgencore.processor.operations import *
+from ctypesgencore.processor.dependencies import find_dependencies
+from ctypesgencore.ctypedescs import *
+from ctypesgencore.messages import *
+
+"""
+A brief explanation of the processing steps:
+1. The dependencies module builds a dependency graph for the descriptions.
+
+2. Operation functions are called to perform various operations on the
+descriptions. The operation functions are found in operations.py.
+
+3. If an operation function decides to exclude a description from the output, it
+sets 'description.include_rule' to "never"; if an operation function decides not
+to include a description by default, but to allow if required, it sets
+'description.include_rule' to "if_needed".
+
+4. If an operation function encounters an error that makes a description unfit
+for output, it appends a string error message to 'description.errors'.
+'description.warnings' is a list of warning messages that will be displayed but
+will not prevent the description from being output.
+
+5. Based on 'description.include_rule', calculate_final_inclusion() decides
+which descriptions to include in the output. It sets 'description.included' to
+True or False.
+
+6. For each description, print_errors_encountered() checks if there are error
+messages in 'description.errors'. If so, print_errors_encountered() prints the
+error messages, but only if 'description.included' is True - it doesn't bother
+the user with error messages regarding descriptions that would not be in the
+output anyway. It also prints 'description.warnings'.
+
+7. calculate_final_inclusion() is called again to recalculate based on
+the errors that print_errors_encountered() has flagged.
+
+"""
+
+def process(data,options):
+    """Run the full processing pipeline over the description collection.
+
+    Builds the dependency graph, applies each operation from operations.py
+    in order, then decides which descriptions end up in the output; see the
+    numbered explanation in the module docstring above.
+    """
+    status_message("Processing description list.")
+    
+    find_dependencies(data,options)
+    
+    automatically_typedef_structs(data,options)
+    remove_NULL(data, options)
+    remove_descriptions_in_system_headers(data,options)
+    filter_by_regexes_exclude(data,options)
+    filter_by_regexes_include(data,options)
+    remove_macros(data,options)
+    fix_conflicting_names(data,options)
+    find_source_libraries(data,options)
+        
+    # Inclusion is calculated twice on purpose: print_errors_encountered()
+    # may flag erroneous descriptions as include_rule="never" (step 7).
+    calculate_final_inclusion(data,options)
+    print_errors_encountered(data,options)
+    calculate_final_inclusion(data,options)
+
+def calculate_final_inclusion(data,opts):
+    """calculate_final_inclusion() calculates which descriptions will be included in the
+    output library.
+
+    An object with include_rule="never" is never included.
+    An object with include_rule="yes" is included if its requirements can be
+        included.
+    An object with include_rule="if_needed" is included if an object to be
+        included requires it and if its requirements can be included.
+    """
+    
+    def can_include_desc(desc):
+        if desc.can_include==None:
+            if desc.include_rule=="no":
+                desc.can_include=False
+            elif desc.include_rule=="yes" or desc.include_rule=="if_needed":
+                desc.can_include=True
+                for req in desc.requirements:
+                    if not can_include_desc(req):
+                        desc.can_include=False
+        return desc.can_include
+        
+    def do_include_desc(desc):
+        if desc.included:
+            return # We've already been here
+        desc.included = True
+        for req in desc.requirements:
+            do_include_desc(req)
+    
+    for desc in data.all:
+        desc.can_include=None # None means "Not Yet Decided"
+        desc.included=False
+        
+    for desc in data.all:
+        if desc.include_rule=="yes":
+            if can_include_desc(desc):
+                do_include_desc(desc)
+
+def print_errors_encountered(data,opts):
+    # See descriptions.py for an explanation of the error-handling mechanism
+    for desc in data.all:
+        # If description would not have been included, dont bother user by
+        # printing warnings.
+        if desc.included or opts.show_all_errors:
+            if opts.show_long_errors or len(desc.errors)+len(desc.warnings)<=2:
+                for (error,cls) in desc.errors:
+                    # Macro errors will always be displayed as warnings.
+                    if isinstance(desc, MacroDescription):
+                        if opts.show_macro_warnings:
+                            warning_message(error,cls)
+                    else:
+                        error_message(error,cls)
+                for (warning,cls) in desc.warnings:
+                    warning_message(warning,cls)
+            
+            else:
+                if desc.errors:
+                    error1,cls1 = desc.errors[0]
+                    error_message(error1,cls1)
+                    numerrs = len(desc.errors)-1
+                    numwarns = len(desc.warnings)
+                    if numwarns:
+                        error_message("%d more errors and %d more warnings " \
+                            "for %s" % (numerrs,numwarns,desc.casual_name()))
+                    else:
+                        error_message("%d more errors for %s " % \
+                            (numerrs,desc.casual_name()))
+                else:
+                    warning1,cls1 = desc.warnings[0]
+                    warning_message(warning1,cls1)
+                    warning_message("%d more errors for %s" % \
+                        (len(desc.warnings)-1, desc.casual_name()))
+        if desc.errors:
+            # process() will recalculate to take this into account
+            desc.include_rule = "never"
+

Added: grass/branches/develbranch_6/lib/python/ctypes/ctypesheader.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/ctypesheader.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/ctypesheader.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,567 @@
+# Begin preamble
+
+import ctypes, os, sys
+from ctypes import *
+
+_int_types = (c_int16, c_int32)
+if hasattr(ctypes, 'c_int64'):
+    # Some builds of ctypes apparently do not have c_int64
+    # defined; it's a pretty good bet that these builds do not
+    # have 64-bit pointers.
+    _int_types += (c_int64,)
+# Use the int type whose size matches size_t as a stand-in for ptrdiff_t.
+for t in _int_types:
+    if sizeof(t) == sizeof(c_size_t):
+        c_ptrdiff_t = t
+del t
+del _int_types
+
+class c_void(Structure):
+    # Stand-in for C's 'void' so POINTER(c_void) keeps pointer semantics.
+    # c_void_p is a buggy return type, converting to int, so
+    # POINTER(None) == c_void_p is actually written as
+    # POINTER(c_void), so it can be treated as a real pointer.
+    _fields_ = [('dummy', c_int)]
+
+def POINTER(obj):
+    """Wrapper around ctypes.POINTER that patches from_param so None is
+    accepted and converted to a real NULL pointer."""
+    p = ctypes.POINTER(obj)
+
+    # Convert None to a real NULL pointer to work around bugs
+    # in how ctypes handles None on 64-bit platforms
+    if not isinstance(p.from_param, classmethod):
+        def from_param(cls, x):
+            if x is None:
+                return cls()
+            else:
+                return x
+        p.from_param = classmethod(from_param)
+
+    return p
+
+class UserString:
+    """A string-like wrapper around an internal 'data' string.
+
+    Bundled here so the generated bindings are self-contained; mirrors the
+    interface of the standard library's UserString (Python 2 era: __cmp__,
+    __getslice__, sys.maxint defaults). All operations delegate to self.data
+    and re-wrap results in self.__class__ where a string is returned.
+    """
+    def __init__(self, seq):
+        if isinstance(seq, basestring):
+            self.data = seq
+        elif isinstance(seq, UserString):
+            self.data = seq.data[:]
+        else:
+            self.data = str(seq)
+    def __str__(self): return str(self.data)
+    def __repr__(self): return repr(self.data)
+    def __int__(self): return int(self.data)
+    def __long__(self): return long(self.data)
+    def __float__(self): return float(self.data)
+    def __complex__(self): return complex(self.data)
+    def __hash__(self): return hash(self.data)
+
+    def __cmp__(self, string):
+        if isinstance(string, UserString):
+            return cmp(self.data, string.data)
+        else:
+            return cmp(self.data, string)
+    def __contains__(self, char):
+        return char in self.data
+
+    def __len__(self): return len(self.data)
+    def __getitem__(self, index): return self.__class__(self.data[index])
+    def __getslice__(self, start, end):
+        start = max(start, 0); end = max(end, 0)
+        return self.__class__(self.data[start:end])
+
+    def __add__(self, other):
+        if isinstance(other, UserString):
+            return self.__class__(self.data + other.data)
+        elif isinstance(other, basestring):
+            return self.__class__(self.data + other)
+        else:
+            return self.__class__(self.data + str(other))
+    def __radd__(self, other):
+        if isinstance(other, basestring):
+            return self.__class__(other + self.data)
+        else:
+            return self.__class__(str(other) + self.data)
+    def __mul__(self, n):
+        return self.__class__(self.data*n)
+    __rmul__ = __mul__
+    def __mod__(self, args):
+        return self.__class__(self.data % args)
+
+    # the following methods are defined in alphabetical order:
+    def capitalize(self): return self.__class__(self.data.capitalize())
+    def center(self, width, *args):
+        return self.__class__(self.data.center(width, *args))
+    def count(self, sub, start=0, end=sys.maxint):
+        return self.data.count(sub, start, end)
+    def decode(self, encoding=None, errors=None): # XXX improve this?
+        if encoding:
+            if errors:
+                return self.__class__(self.data.decode(encoding, errors))
+            else:
+                return self.__class__(self.data.decode(encoding))
+        else:
+            return self.__class__(self.data.decode())
+    def encode(self, encoding=None, errors=None): # XXX improve this?
+        if encoding:
+            if errors:
+                return self.__class__(self.data.encode(encoding, errors))
+            else:
+                return self.__class__(self.data.encode(encoding))
+        else:
+            return self.__class__(self.data.encode())
+    def endswith(self, suffix, start=0, end=sys.maxint):
+        return self.data.endswith(suffix, start, end)
+    def expandtabs(self, tabsize=8):
+        return self.__class__(self.data.expandtabs(tabsize))
+    def find(self, sub, start=0, end=sys.maxint):
+        return self.data.find(sub, start, end)
+    def index(self, sub, start=0, end=sys.maxint):
+        return self.data.index(sub, start, end)
+    def isalpha(self): return self.data.isalpha()
+    def isalnum(self): return self.data.isalnum()
+    def isdecimal(self): return self.data.isdecimal()
+    def isdigit(self): return self.data.isdigit()
+    def islower(self): return self.data.islower()
+    def isnumeric(self): return self.data.isnumeric()
+    def isspace(self): return self.data.isspace()
+    def istitle(self): return self.data.istitle()
+    def isupper(self): return self.data.isupper()
+    def join(self, seq): return self.data.join(seq)
+    def ljust(self, width, *args):
+        return self.__class__(self.data.ljust(width, *args))
+    def lower(self): return self.__class__(self.data.lower())
+    def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars))
+    def partition(self, sep):
+        return self.data.partition(sep)
+    def replace(self, old, new, maxsplit=-1):
+        return self.__class__(self.data.replace(old, new, maxsplit))
+    def rfind(self, sub, start=0, end=sys.maxint):
+        return self.data.rfind(sub, start, end)
+    def rindex(self, sub, start=0, end=sys.maxint):
+        return self.data.rindex(sub, start, end)
+    def rjust(self, width, *args):
+        return self.__class__(self.data.rjust(width, *args))
+    def rpartition(self, sep):
+        return self.data.rpartition(sep)
+    def rstrip(self, chars=None): return self.__class__(self.data.rstrip(chars))
+    def split(self, sep=None, maxsplit=-1):
+        return self.data.split(sep, maxsplit)
+    def rsplit(self, sep=None, maxsplit=-1):
+        return self.data.rsplit(sep, maxsplit)
+    def splitlines(self, keepends=0): return self.data.splitlines(keepends)
+    def startswith(self, prefix, start=0, end=sys.maxint):
+        return self.data.startswith(prefix, start, end)
+    def strip(self, chars=None): return self.__class__(self.data.strip(chars))
+    def swapcase(self): return self.__class__(self.data.swapcase())
+    def title(self): return self.__class__(self.data.title())
+    def translate(self, *args):
+        return self.__class__(self.data.translate(*args))
+    def upper(self): return self.__class__(self.data.upper())
+    def zfill(self, width): return self.__class__(self.data.zfill(width))
+
+class MutableString(UserString):
+    """mutable string objects
+
+    Python strings are immutable objects.  This has the advantage, that
+    strings may be used as dictionary keys.  If this property isn't needed
+    and you insist on changing string values in place instead, you may cheat
+    and use MutableString.
+
+    But the purpose of this class is an educational one: to prevent
+    people from inventing their own mutable string class derived
+    from UserString and then forgetting to remove (override) the
+    __hash__ method inherited from UserString.  This would lead to
+    errors that would be very hard to track down.
+
+    A faster and better solution is to rewrite your program using lists."""
+    def __init__(self, string=""):
+        self.data = string
+    def __hash__(self):
+        # Mutable objects must not be hashable.
+        raise TypeError("unhashable type (it is mutable)")
+    def __setitem__(self, index, sub):
+        if index < 0:
+            index += len(self.data)
+        if index < 0 or index >= len(self.data): raise IndexError
+        self.data = self.data[:index] + sub + self.data[index+1:]
+    def __delitem__(self, index):
+        if index < 0:
+            index += len(self.data)
+        if index < 0 or index >= len(self.data): raise IndexError
+        self.data = self.data[:index] + self.data[index+1:]
+    def __setslice__(self, start, end, sub):
+        start = max(start, 0); end = max(end, 0)
+        if isinstance(sub, UserString):
+            self.data = self.data[:start]+sub.data+self.data[end:]
+        elif isinstance(sub, basestring):
+            self.data = self.data[:start]+sub+self.data[end:]
+        else:
+            self.data =  self.data[:start]+str(sub)+self.data[end:]
+    def __delslice__(self, start, end):
+        start = max(start, 0); end = max(end, 0)
+        self.data = self.data[:start] + self.data[end:]
+    def immutable(self):
+        # Return an immutable (hashable) snapshot of the current value.
+        return UserString(self.data)
+    def __iadd__(self, other):
+        if isinstance(other, UserString):
+            self.data += other.data
+        elif isinstance(other, basestring):
+            self.data += other
+        else:
+            self.data += str(other)
+        return self
+    def __imul__(self, n):
+        self.data *= n
+        return self
+
+class String(MutableString, Union):
+    """A ctypes Union usable wherever C expects a char pointer.
+
+    The same storage is viewed either as 'raw' (POINTER(c_char)) or as
+    'data' (c_char_p); from_param() accepts None/0, String, str, c_char_p,
+    POINTER(c_char), int addresses, or any object with _as_parameter_.
+    """
+
+    _fields_ = [('raw', POINTER(c_char)),
+                ('data', c_char_p)]
+
+    def __init__(self, obj=""):
+        if isinstance(obj, (str, unicode, UserString)):
+            self.data = str(obj)
+        else:
+            self.raw = obj
+
+    def __len__(self):
+        # NULL pointer reads as length 0.
+        return self.data and len(self.data) or 0
+
+    def from_param(cls, obj):
+        # Convert None or 0
+        if obj is None or obj == 0:
+            return cls(POINTER(c_char)())
+
+        # Convert from String
+        elif isinstance(obj, String):
+            return obj
+
+        # Convert from str
+        elif isinstance(obj, str):
+            return cls(obj)
+
+        # Convert from c_char_p
+        elif isinstance(obj, c_char_p):
+            return obj
+
+        # Convert from POINTER(c_char)
+        elif isinstance(obj, POINTER(c_char)):
+            return obj
+
+        # Convert from raw pointer
+        elif isinstance(obj, int):
+            return cls(cast(obj, POINTER(c_char)))
+
+        # Convert from object
+        else:
+            return String.from_param(obj._as_parameter_)
+    from_param = classmethod(from_param)
+
+def ReturnString(obj, func, arguments):
+    # ctypes errcheck helper: wrap a returned char pointer in a String.
+    # 'func' and 'arguments' are part of the errcheck signature and unused.
+    return String.from_param(obj)
+
+# As of ctypes 1.0, ctypes does not support custom error-checking
+# functions on callbacks, nor does it support custom datatypes on
+# callbacks, so we must ensure that all callbacks return
+# primitive datatypes.
+#
+# Non-primitive return values wrapped with UNCHECKED won't be
+# typechecked, and will be converted to c_void_p.
+def UNCHECKED(type):
+    """Map a callback return type to a primitive ctypes type (see the note
+    above): primitives other than pointers pass through; everything else
+    becomes c_void_p."""
+    if (hasattr(type, "_type_") and isinstance(type._type_, str)
+        and type._type_ != "P"):
+        return type
+    else:
+        return c_void_p
+
+# ctypes doesn't have direct support for variadic functions, so we have to write
+# our own wrapper class
+class _variadic_function(object):
+    """Wrapper giving a ctypes function pointer variadic-call support;
+    'argtypes' covers only the fixed leading arguments."""
+    def __init__(self,func,restype,argtypes):
+        self.func=func
+        self.func.restype=restype
+        self.argtypes=argtypes
+    def _as_parameter_(self):
+        # So we can pass this variadic function as a function pointer
+        # NOTE(review): ctypes looks up _as_parameter_ as an attribute, not a
+        # call — confirm defining it as a method behaves as intended here.
+        return self.func
+    def __call__(self,*args):
+        fixed_args=[]
+        i=0
+        for argtype in self.argtypes:
+            # Typecheck what we can
+            fixed_args.append(argtype.from_param(args[i]))
+            i+=1
+        # Remaining (variadic) arguments are passed through unconverted.
+        return self.func(*fixed_args+list(args[i:]))
+
+# End preamble
+# Begin loader
+
+# ----------------------------------------------------------------------------
+# Copyright (c) 2008 David James
+# Copyright (c) 2006-2008 Alex Holkner
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+#  * Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#  * Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in
+#    the documentation and/or other materials provided with the
+#    distribution.
+#  * Neither the name of pyglet nor the names of its
+#    contributors may be used to endorse or promote products
+#    derived from this software without specific prior written
+#    permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+# ----------------------------------------------------------------------------
+
+import os.path, re, sys, glob
+import ctypes
+import ctypes.util
+
+def _environ_path(name):
+    """Split a PATH-like environment variable into a list ([] if unset)."""
+    # NOTE(review): always splits on ":" — assumes POSIX-style separators;
+    # confirm this is acceptable for the Windows loader paths too.
+    if name in os.environ:
+        return os.environ[name].split(":")
+    else:
+        return []
+
+class LibraryLoader(object):
+    """Base class for locating and loading a shared library by name.
+
+    Subclasses supply platform-specific candidate paths via
+    getplatformpaths(); load_library() tries each existing candidate
+    in order and raises ImportError when none can be loaded.
+    """
+    def __init__(self):
+        # Extra directories registered via add_library_search_dirs().
+        self.other_dirs=[]
+
+    def load_library(self,libname):
+        """Given the name of a library, load it."""
+        paths = self.getpaths(libname)
+
+        for path in paths:
+            if os.path.exists(path):
+                return self.load(path)
+
+        raise ImportError("%s not found." % libname)
+
+    def load(self,path):
+        """Given a path to a library, load it."""
+        try:
+            # Darwin requires dlopen to be called with mode RTLD_GLOBAL instead
+            # of the default RTLD_LOCAL.  Without this, you end up with
+            # libraries not being loadable, resulting in "Symbol not found"
+            # errors
+            if sys.platform == 'darwin':
+                return ctypes.CDLL(path, ctypes.RTLD_GLOBAL)
+            else:
+                return ctypes.cdll.LoadLibrary(path)
+        except OSError,e:
+            # Normalize dlopen failures to ImportError for the caller.
+            raise ImportError(e)
+
+    def getpaths(self,libname):
+        """Return a list of paths where the library might be found."""
+        if os.path.isabs(libname):
+            # Absolute path: use it verbatim.
+            yield libname
+
+        else:
+            for path in self.getplatformpaths(libname):
+                yield path
+
+            # Last resort: the platform's own search logic.
+            path = ctypes.util.find_library(libname)
+            if path: yield path
+
+    def getplatformpaths(self, libname):
+        # Default: no platform-specific candidates; subclasses override.
+        return []
+
+# Darwin (Mac OS X)
+
+class DarwinLibraryLoader(LibraryLoader):
+    """Loader implementing the Mac OS X dylib search order."""
+    # Filename patterns tried for a bare library name, most specific first.
+    name_formats = ["lib%s.dylib", "lib%s.so", "lib%s.bundle", "%s.dylib",
+                "%s.so", "%s.bundle", "%s"]
+
+    def getplatformpaths(self,libname):
+        # A name containing a path separator is used as-is; otherwise each
+        # name_formats pattern is tried in every candidate directory.
+        if os.path.pathsep in libname:
+            names = [libname]
+        else:
+            names = [format % libname for format in self.name_formats]
+
+        for dir in self.getdirs(libname):
+            for name in names:
+                yield os.path.join(dir,name)
+
+    def getdirs(self,libname):
+        '''Implements the dylib search as specified in Apple documentation:
+
+        http://developer.apple.com/documentation/DeveloperTools/Conceptual/
+            DynamicLibraries/Articles/DynamicLibraryUsageGuidelines.html
+
+        Before commencing the standard search, the method first checks
+        the bundle's ``Frameworks`` directory if the application is running
+        within a bundle (OS X .app).
+        '''
+
+        dyld_fallback_library_path = _environ_path("DYLD_FALLBACK_LIBRARY_PATH")
+        if not dyld_fallback_library_path:
+            dyld_fallback_library_path = [os.path.expanduser('~/lib'),
+                                          '/usr/local/lib', '/usr/lib']
+
+        dirs = []
+
+        # Names containing '/' skip LD_LIBRARY_PATH, per dyld semantics.
+        if '/' in libname:
+            dirs.extend(_environ_path("DYLD_LIBRARY_PATH"))
+        else:
+            dirs.extend(_environ_path("LD_LIBRARY_PATH"))
+            dirs.extend(_environ_path("DYLD_LIBRARY_PATH"))
+
+        dirs.extend(self.other_dirs)
+        dirs.append(".")
+
+        # py2app/frozen bundles keep dylibs in <bundle>/Frameworks.
+        if hasattr(sys, 'frozen') and sys.frozen == 'macosx_app':
+            dirs.append(os.path.join(
+                os.environ['RESOURCEPATH'],
+                '..',
+                'Frameworks'))
+
+        dirs.extend(dyld_fallback_library_path)
+
+        return dirs
+
+# Posix
+
+class PosixLibraryLoader(LibraryLoader):
+    """Generic POSIX loader: approximates ld.so's search via a lazily-built
+    cache of library-name -> path mappings."""
+    # Lazily populated by _create_ld_so_cache(); shared at class level.
+    _ld_so_cache = None
+
+    def _create_ld_so_cache(self):
+        # Recreate search path followed by ld.so.  This is going to be
+        # slow to build, and incorrect (ld.so uses ld.so.cache, which may
+        # not be up-to-date).  Used only as fallback for distros without
+        # /sbin/ldconfig.
+        #
+        # We assume the DT_RPATH and DT_RUNPATH binary sections are omitted.
+
+        directories = []
+        for name in ("LD_LIBRARY_PATH",
+                     "SHLIB_PATH", # HPUX
+                     "LIBPATH", # OS/2, AIX
+                     "LIBRARY_PATH", # BE/OS
+                    ):
+            if name in os.environ:
+                directories.extend(os.environ[name].split(os.pathsep))
+        directories.extend(self.other_dirs)
+        directories.append(".")
+
+        # /etc/ld.so.conf may be missing; ignore quietly.
+        try: directories.extend([dir.strip() for dir in open('/etc/ld.so.conf')])
+        except IOError: pass
+
+        directories.extend(['/lib', '/usr/lib', '/lib64', '/usr/lib64'])
+
+        cache = {}
+        lib_re = re.compile(r'lib(.*)\.s[ol]')
+        ext_re = re.compile(r'\.s[ol]$')
+        for dir in directories:
+            try:
+                for path in glob.glob("%s/*.s[ol]*" % dir):
+                    file = os.path.basename(path)
+
+                    # Index by filename
+                    if file not in cache:
+                        cache[file] = path
+
+                    # Index by library name
+                    match = lib_re.match(file)
+                    if match:
+                        library = match.group(1)
+                        if library not in cache:
+                            cache[library] = path
+            except OSError:
+                pass
+
+        self._ld_so_cache = cache
+
+    def getplatformpaths(self, libname):
+        if self._ld_so_cache is None:
+            self._create_ld_so_cache()
+
+        result = self._ld_so_cache.get(libname)
+        if result: yield result
+
+        path = ctypes.util.find_library(libname)
+        # NOTE(review): find_library may return a bare soname, hence the
+        # "/lib" join; an absolute result passes through unchanged
+        # (os.path.join discards the first part). Confirm intent.
+        if path: yield os.path.join("/lib",path)
+
+# Windows
+
+class _WindowsLibrary(object):
+    """One DLL loaded under both calling conventions; attribute lookup
+    tries the cdecl view first, then falls back to stdcall."""
+    def __init__(self, path):
+        self.cdll = ctypes.cdll.LoadLibrary(path)
+        self.windll = ctypes.windll.LoadLibrary(path)
+
+    def __getattr__(self, name):
+        try: return getattr(self.cdll,name)
+        except AttributeError:
+            try: return getattr(self.windll,name)
+            except AttributeError:
+                raise
+
+class WindowsLibraryLoader(LibraryLoader):
+    """Windows loader: tries path-based search first, then lets ctypes
+    resolve DLL names through the system search order."""
+    name_formats = ["%s.dll", "lib%s.dll", "%slib.dll"]
+
+    def load_library(self, libname):
+        try:
+            # First try the generic filesystem search.
+            result = LibraryLoader.load_library(self, libname)
+        except ImportError:
+            result = None
+            if os.path.sep not in libname:
+                # Bare name: let Windows resolve each name_formats variant.
+                for name in self.name_formats:
+                    try:
+                        result = getattr(ctypes.cdll, name % libname)
+                        if result:
+                            break
+                    except WindowsError:
+                        result = None
+            if result is None:
+                # Last try: the name exactly as given.
+                try:
+                    result = getattr(ctypes.cdll, libname)
+                except WindowsError:
+                    result = None
+            if result is None:
+                raise ImportError("%s not found." % libname)
+        return result
+
+    def load(self, path):
+        return _WindowsLibrary(path)
+
+    def getplatformpaths(self, libname):
+        if os.path.sep not in libname:
+            for name in self.name_formats:
+                path = ctypes.util.find_library(name % libname)
+                if path:
+                    yield path
+
+# Platform switching
+
+# If your value of sys.platform does not appear in this dict, please contact
+# the Ctypesgen maintainers.
+
+loaderclass = {
+    "darwin":   DarwinLibraryLoader,
+    "cygwin":   WindowsLibraryLoader,
+    "win32":    WindowsLibraryLoader
+}
+
+# Any platform not listed above falls back to the generic POSIX loader.
+loader = loaderclass.get(sys.platform, PosixLibraryLoader)()
+
+def add_library_search_dirs(other_dirs):
+    # Register extra directories consulted by the module-level loader.
+    loader.other_dirs = other_dirs
+
+# Module-level convenience alias bound to the selected loader instance.
+load_library = loader.load_library
+
+del loaderclass
+
+# End loader

Added: grass/branches/develbranch_6/lib/python/ctypes/fix.sed
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/fix.sed	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/fix.sed	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,7 @@
+#!/usr/bin/sed -f
+# Rewrite a ctypesgen-generated module to import the shared preamble and
+# loader modules instead of carrying its own inlined copies: append the
+# imports after the "# End loader" marker, then delete both inlined
+# sections (preamble and loader) delimited by their Begin/End markers.
+/^# End loader$/a\
+from ctypes_preamble import *\
+from ctypes_preamble import _variadic_function\
+from ctypes_loader import *
+/^# Begin preamble$/,/^# End preamble$/d
+/^# Begin loader$/,/^# End loader$/d

Added: grass/branches/develbranch_6/lib/python/ctypes/loader.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/loader.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/loader.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,248 @@
+# ----------------------------------------------------------------------------
+# Copyright (c) 2008 David James
+# Copyright (c) 2006-2008 Alex Holkner
+# All rights reserved.
+# 
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions 
+# are met:
+#
+#  * Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#  * Redistributions in binary form must reproduce the above copyright 
+#    notice, this list of conditions and the following disclaimer in
+#    the documentation and/or other materials provided with the
+#    distribution.
+#  * Neither the name of pyglet nor the names of its
+#    contributors may be used to endorse or promote products
+#    derived from this software without specific prior written
+#    permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+# ----------------------------------------------------------------------------
+
+import os.path, re, sys, glob
+import ctypes
+import ctypes.util
+
+def _environ_path(name):
+    # Split a PATH-like environment variable into a list of entries;
+    # returns [] when the variable is unset.
+    # NOTE(review): splits on a literal ":" rather than os.pathsep, so a
+    # ";"-separated value on Windows would not be split -- confirm intended.
+    if name in os.environ:
+        return os.environ[name].split(":")
+    else:
+        return []
+
+class LibraryLoader(object):
+    """Base class: locate a shared library by name and load it via ctypes.
+
+    Subclasses override getplatformpaths() (and optionally load()) with
+    platform-specific candidate-path generation.
+    """
+
+    def __init__(self):
+        # Extra directories to search; set via add_library_search_dirs().
+        self.other_dirs=[]
+    
+    def load_library(self,libname):
+        """Given the name of a library, load it.
+
+        Tries each candidate path from getpaths() and loads the first one
+        that exists on disk; raises ImportError if none does.
+        """
+        paths = self.getpaths(libname)
+        
+        for path in paths:
+            if os.path.exists(path):
+                return self.load(path)
+        
+        raise ImportError,"%s not found." % libname
+    
+    def load(self,path):
+        """Given a path to a library, load it.
+
+        OSError from dlopen is re-raised as ImportError so callers only
+        need to handle one exception type.
+        """
+        try:
+            # Darwin requires dlopen to be called with mode RTLD_GLOBAL instead
+            # of the default RTLD_LOCAL.  Without this, you end up with
+            # libraries not being loadable, resulting in "Symbol not found"
+            # errors
+            if sys.platform == 'darwin':
+                return ctypes.CDLL(path, ctypes.RTLD_GLOBAL)
+            else:
+                return ctypes.cdll.LoadLibrary(path)
+        except OSError,e:
+            raise ImportError,e
+    
+    def getpaths(self,libname):
+        """Return a list of paths where the library might be found."""
+        # Absolute names are yielded as-is; otherwise yield the platform
+        # specific candidates first, then fall back to find_library().
+        if os.path.isabs(libname):
+            yield libname
+        
+        else:
+            for path in self.getplatformpaths(libname):
+                yield path
+            
+            path = ctypes.util.find_library(libname)
+            if path: yield path
+    
+    def getplatformpaths(self, libname):
+        # Default: no platform-specific candidates; subclasses override.
+        return []
+
+# Darwin (Mac OS X)
+
+class DarwinLibraryLoader(LibraryLoader):
+    """Mac OS X loader: tries dylib/so/bundle name variants across the
+    dyld search directories."""
+
+    # Candidate file-name patterns, tried in order for a bare name.
+    name_formats = ["lib%s.dylib", "lib%s.so", "lib%s.bundle", "%s.dylib",
+                "%s.so", "%s.bundle", "%s"]
+    
+    def getplatformpaths(self,libname):
+        # Yield every (directory, name-variant) combination.
+        # NOTE(review): this tests os.path.pathsep (":"), not os.path.sep
+        # ("/"), to decide whether libname is already a path -- looks like
+        # it should be os.path.sep; confirm.
+        if os.path.pathsep in libname:
+            names = [libname]
+        else:
+            names = [format % libname for format in self.name_formats]
+        
+        for dir in self.getdirs(libname):
+            for name in names:
+                yield os.path.join(dir,name)
+    
+    def getdirs(self,libname):
+        '''Implements the dylib search as specified in Apple documentation:
+        
+        http://developer.apple.com/documentation/DeveloperTools/Conceptual/
+            DynamicLibraries/Articles/DynamicLibraryUsageGuidelines.html
+
+        Before commencing the standard search, the method first checks
+        the bundle's ``Frameworks`` directory if the application is running
+        within a bundle (OS X .app).
+        '''
+
+        dyld_fallback_library_path = _environ_path("DYLD_FALLBACK_LIBRARY_PATH")
+        if not dyld_fallback_library_path:
+            dyld_fallback_library_path = [os.path.expanduser('~/lib'),
+                                          '/usr/local/lib', '/usr/lib']
+        
+        dirs = []
+        
+        # Names containing "/" search DYLD_LIBRARY_PATH only; bare names
+        # additionally consult LD_LIBRARY_PATH.
+        if '/' in libname:
+            dirs.extend(_environ_path("DYLD_LIBRARY_PATH"))
+        else:
+            dirs.extend(_environ_path("LD_LIBRARY_PATH"))
+            dirs.extend(_environ_path("DYLD_LIBRARY_PATH"))
+
+        dirs.extend(self.other_dirs)
+        dirs.append(".")
+        
+        # py2app-frozen apps: also look in the bundle's Frameworks dir.
+        if hasattr(sys, 'frozen') and sys.frozen == 'macosx_app':
+            dirs.append(os.path.join(
+                os.environ['RESOURCEPATH'],
+                '..',
+                'Frameworks'))
+
+        # Fallback path comes last, mirroring dyld's own search order.
+        dirs.extend(dyld_fallback_library_path)
+        
+        return dirs
+
+# Posix
+
+class PosixLibraryLoader(LibraryLoader):
+    """Generic Unix loader: builds a one-shot filename cache by globbing
+    the directories ld.so would search."""
+
+    # Lazily-built {filename-or-libname: path} cache; None until the
+    # first getplatformpaths() call on an instance populates it.
+    _ld_so_cache = None
+    
+    def _create_ld_so_cache(self):
+        # Recreate search path followed by ld.so.  This is going to be
+        # slow to build, and incorrect (ld.so uses ld.so.cache, which may
+        # not be up-to-date).  Used only as fallback for distros without
+        # /sbin/ldconfig.
+        #
+        # We assume the DT_RPATH and DT_RUNPATH binary sections are omitted.
+
+        directories = []
+        for name in ("LD_LIBRARY_PATH",
+                     "SHLIB_PATH", # HPUX
+                     "LIBPATH", # OS/2, AIX
+                     "LIBRARY_PATH", # BE/OS
+                    ):
+            if name in os.environ:
+                directories.extend(os.environ[name].split(os.pathsep))
+        directories.extend(self.other_dirs)
+        directories.append(".")
+
+        # /etc/ld.so.conf lists additional system library directories.
+        # NOTE(review): "include" directives in ld.so.conf are not followed.
+        try: directories.extend([dir.strip() for dir in open('/etc/ld.so.conf')])
+        except IOError: pass
+
+        directories.extend(['/lib', '/usr/lib', '/lib64', '/usr/lib64'])
+
+        cache = {}
+        lib_re = re.compile(r'lib(.*)\.s[ol]')
+        # NOTE(review): ext_re is compiled but never used below.
+        ext_re = re.compile(r'\.s[ol]$')
+        for dir in directories:
+            try:
+                for path in glob.glob("%s/*.s[ol]*" % dir):
+                    file = os.path.basename(path)
+
+                    # Index by filename
+                    if file not in cache:
+                        cache[file] = path
+                    
+                    # Index by library name
+                    match = lib_re.match(file)
+                    if match:
+                        library = match.group(1)
+                        if library not in cache:
+                            cache[library] = path
+            except OSError:
+                pass
+
+        self._ld_so_cache = cache
+    
+    def getplatformpaths(self, libname):
+        # Build the cache on first use, then yield the cached hit (if any)
+        # followed by whatever find_library() resolves.
+        if self._ld_so_cache is None:
+            self._create_ld_so_cache()
+
+        result = self._ld_so_cache.get(libname)
+        if result: yield result
+
+        # NOTE(review): find_library typically returns a bare soname
+        # (e.g. "libfoo.so.6"); prefixing "/lib" here is a guess at the
+        # directory and may yield a non-existent path -- confirm intended.
+        path = ctypes.util.find_library(libname)
+        if path: yield os.path.join("/lib",path)
+
+# Windows
+
+class _WindowsLibrary(object):
+    """Load a DLL through both ctypes.cdll and ctypes.windll so a symbol
+    can be resolved under either calling convention."""
+
+    def __init__(self, path):
+        # Two handles to the same DLL: cdecl first, stdcall second.
+        self.cdll = ctypes.cdll.LoadLibrary(path)
+        self.windll = ctypes.windll.LoadLibrary(path)
+
+    def __getattr__(self, name):
+        # Try the cdecl view first, then stdcall; re-raise the final
+        # AttributeError if neither exports the symbol.
+        try: return getattr(self.cdll,name)
+        except AttributeError:
+            try: return getattr(self.windll,name)
+            except AttributeError:
+                raise
+
+class WindowsLibraryLoader(LibraryLoader):
+    """Windows/Cygwin loader: resolves DLLs by name via find_library."""
+
+    # Candidate DLL file-name patterns, tried in order.
+    name_formats = ["%s.dll", "lib%s.dll"]
+    
+    def load(self, path):
+        # Wrap the DLL so both cdecl and stdcall symbols resolve.
+        return _WindowsLibrary(path)
+    
+    def getplatformpaths(self, libname):
+        # For a bare name, yield whatever find_library() resolves for each
+        # naming pattern; names containing a path separator yield nothing
+        # here (handled by the base class getpaths()).
+        if os.path.sep not in libname:
+            for name in self.name_formats:
+                path = ctypes.util.find_library(name % libname)
+                if path:
+                    yield path
+
+# Platform switching
+
+# If your value of sys.platform does not appear in this dict, please contact
+# the Ctypesgen maintainers.
+
+# Map sys.platform to a loader class; any platform not listed (e.g.
+# "linux2") falls back to the generic PosixLibraryLoader.
+loaderclass = {
+    "darwin":   DarwinLibraryLoader,
+    "cygwin":   WindowsLibraryLoader,
+    "win32":    WindowsLibraryLoader
+}
+
+# Single module-level loader instance used by the generated bindings.
+loader = loaderclass.get(sys.platform, PosixLibraryLoader)()
+
+def add_library_search_dirs(other_dirs):
+    # Set the extra search directories on the shared loader instance.
+    # Note this REPLACES any previously registered dirs, it does not append.
+    loader.other_dirs = other_dirs
+
+# Convenience alias so callers can do "load_library(name)" directly.
+load_library = loader.load_library
+
+del loaderclass

Added: grass/branches/develbranch_6/lib/python/ctypes/preamble.py
===================================================================
--- grass/branches/develbranch_6/lib/python/ctypes/preamble.py	                        (rev 0)
+++ grass/branches/develbranch_6/lib/python/ctypes/preamble.py	2010-07-28 08:43:53 UTC (rev 42916)
@@ -0,0 +1,72 @@
+import ctypes, os, sys
+from ctypes import *
+
+# Pick an integer type whose size matches c_size_t to serve as
+# c_ptrdiff_t (ctypes itself does not define one).
+_int_types = (c_int16, c_int32)
+if hasattr(ctypes, 'c_int64'):
+    # Some builds of ctypes apparently do not have c_int64
+    # defined; it's a pretty good bet that these builds do not
+    # have 64-bit pointers.
+    _int_types += (c_int64,)
+for t in _int_types:
+    if sizeof(t) == sizeof(c_size_t):
+        c_ptrdiff_t = t
+# Clean up the loop temporaries so they don't leak from the preamble.
+del t
+del _int_types
+
+class c_void(Structure):
+    """Stand-in for C void, used as a pointer target via POINTER(c_void)."""
+    # c_void_p is a buggy return type, converting to int, so
+    # POINTER(None) == c_void_p is actually written as
+    # POINTER(c_void), so it can be treated as a real pointer.
+    _fields_ = [('dummy', c_int)]
+
+def POINTER(obj):
+    """Wrapper around ctypes.POINTER that patches the pointer class so
+    passing None yields a real NULL pointer instance.
+
+    Intentionally shadows the POINTER name brought in by the star-import
+    above, so later code in the generated module uses this version.
+    """
+    p = ctypes.POINTER(obj)
+
+    # Convert None to a real NULL pointer to work around bugs
+    # in how ctypes handles None on 64-bit platforms
+    if not isinstance(p.from_param, classmethod):
+        def from_param(cls, x):
+            # cls() is a default-constructed (NULL) pointer instance.
+            if x is None:
+                return cls()
+            else:
+                return x
+        p.from_param = classmethod(from_param)
+
+    return p
+
+# C string aliases -- presumably what the generated bindings use for
+# "char *" arguments and returns; confirm against the printer templates.
+String = c_char_p
+ReturnString = c_char_p
+
+# As of ctypes 1.0, ctypes does not support custom error-checking
+# functions on callbacks, nor does it support custom datatypes on
+# callbacks, so we must ensure that all callbacks return
+# primitive datatypes.
+#
+# Non-primitive return values wrapped with UNCHECKED won't be
+# typechecked, and will be converted to c_void_p.
+def UNCHECKED(type):
+    # Pass primitive ctypes types through unchanged (those whose _type_
+    # is a single-character code other than "P"); anything else -- struct
+    # pointers, c_void_p itself -- is degraded to c_void_p.
+    if (hasattr(type, "_type_") and isinstance(type._type_, str)
+        and type._type_ != "P"):
+        return type
+    else:
+        return c_void_p
+
+# ctypes doesn't have direct support for variadic functions, so we have to write
+# our own wrapper class
+class _variadic_function(object):
+    """Wrapper for a variadic C function: converts the declared (fixed)
+    arguments via their from_param hooks and passes the variadic tail
+    through unchanged."""
+    def __init__(self,func,restype,argtypes):
+        # Note: restype is set on the shared ctypes function object, and
+        # argtypes is kept here rather than assigned to func, so ctypes
+        # does not reject the extra variadic arguments.
+        self.func=func
+        self.func.restype=restype
+        self.argtypes=argtypes
+    def _as_parameter_(self):
+        # So we can pass this variadic function as a function pointer
+        # NOTE(review): ctypes reads _as_parameter_ as an attribute, not a
+        # method, so the bound method object itself would be passed here;
+        # confirm this behaves as intended.
+        return self.func
+    def __call__(self,*args):
+        fixed_args=[]
+        i=0
+        for argtype in self.argtypes:
+            # Typecheck what we can
+            fixed_args.append(argtype.from_param(args[i]))
+            i+=1
+        # Append any remaining (variadic) arguments unconverted.
+        return self.func(*fixed_args+list(args[i:]))
+



More information about the grass-commit mailing list