浏览代码

ctypes: change syntax to support both python2 and python3

git-svn-id: https://svn.osgeo.org/grass/grass/trunk@68351 15284696-431f-4ddb-bdfa-cd5b030d7da7
Pietro Zambelli 9 年之前
父节点
当前提交
2e73aa974c

+ 1 - 1
lib/python/ctypes/ctypesgen.py

@@ -16,7 +16,7 @@ import optparse
 import sys
 
 import ctypesgencore
-import ctypesgencore.messages as msgs
+from ctypesgencore import messages as msgs
 
 
 def option_callback_W(option, opt, value, parser):

+ 8 - 8
lib/python/ctypes/ctypesgencore/__init__.py

@@ -48,15 +48,15 @@ __all__ = ["parser", "processor", "printer",
            "messages", "options"]
 
 # Workhorse modules
-import parser
-import processor
-import printer
+from . import parser
+from . import processor
+from . import printer
 
 # Modules describing internal format
-import descriptions
-import ctypedescs
-import expressions
+from . import descriptions
+from . import ctypedescs
+from . import expressions
 
 # Helper modules
-import messages
-import options
+from . import messages
+from . import options

+ 1 - 1
lib/python/ctypes/ctypesgencore/expressions.py

@@ -8,7 +8,7 @@ which returns a Python string representing that expression.
 
 import keyword
 
-from ctypedescs import *
+from .ctypedescs import *
 
 
 # Right now, the objects in this module are all oriented toward evaluation.

+ 4 - 3
lib/python/ctypes/ctypesgencore/messages.py

@@ -19,6 +19,7 @@ Warning classes are:
 'rename' - a description has been renamed to avoid a name conflict
 'other' - catchall.
 """
+from __future__ import print_function
 
 import sys
 
@@ -26,12 +27,12 @@ __all__ = ["error_message", "warning_message", "status_message"]
 
 
 def error_message(msg, cls=None):
-    print "Error: %s" % msg
+    print("Error: %s" % msg)
 
 
 def warning_message(msg, cls=None):
-    print "Warning: %s" % msg
+    print("Warning: %s" % msg)
 
 
 def status_message(msg):
-    print "Status: %s" % msg
+    print("Status: %s" % msg)

+ 1 - 1
lib/python/ctypes/ctypesgencore/parser/__init__.py

@@ -14,7 +14,7 @@ for more information.
 
 """
 
-from datacollectingparser import DataCollectingParser
+from .datacollectingparser import DataCollectingParser
 
 
 def parse(headers, options):

+ 10 - 4
lib/python/ctypes/ctypesgencore/parser/cgrammar.py

@@ -10,6 +10,11 @@ Reference is C99:
   * http://www.open-std.org/JTC1/SC22/WG14/www/docs/n1124.pdf
 
 '''
+try:
+    from builtins import long
+except ImportError:
+    # python3
+    long = int
 
 __docformat__ = 'restructuredtext'
 
@@ -20,11 +25,11 @@ import sys
 import time
 import warnings
 
-import cdeclarations
+from . import cdeclarations
 import ctypesgencore.expressions as expressions
-import ctypesparser
-import preprocessor
-import yacc
+from . import preprocessor
+from . import yacc
+
 
 tokens = (
     'PP_DEFINE', 'PP_DEFINE_NAME', 'PP_DEFINE_MACRO_NAME', 'PP_MACRO_PARAM',
@@ -906,6 +911,7 @@ def p_type_name(p):
     '''type_name : specifier_qualifier_list
                  | specifier_qualifier_list abstract_declarator
     '''
+    from . import ctypesparser
     typ = p[1]
     if len(p) == 3:
         declarator = p[2]

+ 12 - 10
lib/python/ctypes/ctypesgencore/parser/cparser.py

@@ -6,6 +6,8 @@ Parse a C source file.
 To use, subclass CParser and override its handle_* methods.  Then instantiate
 the class with a string to parse.
 '''
+from __future__ import print_function
+
 
 __docformat__ = 'restructuredtext'
 
@@ -16,10 +18,10 @@ import sys
 import time
 import warnings
 
-import cdeclarations
-import cgrammar
-import preprocessor
-import yacc
+from . import cdeclarations
+from . import cgrammar
+from . import preprocessor
+from . import yacc
 
 
 # --------------------------------------------------------------------------
@@ -131,7 +133,7 @@ class CParser(object):
         The parser will try to recover from errors by synchronising at the
         next semicolon.
         '''
-        print >> sys.stderr, '%s:%s %s' % (filename, lineno, message)
+        print('%s:%s %s' % (filename, lineno, message), file=sys.stderr)
 
     def handle_pp_error(self, message):
         '''The C preprocessor emitted an error.
@@ -139,14 +141,14 @@ class CParser(object):
         The default implementatin prints the error to stderr. If processing
         can continue, it will.
         '''
-        print >> sys.stderr, 'Preprocessor:', message
+        print('Preprocessor:', message, file=sys.stderr)
 
     def handle_status(self, message):
         '''Progress information.
 
         The default implementationg prints message to stderr.
         '''
-        print >> sys.stderr, message
+        print(message, file=sys.stderr)
 
     def handle_define(self, name, params, value, filename, lineno):
         '''#define `name` `value`
@@ -202,13 +204,13 @@ class DebugCParser(CParser):
     '''
 
     def handle_define(self, name, value, filename, lineno):
-        print '#define name=%r, value=%r' % (name, value)
+        print('#define name=%r, value=%r' % (name, value))
 
     def handle_define_constant(self, name, value, filename, lineno):
-        print '#define constant name=%r, value=%r' % (name, value)
+        print('#define constant name=%r, value=%r' % (name, value))
 
     def handle_declaration(self, declaration, filename, lineno):
-        print declaration
+        print(declaration)
 
 if __name__ == '__main__':
     DebugCParser().parse(sys.argv[1], debug=True)

+ 2 - 2
lib/python/ctypes/ctypesgencore/parser/ctypesparser.py

@@ -12,8 +12,8 @@ __docformat__ = 'restructuredtext'
 
 __all__ = ["CtypesParser"]
 
-from cdeclarations import *
-from cparser import *
+from .cdeclarations import *
+from .cparser import *
 from ctypesgencore.ctypedescs import *
 from ctypesgencore.expressions import *
 

+ 5 - 3
lib/python/ctypes/ctypesgencore/parser/datacollectingparser.py

@@ -6,11 +6,13 @@ objects from the CtypesType objects and other information from CtypesParser.
 After parsing is complete, a DescriptionCollection object can be retrieved by
 calling DataCollectingParser.data().
 """
+from __future__ import print_function
+
 
 import os
 from tempfile import mkstemp
 
-import ctypesparser
+from . import ctypesparser
 from ctypesgencore.ctypedescs import *
 from ctypesgencore.descriptions import *
 from ctypesgencore.expressions import *
@@ -64,9 +66,9 @@ class DataCollectingParser(ctypesparser.CtypesParser,
         fd, fname = mkstemp(suffix=".h")
         f = os.fdopen(fd, 'w+b')
         for header in self.options.other_headers:
-            print >>f, '#include <%s>' % header
+            print('#include <%s>' % header, file=f)
         for header in self.headers:
-            print >>f, '#include "%s"' % os.path.abspath(header)
+            print('#include "%s"' % os.path.abspath(header), file=f)
         f.flush()
         f.close()
         ctypesparser.CtypesParser.parse(self, fname, None)

+ 89 - 55
lib/python/ctypes/ctypesgencore/parser/lex.py

@@ -23,13 +23,52 @@
 #
 # See the file LICENSE for a complete copy of the LGPL.
 #-----------------------------------------------------------------------------
+from __future__ import print_function
 
 __version__ = "2.2"
 
+
+try:
+    from builtins import bytes
+    PY3 = True
+except ImportError:
+    # python2 (no 'builtins' module without the 'future' package)
+    PY3 = False
+    bytes = str
+
+
+import operator
 import os.path
 import re
 import sys
 import types
+import collections
+
+
+if PY3:
+    _meth_func = "__func__"
+    _meth_self = "__self__"
+
+    _func_closure = "__closure__"
+    _func_code = "__code__"
+    _func_defaults = "__defaults__"
+    _func_globals = "__globals__"
+else:
+    _meth_func = "im_func"
+    _meth_self = "im_self"
+
+    _func_closure = "func_closure"
+    _func_code = "func_code"
+    _func_defaults = "func_defaults"
+    _func_globals = "func_globals"
+
+# define compatible function to support PY2 & PY3
+get_mth_func = operator.attrgetter(_meth_func)
+get_mth_self = operator.attrgetter(_meth_self)
+get_func_closure = operator.attrgetter(_func_closure)
+get_func_code = operator.attrgetter(_func_code)
+get_func_defaults = operator.attrgetter(_func_defaults)
+get_func_globals = operator.attrgetter(_func_globals)
+
 
 # Regular expression used to match valid token names
 _is_identifier = re.compile(r'^[a-zA-Z0-9_]+$')
@@ -38,13 +77,8 @@ _is_identifier = re.compile(r'^[a-zA-Z0-9_]+$')
 # It's a little funky because I want to preserve backwards compatibility
 # with Python 2.0 where types.ObjectType is undefined.
 
-try:
-    _INSTANCETYPE = (types.InstanceType, types.ObjectType)
-except AttributeError:
-    _INSTANCETYPE = types.InstanceType
+_INSTANCETYPE = getattr(types, 'InstanceType', object)
 
-    class object:
-        pass       # Note: needed if no new-style classes present
 
 # Exception thrown when invalid token encountered and no default error
 # handler is defined.
@@ -192,7 +226,7 @@ class Lexer:
     # readtab() - Read lexer information from a tab file
     # ------------------------------------------------------------
     def readtab(self, tabfile, fdict):
-        exec "import %s as lextab" % tabfile
+        # exec() cannot rebind function locals in Python 3; use __import__
+        lextab = __import__(tabfile, None, None, ['*'])
         self.lextokens = lextab._lextokens
         self.lexreflags = lextab._lexreflags
         self.lexliterals = lextab._lexliterals
@@ -220,7 +254,7 @@ class Lexer:
     # input() - Push a new string into the lexer
     # ------------------------------------------------------------
     def input(self, s):
-        if not (isinstance(s, types.StringType) or isinstance(s, types.UnicodeType)):
+        if not (isinstance(s, bytes) or isinstance(s, str)):
             raise ValueError("Expected a string")
         self.lexdata = s
         self.lexpos = 0
@@ -313,7 +347,7 @@ class Lexer:
                     break
 
                 # if func not callable, it means it's an ignored token
-                if not callable(func):
+                if not isinstance(func, collections.Callable):
                     break
 
                 # If token is processed by a function, call it
@@ -335,7 +369,7 @@ class Lexer:
                     # 20/Jan/2007
                     if newtok.type not in self.lextokens and len(newtok.type) > 1:
                         raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % (
-                            func.func_code.co_filename, func.func_code.co_firstlineno,
+                            get_func_code(func).co_filename, get_func_code(func).co_firstlineno,
                             func.__name__, newtok.type), lexdata[lexpos:])
 
                 return newtok
@@ -420,7 +454,7 @@ def _validate_file(filename):
             if not prev:
                 counthash[name] = linen
             else:
-                print "%s:%d: Rule %s redefined. Previously defined on line %d" % (filename, linen, name, prev)
+                print("%s:%d: Rule %s redefined. Previously defined on line %d" % (filename, linen, name, prev))
                 noerror = 0
         linen += 1
     return noerror
@@ -486,7 +520,7 @@ def _form_master_re(relist, reflags, ldict):
                 # callback function to carry out the action
                 if f.find("ignore_") > 0:
                     lexindexfunc[i] = (None, None)
-                    print "IGNORE", f
+                    print("IGNORE", f)
                 else:
                     lexindexfunc[i] = (None, f[2:])
 
@@ -601,7 +635,7 @@ def lex(module=None, object=None, debug=0, optimize=0,
 
     if not tokens:
         raise SyntaxError("lex: module does not define 'tokens'")
-    if not (isinstance(tokens, types.ListType) or isinstance(tokens, types.TupleType)):
+    if not (isinstance(tokens, list) or isinstance(tokens, tuple)):
         raise SyntaxError("lex: tokens must be a list or tuple.")
 
     # Build a dictionary of valid token names
@@ -609,54 +643,54 @@ def lex(module=None, object=None, debug=0, optimize=0,
     if not optimize:
         for n in tokens:
             if not _is_identifier.match(n):
-                print "lex: Bad token name '%s'" % n
+                print("lex: Bad token name '%s'" % n)
                 error = 1
             if warn and n in lexobj.lextokens:
-                print "lex: Warning. Token '%s' multiply defined." % n
+                print("lex: Warning. Token '%s' multiply defined." % n)
             lexobj.lextokens[n] = None
     else:
         for n in tokens:
             lexobj.lextokens[n] = None
 
     if debug:
-        print "lex: tokens = '%s'" % lexobj.lextokens.keys()
+        print("lex: tokens = '%s'" % list(lexobj.lextokens.keys()))
 
     try:
         for c in literals:
-            if not (isinstance(c, types.StringType) or isinstance(
-                    c, types.UnicodeType)) or len(c) > 1:
-                print "lex: Invalid literal %s. Must be a single character" % repr(c)
+            if not (isinstance(c, bytes) or isinstance(
+                    c, str)) or len(c) > 1:
+                print("lex: Invalid literal %s. Must be a single character" % repr(c))
                 error = 1
                 continue
 
     except TypeError:
-        print "lex: Invalid literals specification. literals must be a sequence of characters."
+        print("lex: Invalid literals specification. literals must be a sequence of characters.")
         error = 1
 
     lexobj.lexliterals = literals
 
     # Build statemap
     if states:
-        if not (isinstance(states, types.TupleType) or isinstance(states, types.ListType)):
-            print "lex: states must be defined as a tuple or list."
+        if not (isinstance(states, tuple) or isinstance(states, list)):
+            print("lex: states must be defined as a tuple or list.")
             error = 1
         else:
             for s in states:
-                if not isinstance(s, types.TupleType) or len(s) != 2:
-                    print "lex: invalid state specifier %s. Must be a tuple (statename,'exclusive|inclusive')" % repr(s)
+                if not isinstance(s, tuple) or len(s) != 2:
+                    print("lex: invalid state specifier %s. Must be a tuple (statename,'exclusive|inclusive')" % repr(s))
                     error = 1
                     continue
                 name, statetype = s
-                if not isinstance(name, types.StringType):
+                if not isinstance(name, (bytes, str)):
+                if not isinstance(name, bytes):
+                    print("lex: state name %s must be a string" % repr(name))
                     error = 1
                     continue
                 if not (statetype == 'inclusive' or statetype == 'exclusive'):
-                    print "lex: state type for state %s must be 'inclusive' or 'exclusive'" % name
+                    print("lex: state type for state %s must be 'inclusive' or 'exclusive'" % name)
                     error = 1
                     continue
                 if name in stateinfo:
-                    print "lex: state '%s' already defined." % name
+                    print("lex: state '%s' already defined." % name)
                     error = 1
                     continue
                 stateinfo[name] = statetype
@@ -685,19 +719,19 @@ def lex(module=None, object=None, debug=0, optimize=0,
         states, tokname = _statetoken(f, stateinfo)
         toknames[f] = tokname
 
-        if callable(t):
+        if isinstance(t, collections.Callable):
             for s in states:
                 funcsym[s].append((f, t))
-        elif (isinstance(t, types.StringType) or isinstance(t, types.UnicodeType)):
+        elif (isinstance(t, bytes) or isinstance(t, str)):
             for s in states:
                 strsym[s].append((f, t))
         else:
-            print "lex: %s not defined as a function or string" % f
+            print("lex: %s not defined as a function or string" % f)
             error = 1
 
     # Sort the functions by line number
     for f in funcsym.values():
-        f.sort(lambda x, y: cmp(x[1].func_code.co_firstlineno, y[1].func_code.co_firstlineno))
+        f.sort(key=lambda x: get_func_code(x[1]).co_firstlineno)
 
     # Sort the strings by regular expression length
     for s in strsym.values():
@@ -711,31 +745,31 @@ def lex(module=None, object=None, debug=0, optimize=0,
 
         # Add rules defined by functions first
         for fname, f in funcsym[state]:
-            line = f.func_code.co_firstlineno
-            file = f.func_code.co_filename
+            line = get_func_code(f).co_firstlineno
+            file = get_func_code(f).co_filename
             files[file] = None
             tokname = toknames[fname]
 
             ismethod = isinstance(f, types.MethodType)
 
             if not optimize:
-                nargs = f.func_code.co_argcount
+                nargs = get_func_code(f).co_argcount
                 if ismethod:
                     reqargs = 2
                 else:
                     reqargs = 1
                 if nargs > reqargs:
-                    print "%s:%d: Rule '%s' has too many arguments." % (file, line, f.__name__)
+                    print("%s:%d: Rule '%s' has too many arguments." % (file, line, f.__name__))
                     error = 1
                     continue
 
                 if nargs < reqargs:
-                    print "%s:%d: Rule '%s' requires an argument." % (file, line, f.__name__)
+                    print("%s:%d: Rule '%s' requires an argument." % (file, line, f.__name__))
                     error = 1
                     continue
 
                 if tokname == 'ignore':
-                    print "%s:%d: Rule '%s' must be defined as a string." % (file, line, f.__name__)
+                    print("%s:%d: Rule '%s' must be defined as a string." % (file, line, f.__name__))
                     error = 1
                     continue
 
@@ -748,25 +782,25 @@ def lex(module=None, object=None, debug=0, optimize=0,
                     try:
                         c = re.compile("(?P<%s>%s)" % (f.__name__, f.__doc__), re.VERBOSE | reflags)
                         if c.match(""):
-                            print "%s:%d: Regular expression for rule '%s' matches empty string." % (file, line, f.__name__)
+                            print("%s:%d: Regular expression for rule '%s' matches empty string." % (file, line, f.__name__))
                             error = 1
                             continue
                     except re.error as e:
-                        print "%s:%d: Invalid regular expression for rule '%s'. %s" % (file, line, f.__name__, e)
+                        print("%s:%d: Invalid regular expression for rule '%s'. %s" % (file, line, f.__name__, e))
                         if '#' in f.__doc__:
-                            print "%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'." % (file, line, f.__name__)
+                            print("%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'." % (file, line, f.__name__))
                         error = 1
                         continue
 
                     if debug:
-                        print "lex: Adding rule %s -> '%s' (state '%s')" % (f.__name__, f.__doc__, state)
+                        print("lex: Adding rule %s -> '%s' (state '%s')" % (f.__name__, f.__doc__, state))
 
                 # Okay. The regular expression seemed okay.  Let's append it to the master regular
                 # expression we're building
 
                 regex_list.append("(?P<%s>%s)" % (f.__name__, f.__doc__))
             else:
-                print "%s:%d: No regular expression defined for rule '%s'" % (file, line, f.__name__)
+                print("%s:%d: No regular expression defined for rule '%s'" % (file, line, f.__name__))
 
         # Now add all of the simple rules
         for name, r in strsym[state]:
@@ -783,29 +817,29 @@ def lex(module=None, object=None, debug=0, optimize=0,
                     continue
 
                 if tokname not in lexobj.lextokens and tokname.find("ignore_") < 0:
-                    print "lex: Rule '%s' defined for an unspecified token %s." % (name, tokname)
+                    print("lex: Rule '%s' defined for an unspecified token %s." % (name, tokname))
                     error = 1
                     continue
                 try:
                     c = re.compile("(?P<%s>%s)" % (name, r), re.VERBOSE | reflags)
                     if (c.match("")):
-                        print "lex: Regular expression for rule '%s' matches empty string." % name
+                        print("lex: Regular expression for rule '%s' matches empty string." % name)
                         error = 1
                         continue
                 except re.error as e:
-                    print "lex: Invalid regular expression for rule '%s'. %s" % (name, e)
+                    print("lex: Invalid regular expression for rule '%s'. %s" % (name, e))
                     if '#' in r:
-                        print "lex: Make sure '#' in rule '%s' is escaped with '\\#'." % name
+                        print("lex: Make sure '#' in rule '%s' is escaped with '\\#'." % name)
 
                     error = 1
                     continue
                 if debug:
-                    print "lex: Adding rule %s -> '%s' (state '%s')" % (name, r, state)
+                    print("lex: Adding rule %s -> '%s' (state '%s')" % (name, r, state))
 
             regex_list.append("(?P<%s>%s)" % (name, r))
 
         if not regex_list:
-            print "lex: No rules defined for state '%s'" % state
+            print("lex: No rules defined for state '%s'" % state)
             error = 1
 
         regexs[state] = regex_list
@@ -829,7 +863,7 @@ def lex(module=None, object=None, debug=0, optimize=0,
         lexobj.lexstateretext[state] = re_text
         if debug:
             for i in range(len(re_text)):
-                print "lex: state '%s'. regex[%d] = '%s'" % (state, i, re_text[i])
+                print("lex: state '%s'. regex[%d] = '%s'" % (state, i, re_text[i]))
 
     # For inclusive states, we need to add the INITIAL state
     for state, type in stateinfo.items():
@@ -849,15 +883,15 @@ def lex(module=None, object=None, debug=0, optimize=0,
     lexobj.lexstateerrorf = errorf
     lexobj.lexerrorf = errorf.get("INITIAL", None)
     if warn and not lexobj.lexerrorf:
-        print "lex: Warning. no t_error rule is defined."
+        print("lex: Warning. no t_error rule is defined.")
 
     # Check state information for ignore and error rules
     for s, stype in stateinfo.items():
         if stype == 'exclusive':
             if warn and s not in errorf:
-                print "lex: Warning. no error rule is defined for exclusive state '%s'" % s
+                print("lex: Warning. no error rule is defined for exclusive state '%s'" % s)
             if warn and s not in ignore and lexobj.lexignore:
-                print "lex: Warning. no ignore rule is defined for exclusive state '%s'" % s
+                print("lex: Warning. no ignore rule is defined for exclusive state '%s'" % s)
         elif stype == 'inclusive':
             if s not in errorf:
                 errorf[s] = errorf.get("INITIAL", None)
@@ -890,7 +924,7 @@ def runmain(lexer=None, data=None):
             data = f.read()
             f.close()
         except IndexError:
-            print "Reading from standard input (type EOF to end):"
+            print("Reading from standard input (type EOF to end):")
             data = sys.stdin.read()
 
     if lexer:
@@ -907,7 +941,7 @@ def runmain(lexer=None, data=None):
         tok = _token()
         if not tok:
             break
-        print "(%s,%r,%d,%d)" % (tok.type, tok.value, tok.lineno, tok.lexpos)
+        print("(%s,%r,%d,%d)" % (tok.type, tok.value, tok.lineno, tok.lexpos))
 
 
 # -----------------------------------------------------------------------------

文件差异内容过多而无法显示
+ 267 - 267
lib/python/ctypes/ctypesgencore/parser/parsetab.py


+ 15 - 4
lib/python/ctypes/ctypesgencore/parser/pplexer.py

@@ -10,6 +10,14 @@ Reference is C99:
 
 __docformat__ = 'restructuredtext'
 
+try:
+    from builtins import long
+    PY2 = True
+except ImportError:
+    # python3
+    PY2 = False
+    long = int
+
 import os
 import re
 import shlex
@@ -18,9 +26,9 @@ import tokenize
 import traceback
 
 import ctypes
-import lex
-import yacc
-from lex import TOKEN
+from . import lex
+from . import yacc
+from .lex import TOKEN
 
 tokens = (
     'HEADER_NAME', 'IDENTIFIER', 'PP_NUMBER', 'CHARACTER_CONSTANT',
@@ -142,7 +150,10 @@ punctuators = {
 
 def punctuator_regex(punctuators):
     punctuator_regexes = [v[0] for v in punctuators.values()]
-    punctuator_regexes.sort(lambda a, b: -cmp(len(a), len(b)))
+    # key-based sort (longest regex first) works on both Python 2 and 3
+    punctuator_regexes.sort(key=len, reverse=True)
     return '(%s)' % '|'.join(punctuator_regexes)
 
 # Process line-number directives from the preprocessor

+ 4 - 4
lib/python/ctypes/ctypesgencore/parser/preprocessor.py

@@ -19,10 +19,10 @@ import tokenize
 import traceback
 
 import ctypes
-import lex
-import pplexer
-import yacc
-from lex import TOKEN
+from . import lex
+from . import pplexer
+from . import yacc
+from .lex import TOKEN
 
 
 # --------------------------------------------------------------------------

+ 61 - 56
lib/python/ctypes/ctypesgencore/parser/yacc.py

@@ -51,6 +51,7 @@
 # consider to be good Python "coding style."   Modify the code at your
 # own risk!
 # ----------------------------------------------------------------------------
+from __future__ import print_function
 
 __version__ = "2.2"
 
@@ -69,12 +70,19 @@ default_lr = 'LALR'           # Default LR table generation method
 
 error_count = 3                # Number of symbols that must be shifted to leave recovery mode
 
-import cStringIO
+try:
+    import cStringIO as io
+except ImportError:
+    import io
+
+import imp
 import os.path
 import re
 import sys
 import types
 
+from .lex import get_func_code
+
 # <tm> 1 July 2008
 try:
     import hashlib
@@ -137,7 +145,7 @@ class YaccProduction:
         self.stack = stack
 
     def __getitem__(self, n):
-        if isinstance(n, types.IntType):
+        if isinstance(n, int):
             if n >= 0:
                 return self.slice[n].value
             else:
@@ -169,10 +177,10 @@ class YaccProduction:
 
     def pushback(self, n):
         if n <= 0:
-            raise ValueError, "Expected a positive value"
+            raise ValueError("Expected a positive value")
         if n > (len(self.slice) - 1):
-            raise ValueError, "Can't push %d tokens. Only %d are available." % (n, len(
-                self.slice) - 1)
+            raise ValueError("Can't push %d tokens. Only %d are available." % (n, len(
+                self.slice) - 1))
         for i in range(0, n):
             self.pbstack.append(self.slice[-i - 1])
 
@@ -235,7 +243,7 @@ class Parser:
 
         # If no lexer was given, we will try to use the lex module
         if not lexer:
-            import lex
+            from . import lex
             lexer = lex.lexer
 
         pslice.lexer = lexer
@@ -267,7 +275,7 @@ class Parser:
             # is already set, we just use that. Otherwise, we'll pull
             # the next token off of the lookaheadstack or from the lexer
             if debug > 1:
-                print 'state', statestack[-1]
+                print('state', statestack[-1])
             if not lookahead:
                 if not lookaheadstack:
                     lookahead = get_token()     # Get the next token
@@ -287,7 +295,7 @@ class Parser:
             t = actions.get((s, ltype), None)
 
             if debug > 1:
-                print 'action', t
+                print('action', t)
             if t is not None:
                 if t > 0:
                     # shift a symbol on the stack
@@ -452,7 +460,7 @@ class Parser:
                 continue
 
             # Call an error function here
-            raise RuntimeError, "yacc: internal parser error!!!\n"
+            raise RuntimeError("yacc: internal parser error!!!\n")
 
 # -----------------------------------------------------------------------------
 #                          === Parser Construction ===
@@ -522,13 +530,13 @@ def validate_dict(d):
 
         if n[0:2] == 'p_':
             sys.stderr.write("yacc: Warning. '%s' not defined as a function\n" % n)
-        if 1 and isinstance(v, types.FunctionType) and v.func_code.co_argcount == 1:
+        if 1 and isinstance(v, types.FunctionType) and get_func_code(v).co_argcount == 1:
             try:
                 doc = v.__doc__.split(" ")
                 if doc[1] == ':':
                     sys.stderr.write(
                         "%s:%d: Warning. Possible grammar rule '%s' defined without p_ prefix.\n" %
-                        (v.func_code.co_filename, v.func_code.co_firstlineno, n))
+                        (get_func_code(v).co_filename, get_func_code(v).co_firstlineno, n))
             except Exception:
                 pass
 
@@ -584,8 +592,8 @@ def initialize_vars():
 
     # File objects used when creating the parser.out debugging file
     global _vf, _vfc
-    _vf = cStringIO.StringIO()
-    _vfc = cStringIO.StringIO()
+    _vf = io.StringIO()
+    _vfc = io.StringIO()
 
 # -----------------------------------------------------------------------------
 # class Production:
@@ -807,8 +815,8 @@ def add_production(f, file, line, prodname, syms):
 
 
 def add_function(f):
-    line = f.func_code.co_firstlineno
-    file = f.func_code.co_filename
+    line = get_func_code(f).co_firstlineno
+    file = get_func_code(f).co_filename
     error = 0
 
     if isinstance(f, types.MethodType):
@@ -816,11 +824,11 @@ def add_function(f):
     else:
         reqdargs = 1
 
-    if f.func_code.co_argcount > reqdargs:
+    if get_func_code(f).co_argcount > reqdargs:
         sys.stderr.write("%s:%d: Rule '%s' has too many arguments.\n" % (file, line, f.__name__))
         return -1
 
-    if f.func_code.co_argcount < reqdargs:
+    if get_func_code(f).co_argcount < reqdargs:
         sys.stderr.write("%s:%d: Rule '%s' requires an argument.\n" % (file, line, f.__name__))
         return -1
 
@@ -879,7 +887,7 @@ def compute_reachable():
     (Unused terminals have already had their warning.)
     '''
     Reachable = {}
-    for s in Terminals.keys() + Nonterminals.keys():
+    for s in list(Terminals.keys()) + list(Nonterminals.keys()):
         Reachable[s] = 0
 
     mark_reachable_from(Productions[0].prod[0], Reachable)
@@ -1912,7 +1920,7 @@ def lr_parse_table(method):
                                 actionp[st, a] = p
 
             except Exception as e:
-                raise YaccError, "Hosed in lr_parse_table", e
+                # with_traceback() expects a traceback object, not an exception,
+                # and does not exist on Python 2; re-raise without chaining
+                raise YaccError("Hosed in lr_parse_table")
 
         # Print the actions associated with each terminal
         if yaccdebug:
@@ -2077,16 +2085,16 @@ del _lr_goto_items
         f.close()
 
     except IOError as e:
-        print "Unable to create '%s'" % filename
-        print e
+        print("Unable to create '%s'" % filename)
+        print(e)
         return
 
 
 def lr_read_tables(module=tab_module, optimize=0):
     global _lr_action, _lr_goto, _lr_productions, _lr_method
     try:
-        exec "import %s as parsetab" % module
-
+        fmod = imp.find_module('parsetab', sys.path + ['ctypesgencore/parser/', ])
+        parsetab = imp.load_module('parsetab', *fmod)
         if (optimize) or (Signature.digest() == parsetab._lr_signature):
             _lr_action = parsetab._lr_action
             _lr_goto = parsetab._lr_goto
@@ -2104,10 +2112,7 @@ def lr_read_tables(module=tab_module, optimize=0):
 # it's a little funky because I want to preserve backwards compatibility
 # with Python 2.0 where types.ObjectType is undefined.
 
-try:
-    _INSTANCETYPE = (types.InstanceType, types.ObjectType)
-except AttributeError:
-    _INSTANCETYPE = types.InstanceType
+_INSTANCETYPE = (getattr(types, 'InstanceType', object), object)
 
 # -----------------------------------------------------------------------------
 # yacc(module)
@@ -2145,7 +2150,7 @@ def yacc(
             for i in _items:
                 ldict[i[0]] = i[1]
         else:
-            raise ValueError, "Expected a module"
+            raise ValueError("Expected a module")
 
     else:
         # No module given.  We might be able to get information from the caller.
@@ -2191,36 +2196,36 @@ def yacc(
             tokens = ldict.get("tokens", None)
 
         if not tokens:
-            raise YaccError, "module does not define a list 'tokens'"
-        if not (isinstance(tokens, types.ListType) or isinstance(tokens, types.TupleType)):
-            raise YaccError, "tokens must be a list or tuple."
+            raise YaccError("module does not define a list 'tokens'")
+        if not (isinstance(tokens, list) or isinstance(tokens, tuple)):
+            raise YaccError("tokens must be a list or tuple.")
 
         # Check to see if a requires dictionary is defined.
         requires = ldict.get("require", None)
         if requires:
-            if not (isinstance(requires, types.DictType)):
-                raise YaccError, "require must be a dictionary."
+            if not (isinstance(requires, dict)):
+                raise YaccError("require must be a dictionary.")
 
             for r, v in requires.items():
                 try:
-                    if not (isinstance(v, types.ListType)):
+                    if not (isinstance(v, list)):
                         raise TypeError
                     v1 = [x.split(".") for x in v]
                     Requires[r] = v1
                 except Exception:
-                    print "Invalid specification for rule '%s' in require. Expected a list of strings" % r
+                    print("Invalid specification for rule '%s' in require. Expected a list of strings" % r)
 
         # Build the dictionary of terminals.  We a record a 0 in the
         # dictionary to track whether or not a terminal is actually
         # used in the grammar
 
         if 'error' in tokens:
-            print "yacc: Illegal token 'error'.  Is a reserved word."
-            raise YaccError, "Illegal token name"
+            print("yacc: Illegal token 'error'.  Is a reserved word.")
+            raise YaccError("Illegal token name")
 
         for n in tokens:
             if n in Terminals:
-                print "yacc: Warning. Token '%s' multiply defined." % n
+                print("yacc: Warning. Token '%s' multiply defined." % n)
             Terminals[n] = []
 
         Terminals['error'] = []
@@ -2228,8 +2233,8 @@ def yacc(
         # Get the precedence map (if any)
         prec = ldict.get("precedence", None)
         if prec:
-            if not (isinstance(prec, types.ListType) or isinstance(prec, types.TupleType)):
-                raise YaccError, "precedence must be a list or tuple."
+            if not (isinstance(prec, list) or isinstance(prec, tuple)):
+                raise YaccError("precedence must be a list or tuple.")
             add_precedence(prec)
             Signature.update(repr(prec))
 
@@ -2245,17 +2250,17 @@ def yacc(
             elif isinstance(ef, types.MethodType):
                 ismethod = 1
             else:
-                raise YaccError, "'p_error' defined, but is not a function or method."
-            eline = ef.func_code.co_firstlineno
-            efile = ef.func_code.co_filename
+                raise YaccError("'p_error' defined, but is not a function or method.")
+            eline = get_func_code(ef).co_firstlineno
+            efile = get_func_code(ef).co_filename
             files[efile] = None
 
-            if (ef.func_code.co_argcount != 1 + ismethod):
-                raise YaccError, "%s:%d: p_error() requires 1 argument." % (efile, eline)
+            if (get_func_code(ef).co_argcount != 1 + ismethod):
+                raise YaccError("%s:%d: p_error() requires 1 argument." % (efile, eline))
             global Errorfunc
             Errorfunc = ef
         else:
-            print "yacc: Warning. no p_error() function is defined."
+            print("yacc: Warning. no p_error() function is defined.")
 
         # Get the list of built-in functions with p_ prefix
         symbols = [
@@ -2268,17 +2273,17 @@ def yacc(
 
         # Check for non-empty symbols
         if len(symbols) == 0:
-            raise YaccError, "no rules of the form p_rulename are defined."
+            raise YaccError("no rules of the form p_rulename are defined.")
 
         # Sort the symbols by line number
-        symbols.sort(lambda x, y: cmp(x.func_code.co_firstlineno, y.func_code.co_firstlineno))
+        symbols.sort(key=lambda f: get_func_code(f).co_firstlineno)
 
         # Add all of the symbols to the grammar
         for f in symbols:
             if (add_function(f)) < 0:
                 error += 1
             else:
-                files[f.func_code.co_filename] = None
+                files[get_func_code(f).co_filename] = None
 
         # Make a signature of the docstrings
         for f in symbols:
@@ -2288,7 +2293,7 @@ def yacc(
         lr_init_vars()
 
         if error:
-            raise YaccError, "Unable to construct parser."
+            raise YaccError("Unable to construct parser.")
 
         if not lr_read_tables(tabmodule):
 
@@ -2301,7 +2306,7 @@ def yacc(
             validate_dict(ldict)
 
             if start and start not in Prodnames:
-                raise YaccError, "Bad starting symbol '%s'" % start
+                raise YaccError("Bad starting symbol '%s'" % start)
 
             augment_grammar(start)
             error = verify_productions(cycle_check=check_recursion)
@@ -2313,7 +2318,7 @@ def yacc(
                         :2] != 'p_')]
 
             if error:
-                raise YaccError, "Unable to construct parser."
+                raise YaccError("Unable to construct parser.")
 
             build_lritems()
             compute_first1()
@@ -2322,7 +2327,7 @@ def yacc(
             if method in ['SLR', 'LALR']:
                 lr_parse_table(method)
             else:
-                raise YaccError, "Unknown parsing method '%s'" % method
+                raise YaccError("Unknown parsing method '%s'" % method)
 
             if write_tables:
                 lr_write_tables(tabmodule, outputdir)
@@ -2335,7 +2340,7 @@ def yacc(
                     f.write(_vf.getvalue())
                     f.close()
                 except IOError as e:
-                    print "yacc: can't create '%s'" % debugfile, e
+                    print("yacc: can't create '%s'" % debugfile, e)
 
     # Made it here.   Create a parser object and set up its internal state.
     # Set global parse() method to bound method of parser object.
@@ -2366,7 +2371,7 @@ class ParserPrototype(object):
 
     def __init__(self, magic=None):
         if magic != "xyzzy":
-            raise YaccError, 'Use yacc()'
+            raise YaccError('Use yacc()')
 
     def init_parser(self, parser=None):
         if not parser:
@@ -2401,4 +2406,4 @@ def yacc_cleanup():
 
 # Stub that raises an error if parsing is attempted without first calling yacc()
 def parse(*args, **kwargs):
-    raise YaccError, "yacc: No parser built with yacc()"
+    raise YaccError("yacc: No parser built with yacc()")

+ 1 - 1
lib/python/ctypes/ctypesgencore/printer/__init__.py

@@ -5,6 +5,6 @@ This module is the backend to ctypesgen; it contains classes to
 produce the final .py output files.
 """
 
-from printer import WrapperPrinter
+from .printer import WrapperPrinter
 
 __all__ = ["WrapperPrinter"]

+ 11 - 6
lib/python/ctypes/ctypesgencore/printer/preamble.py

@@ -1,3 +1,8 @@
+try:
+    long
+except NameError:
+    long = int
+
 import os
 import sys
 
@@ -43,7 +48,7 @@ def POINTER(obj):
 class UserString:
 
     def __init__(self, seq):
-        if isinstance(seq, basestring):
+        if isinstance(seq, str):
             self.data = seq
         elif isinstance(seq, UserString):
             self.data = seq.data[:]
@@ -85,13 +90,13 @@ class UserString:
     def __add__(self, other):
         if isinstance(other, UserString):
             return self.__class__(self.data + other.data)
-        elif isinstance(other, basestring):
+        elif isinstance(other, str):
             return self.__class__(self.data + other)
         else:
             return self.__class__(self.data + str(other))
 
     def __radd__(self, other):
-        if isinstance(other, basestring):
+        if isinstance(other, str):
             return self.__class__(other + self.data)
         else:
             return self.__class__(str(other) + self.data)
@@ -255,7 +260,7 @@ class MutableString(UserString):
         end = max(end, 0)
         if isinstance(sub, UserString):
             self.data = self.data[:start] + sub.data + self.data[end:]
-        elif isinstance(sub, basestring):
+        elif isinstance(sub, str):
             self.data = self.data[:start] + sub + self.data[end:]
         else:
             self.data = self.data[:start] + str(sub) + self.data[end:]
@@ -271,7 +276,7 @@ class MutableString(UserString):
     def __iadd__(self, other):
         if isinstance(other, UserString):
             self.data += other.data
-        elif isinstance(other, basestring):
+        elif isinstance(other, str):
             self.data += other
         else:
             self.data += str(other)
@@ -288,7 +293,7 @@ class String(MutableString, Union):
                 ('data', c_char_p)]
 
     def __init__(self, obj=""):
-        if isinstance(obj, (str, unicode, UserString)):
+        if isinstance(obj, (str, UserString)):
             self.data = str(obj)
         else:
             self.raw = obj

+ 107 - 105
lib/python/ctypes/ctypesgencore/printer/printer.py

@@ -1,8 +1,10 @@
 #!/usr/bin/env python
+from __future__ import print_function
+
 
 import os
 import sys
-import test  # So we can find the path to local files in the printer package
+from . import test  # So we can find the path to local files in the printer package
 import time
 
 import ctypesgencore.libraryloader  # So we can get the path to it
@@ -29,13 +31,13 @@ class WrapperPrinter:
             self.options.strip_build_path += os.path.sep
 
         self.print_header()
-        print >>self.file
+        print(file=self.file)
 
         self.print_preamble()
-        print >>self.file
+        print(file=self.file)
 
         self.print_loader()
-        print >>self.file
+        print(file=self.file)
 
         self.print_group(self.options.libraries, "libraries", self.print_library)
         self.print_group(self.options.modules, "modules", self.print_module)
@@ -54,36 +56,36 @@ class WrapperPrinter:
         for kind, desc in data.output_order:
             if desc.included:
                 method_table[kind](desc)
-                print >>self.file
+                print(file=self.file)
 
         self.print_group(self.options.inserted_files, "inserted files",
                          self.insert_file)
 
     def print_group(self, list, name, function):
         if list:
-            print >>self.file, "# Begin %s" % name
-            print >>self.file
+            print("# Begin %s" % name, file=self.file)
+            print(file=self.file)
             for obj in list:
                 function(obj)
-            print >>self.file
-            print >>self.file, "# %d %s" % (len(list), name)
-            print >>self.file, "# End %s" % name
+            print(file=self.file)
+            print("# %d %s" % (len(list), name), file=self.file)
+            print("# End %s" % name, file=self.file)
         else:
-            print >>self.file, "# No %s" % name
-        print >>self.file
+            print("# No %s" % name, file=self.file)
+        print(file=self.file)
 
     def srcinfo(self, src):
         if src is None:
-            print >>self.file
+            print(file=self.file)
         else:
             filename, lineno = src
             if filename in ("<built-in>", "<command line>"):
-                print >>self.file, "# %s" % filename
+                print("# %s" % filename, file=self.file)
             else:
                 if self.options.strip_build_path and \
                         filename.startswith(self.options.strip_build_path):
                     filename = filename[len(self.options.strip_build_path):]
-                print >>self.file, "# %s: %s" % (filename, lineno)
+                print("# %s: %s" % (filename, lineno), file=self.file)
 
     def template_subs(self):
         template_subs = {
@@ -92,7 +94,7 @@ class WrapperPrinter:
             'name': os.path.basename(self.options.headers[0])
         }
 
-        for opt, value in self.options.__dict__.iteritems():
+        for opt, value in self.options.__dict__.items():
             if isinstance(value, str):
                 template_subs[opt] = value
             elif isinstance(value, (list, tuple)):
@@ -125,72 +127,72 @@ class WrapperPrinter:
     def print_preamble(self):
         path = path_to_local_file("preamble.py")
 
-        print >>self.file, "# Begin preamble"
-        print >>self.file
+        print("# Begin preamble", file=self.file)
+        print(file=self.file)
         preamble_file = file(path, "r")
         self.file.write(preamble_file.read())
         preamble_file.close()
-        print >>self.file
-        print >>self.file, "# End preamble"
+        print(file=self.file)
+        print("# End preamble", file=self.file)
 
     def print_loader(self):
-        print >>self.file, "_libs = {}"
-        print >>self.file, "_libdirs = %s" % self.options.compile_libdirs
-        print >>self.file
-        print >>self.file, "# Begin loader"
-        print >>self.file
+        print("_libs = {}", file=self.file)
+        print("_libdirs = %s" % self.options.compile_libdirs, file=self.file)
+        print(file=self.file)
+        print("# Begin loader", file=self.file)
+        print(file=self.file)
         path = path_to_local_file("libraryloader.py",
                                   ctypesgencore.libraryloader)
         loader_file = file(path, "r")
         self.file.write(loader_file.read())
         loader_file.close()
-        print >>self.file
-        print >>self.file, "# End loader"
-        print >>self.file
-        print >>self.file, "add_library_search_dirs([%s])" % \
-            ", ".join([repr(d) for d in self.options.runtime_libdirs])
+        print(file=self.file)
+        print("# End loader", file=self.file)
+        print(file=self.file)
+        print("add_library_search_dirs([%s])" % \
+            ", ".join([repr(d) for d in self.options.runtime_libdirs]), file=self.file)
 
     def print_library(self, library):
-        print >>self.file, '_libs["%s"] = load_library("%s")' % (library, library)
+        print('_libs["%s"] = load_library("%s")' % (library, library), file=self.file)
 
     def print_module(self, module):
-        print >>self.file, 'from %s import *' % name
+        print('from %s import *' % module, file=self.file)
 
     def print_constant(self, constant):
-        print >>self.file, '%s = %s' % \
-            (constant.name, constant.value.py_string(False)),
+        print('%s = %s' % \
+            (constant.name, constant.value.py_string(False)), end=' ', file=self.file)
         self.srcinfo(constant.src)
 
     def print_typedef(self, typedef):
-        print >>self.file, '%s = %s' % \
-            (typedef.name, typedef.ctype.py_string()),
+        print('%s = %s' % \
+            (typedef.name, typedef.ctype.py_string()), end=' ', file=self.file)
         self.srcinfo(typedef.src)
 
     def print_struct(self, struct):
         self.srcinfo(struct.src)
         base = {'union': 'Union', 'struct': 'Structure'}[struct.variety]
-        print >>self.file, 'class %s_%s(%s):' % \
-            (struct.variety, struct.tag, base)
-        print >>self.file, '    pass'
+        print('class %s_%s(%s):' % \
+            (struct.variety, struct.tag, base), file=self.file)
+        print('    pass', file=self.file)
 
     def print_struct_members(self, struct):
         if struct.opaque:
             return
-        print >>self.file, '%s_%s.__slots__ = [' % (struct.variety, struct.tag)
+        print('%s_%s.__slots__ = [' % (struct.variety, struct.tag), file=self.file)
         for name, ctype in struct.members:
-            print >>self.file, "    '%s'," % name
-        print >>self.file, ']'
-        print >>self.file, '%s_%s._fields_ = [' % (struct.variety, struct.tag)
+            print("    '%s'," % name, file=self.file)
+        print(']', file=self.file)
+        print('%s_%s._fields_ = [' % (struct.variety, struct.tag), file=self.file)
         for name, ctype in struct.members:
             if isinstance(ctype, CtypesBitfield):
-                print >>self.file, "    ('%s', %s, %s)," % \
-                    (name, ctype.py_string(), ctype.bitfield.py_string(False))
+                print("    ('%s', %s, %s)," % \
+                    (name, ctype.py_string(), ctype.bitfield.py_string(False)), file=self.file)
             else:
-                print >>self.file, "    ('%s', %s)," % (name, ctype.py_string())
-        print >>self.file, ']'
+                print("    ('%s', %s)," % (name, ctype.py_string()), file=self.file)
+        print(']', file=self.file)
 
     def print_enum(self, enum):
-        print >>self.file, 'enum_%s = c_int' % enum.tag,
+        print('enum_%s = c_int' % enum.tag, end=' ', file=self.file)
         self.srcinfo(enum.src)
         # Values of enumerator are output as constants.
 
@@ -203,69 +205,69 @@ class WrapperPrinter:
     def print_fixed_function(self, function):
         self.srcinfo(function.src)
         if function.source_library:
-            print >>self.file, "if hasattr(_libs[%r], %r):" % \
-                (function.source_library, function.c_name())
-            print >>self.file, "    %s = _libs[%r].%s" % \
-                (function.py_name(), function.source_library, function.c_name())
-            print >>self.file, "    %s.restype = %s" % \
-                (function.py_name(), function.restype.py_string())
-            print >>self.file, "    %s.argtypes = [%s]" % (
+            print("if hasattr(_libs[%r], %r):" % \
+                (function.source_library, function.c_name()), file=self.file)
+            print("    %s = _libs[%r].%s" % \
+                (function.py_name(), function.source_library, function.c_name()), file=self.file)
+            print("    %s.restype = %s" % \
+                (function.py_name(), function.restype.py_string()), file=self.file)
+            print("    %s.argtypes = [%s]" % (
                 function.py_name(),
-                ', '.join([a.py_string() for a in function.argtypes]))
+                ', '.join([a.py_string() for a in function.argtypes])), file=self.file)
         else:
-            print >>self.file, "for _lib in _libs.values():"
-            print >>self.file, "    if hasattr(_lib, %r):" % function.c_name()
-            print >>self.file, "        %s = _lib.%s" % (function.py_name(), function.c_name())
-            print >>self.file, "        %s.restype = %s" % (
-                function.py_name(), function.restype.py_string())
-            print >>self.file, "        %s.argtypes = [%s]" % (
-                function.py_name(), ', '.join([a.py_string() for a in function.argtypes]))
-            print >>self.file, "        break"
+            print("for _lib in _libs.values():", file=self.file)
+            print("    if hasattr(_lib, %r):" % function.c_name(), file=self.file)
+            print("        %s = _lib.%s" % (function.py_name(), function.c_name()), file=self.file)
+            print("        %s.restype = %s" % (
+                function.py_name(), function.restype.py_string()), file=self.file)
+            print("        %s.argtypes = [%s]" % (
+                function.py_name(), ', '.join([a.py_string() for a in function.argtypes])), file=self.file)
+            print("        break", file=self.file)
 
     def print_variadic_function(self, function):
         self.srcinfo(function.src)
         if function.source_library:
-            print >>self.file, "if hasattr(_libs[%r], %r):" % \
-                (function.source_library, function.c_name())
-            print >>self.file, "    _func = _libs[%r].%s" % \
-                (function.source_library, function.c_name())
-            print >>self.file, "    _restype = %s" % function.restype.py_string()
-            print >>self.file, "    _argtypes = [%s]" % \
-                ', '.join([a.py_string() for a in function.argtypes])
-            print >>self.file, "    %s = _variadic_function(_func,_restype,_argtypes)" % \
-                function.py_name()
+            print("if hasattr(_libs[%r], %r):" % \
+                (function.source_library, function.c_name()), file=self.file)
+            print("    _func = _libs[%r].%s" % \
+                (function.source_library, function.c_name()), file=self.file)
+            print("    _restype = %s" % function.restype.py_string(), file=self.file)
+            print("    _argtypes = [%s]" % \
+                ', '.join([a.py_string() for a in function.argtypes]), file=self.file)
+            print("    %s = _variadic_function(_func,_restype,_argtypes)" % \
+                function.py_name(), file=self.file)
         else:
-            print >>self.file, "for _lib in _libs.values():"
-            print >>self.file, "    if hasattr(_lib, %r):" % function.c_name()
-            print >>self.file, "        _func = _lib.%s" % \
-                (function.c_name())
-            print >>self.file, "        _restype = %s" % function.restype.py_string()
-            print >>self.file, "        _argtypes = [%s]" % \
-                ', '.join([a.py_string() for a in function.argtypes])
-            print >>self.file, "        %s = _variadic_function(_func,_restype,_argtypes)" % \
-                function.py_name()
+            print("for _lib in _libs.values():", file=self.file)
+            print("    if hasattr(_lib, %r):" % function.c_name(), file=self.file)
+            print("        _func = _lib.%s" % \
+                (function.c_name()), file=self.file)
+            print("        _restype = %s" % function.restype.py_string(), file=self.file)
+            print("        _argtypes = [%s]" % \
+                ', '.join([a.py_string() for a in function.argtypes]), file=self.file)
+            print("        %s = _variadic_function(_func,_restype,_argtypes)" % \
+                function.py_name(), file=self.file)
 
     def print_variable(self, variable):
         self.srcinfo(variable.src)
         if variable.source_library:
-            print >>self.file, 'try:'
-            print >>self.file, '    %s = (%s).in_dll(_libs[%r], %r)' % \
+            print('try:', file=self.file)
+            print('    %s = (%s).in_dll(_libs[%r], %r)' % \
                 (variable.py_name(),
                  variable.ctype.py_string(),
                  variable.source_library,
-                 variable.c_name())
-            print >>self.file, 'except:'
-            print >>self.file, '    pass'
+                 variable.c_name()), file=self.file)
+            print('except:', file=self.file)
+            print('    pass', file=self.file)
         else:
-            print >>self.file, "for _lib in _libs.values():"
-            print >>self.file, '    try:'
-            print >>self.file, '        %s = (%s).in_dll(_lib, %r)' % \
+            print("for _lib in _libs.values():", file=self.file)
+            print('    try:', file=self.file)
+            print('        %s = (%s).in_dll(_lib, %r)' % \
                 (variable.py_name(),
                  variable.ctype.py_string(),
-                 variable.c_name())
-            print >>self.file, "        break"
-            print >>self.file, '    except:'
-            print >>self.file, '        pass'
+                 variable.c_name()), file=self.file)
+            print("        break", file=self.file)
+            print('    except:', file=self.file)
+            print('        pass', file=self.file)
 
     def print_macro(self, macro):
         if macro.params:
@@ -278,16 +280,16 @@ class WrapperPrinter:
         # We want to contain the failures as much as possible.
         # Hence the try statement.
         self.srcinfo(macro.src)
-        print >>self.file, "try:"
-        print >>self.file, "    %s = %s" % (macro.name, macro.expr.py_string(True))
-        print >>self.file, "except:"
-        print >>self.file, "    pass"
+        print("try:", file=self.file)
+        print("    %s = %s" % (macro.name, macro.expr.py_string(True)), file=self.file)
+        print("except:", file=self.file)
+        print("    pass", file=self.file)
 
     def print_func_macro(self, macro):
         self.srcinfo(macro.src)
-        print >>self.file, "def %s(%s):" % \
-            (macro.name, ", ".join(macro.params))
-        print >>self.file, "    return %s" % macro.expr.py_string(True)
+        print("def %s(%s):" % \
+            (macro.name, ", ".join(macro.params)), file=self.file)
+        print("    return %s" % macro.expr.py_string(True), file=self.file)
 
     def insert_file(self, filename):
         try:
@@ -296,10 +298,10 @@ class WrapperPrinter:
             error_message("Cannot open file \"%s\". Skipped it." % filename,
                           cls='missing-file')
 
-        print >>self.file, "# Begin \"%s\"" % filename
-        print >>self.file
+        print("# Begin \"%s\"" % filename, file=self.file)
+        print(file=self.file)
         self.file.write(inserted_file.read())
-        print >>self.file
-        print >>self.file, "# End \"%s\"" % filename
+        print(file=self.file)
+        print("# End \"%s\"" % filename, file=self.file)
 
         inserted_file.close()

+ 1 - 1
lib/python/ctypes/ctypesgencore/processor/__init__.py

@@ -9,4 +9,4 @@ A convenience_function, process(), calls everything else.
 
 __all__ = ["process"]
 
-from pipeline import process
+from .pipeline import process