Commit 3ee07211 authored by Robert Bradshaw

merge

parents f7b4f6fe a7bd92e2
......@@ -210,7 +210,7 @@ class CCodeWriter:
storage_class = "static"
if storage_class:
self.put("%s " % storage_class)
if visibility <> 'public':
if visibility != 'public':
dll_linkage = None
self.put(entry.type.declaration_code(entry.cname,
dll_linkage = dll_linkage))
......
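Note on the recurring pattern in this commit: `<>` is the Python 2-only spelling of the inequality operator and is a syntax error under Python 3, and the `print` statement is likewise Python 2-only. The sketch below (plain Python, not part of the patch) shows the forward-compatible forms the new lines use.

```python
# Minimal sketch (not part of the patch): forward-compatible spellings used
# throughout this commit.
a, b = 1, 2

# 'a <> b' parses only under Python 2; '!=' works in both 2 and 3.
print(a != b)           # True

# 'print x' is a statement in Python 2 only; the call form below works in
# both, provided a single (pre-formatted) string is passed.
print("a != b is %s" % (a != b))
```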
......@@ -311,7 +311,7 @@ class ExprNode(Node):
def allocate_target_temps(self, env, rhs):
# Perform temp allocation for the LHS of an assignment.
if debug_temp_alloc:
print self, "Allocating target temps"
print("%s Allocating target temps" % self)
self.allocate_subexpr_temps(env)
self.result_code = self.target_code()
if rhs:
......@@ -325,7 +325,7 @@ class ExprNode(Node):
# is used as the result instead of allocating a new
# one.
if debug_temp_alloc:
print self, "Allocating temps"
print("%s Allocating temps" % self)
self.allocate_subexpr_temps(env)
self.allocate_temp(env, result)
if self.is_temp:
......@@ -335,11 +335,11 @@ class ExprNode(Node):
# Allocate temporary variables for all sub-expressions
# of this node.
if debug_temp_alloc:
print self, "Allocating temps for:", self.subexprs
print("%s Allocating temps for: %s" % (self, self.subexprs))
for node in self.subexpr_nodes():
if node:
if debug_temp_alloc:
print self, "Allocating temps for", node
print("%s Allocating temps for %s" % (self, node))
node.allocate_temps(env)
def allocate_temp(self, env, result = None):
......@@ -350,7 +350,7 @@ class ExprNode(Node):
# is used as the result instead of allocating a new
# one.
if debug_temp_alloc:
print self, "Allocating temp"
print("%s Allocating temp" % self)
if result:
if not self.is_temp:
raise InternalError("Result forced on non-temp node")
......@@ -364,7 +364,7 @@ class ExprNode(Node):
else:
self.result_code = None
if debug_temp_alloc:
print self, "Allocated result", self.result_code
print("%s Allocated result %s" % (self, self.result_code))
else:
self.result_code = self.calculate_result_code()
......@@ -384,7 +384,7 @@ class ExprNode(Node):
# otherwise release results of its sub-expressions.
if self.is_temp:
if debug_temp_alloc:
print self, "Releasing result", self.result_code
print("%s Releasing result %s" % (self, self.result_code))
env.release_temp(self.result_code)
else:
self.release_subexpr_temps(env)
......@@ -489,7 +489,7 @@ class ExprNode(Node):
src = CoerceFromPyTypeNode(dst_type, src, env)
else: # neither src nor dst are py types
# Added the string comparison, since for c types that
# is enough, but SageX gets confused when the types are
# is enough, but Cython gets confused when the types are
# in different files.
if not (str(src.type) == str(dst_type) or dst_type.assignable_from(src_type)):
error(self.pos, "Cannot assign type '%s' to '%s'" %
......@@ -588,7 +588,7 @@ class BoolNode(PyConstNode):
def coerce_to(self, dst_type, env):
value = self.value
if dst_type.is_numeric:
return IntNode(self.pos, value=self.value).coerce_to(dst_type, env)
return IntNode(self.pos, value=int(self.value)).coerce_to(dst_type, env)
else:
return PyConstNode.coerce_to(self, dst_type, env)
......@@ -977,8 +977,8 @@ class NameNode(AtomicExprNode):
entry.name,
rhs.py_result()))
if debug_disposal_code:
print "NameNode.generate_assignment_code:"
print "...generating disposal code for", rhs
print("NameNode.generate_assignment_code:")
print("...generating disposal code for %s" % rhs)
rhs.generate_disposal_code(code)
else:
......@@ -991,8 +991,8 @@ class NameNode(AtomicExprNode):
code.put_decref(self.result_code, self.ctype())
code.putln('%s = %s;' % (self.result_code, rhs.result_as(self.ctype())))
if debug_disposal_code:
print "NameNode.generate_assignment_code:"
print "...generating post-assignment code for", rhs
print("NameNode.generate_assignment_code:")
print("...generating post-assignment code for %s" % rhs)
rhs.generate_post_assignment_code(code)
def generate_deletion_code(self, code):
......@@ -2139,8 +2139,8 @@ class SequenceNode(ExprNode):
rhs.generate_disposal_code(code)
for i in range(len(self.args)):
item = self.unpacked_items[i]
unpack_code = "__Pyx_UnpackItem(%s)" % (
self.iterator.py_result())
unpack_code = "__Pyx_UnpackItem(%s, %d)" % (
self.iterator.py_result(), i)
code.putln(
"%s = %s; %s" % (
item.result_code,
......@@ -2153,8 +2153,8 @@ class SequenceNode(ExprNode):
"__Pyx_EndUnpack(%s)" % (
self.iterator.py_result()))
if debug_disposal_code:
print "UnpackNode.generate_assignment_code:"
print "...generating disposal code for", iterator
print("UnpackNode.generate_assignment_code:")
print("...generating disposal code for %s" % iterator)
self.iterator.generate_disposal_code(code)
code.putln("}")
......@@ -2261,7 +2261,7 @@ class ListComprehensionNode(SequenceNode):
def allocate_temps(self, env, result = None):
if debug_temp_alloc:
print self, "Allocating temps"
print("%s Allocating temps" % self)
self.allocate_temp(env, result)
self.loop.analyse_declarations(env)
self.loop.analyse_expressions(env)
......@@ -3578,7 +3578,7 @@ class CoercionNode(ExprNode):
self.pos = arg.pos
self.arg = arg
if debug_coercion:
print self, "Coercing", self.arg
print("%s Coercing %s" % (self, self.arg))
def annotate(self, code):
self.arg.annotate(code)
......@@ -3906,18 +3906,20 @@ bad:
unpacking_utility_code = [
"""
static PyObject *__Pyx_UnpackItem(PyObject *); /*proto*/
static PyObject *__Pyx_UnpackItem(PyObject *, Py_ssize_t index); /*proto*/
static int __Pyx_EndUnpack(PyObject *); /*proto*/
""","""
static void __Pyx_UnpackError(void) {
PyErr_SetString(PyExc_ValueError, "unpack sequence of wrong size");
}
static PyObject *__Pyx_UnpackItem(PyObject *iter) {
static PyObject *__Pyx_UnpackItem(PyObject *iter, Py_ssize_t index) {
PyObject *item;
if (!(item = PyIter_Next(iter))) {
if (!PyErr_Occurred())
__Pyx_UnpackError();
if (!PyErr_Occurred()) {
PyErr_Format(PyExc_ValueError,
#if PY_VERSION_HEX < 0x02050000
"need more than %d values to unpack", (int)index);
#else
"need more than %zd values to unpack", index);
#endif
}
}
return item;
}
......@@ -3926,7 +3928,7 @@ static int __Pyx_EndUnpack(PyObject *iter) {
PyObject *item;
if ((item = PyIter_Next(iter))) {
Py_DECREF(item);
__Pyx_UnpackError();
PyErr_SetString(PyExc_ValueError, "too many values to unpack");
return -1;
}
else if (!PyErr_Occurred())
......
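For context, the two unpacking utilities above mirror the ValueError messages CPython itself produces for tuple unpacking; the sketch below (plain Python, wording varies slightly between interpreter versions) shows the two failure modes that the new index argument and messages distinguish.

```python
# Plain-Python illustration of the two unpacking failures handled above.
# Exact wording differs between CPython versions; Python 2 says
# "need more than 2 values to unpack", matching the new C message.
try:
    a, b, c = [1, 2]        # too few items -> __Pyx_UnpackItem path
except ValueError as exc:
    print(exc)

try:
    a, b = [1, 2, 3]        # too many items -> __Pyx_EndUnpack path
except ValueError as exc:
    print(exc)
```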
......@@ -61,7 +61,9 @@ def make_lexicon():
two_oct = octdigit + octdigit
three_oct = octdigit + octdigit + octdigit
two_hex = hexdigit + hexdigit
escapeseq = Str("\\") + (two_oct | three_oct | two_hex | AnyChar)
four_hex = two_hex + two_hex
escapeseq = Str("\\") + (two_oct | three_oct | two_hex |
Str('u') + four_hex | Str('x') + two_hex | AnyChar)
bra = Any("([{")
ket = Any(")]}")
......
......@@ -48,13 +48,13 @@ class Context:
# that module, provided its name is not a dotted name.
debug_find_module = 0
if debug_find_module:
print "Context.find_module: module_name =", module_name, \
"relative_to =", relative_to, "pos =", pos, "need_pxd =", need_pxd
print("Context.find_module: module_name = %s, relative_to = %s, pos = %s, need_pxd = %s" % (
module_name, relative_to, pos, need_pxd))
scope = None
pxd_pathname = None
if "." not in module_name and relative_to:
if debug_find_module:
print "...trying relative import"
print("...trying relative import")
scope = relative_to.lookup_submodule(module_name)
if not scope:
qualified_name = relative_to.qualify_name(module_name)
......@@ -63,28 +63,28 @@ class Context:
scope = relative_to.find_submodule(module_name)
if not scope:
if debug_find_module:
print "...trying absolute import"
print("...trying absolute import")
scope = self
for name in module_name.split("."):
scope = scope.find_submodule(name)
if debug_find_module:
print "...scope =", scope
print("...scope =", scope)
if not scope.pxd_file_loaded:
if debug_find_module:
print "...pxd not loaded"
print("...pxd not loaded")
scope.pxd_file_loaded = 1
if not pxd_pathname:
if debug_find_module:
print "...looking for pxd file"
print("...looking for pxd file")
pxd_pathname = self.find_pxd_file(module_name, pos)
if debug_find_module:
print "......found ", pxd_pathname
print("......found ", pxd_pathname)
if not pxd_pathname and need_pxd:
error(pos, "'%s.pxd' not found" % module_name)
if pxd_pathname:
try:
if debug_find_module:
print "Context.find_module: Parsing", pxd_pathname
print("Context.find_module: Parsing %s" % pxd_pathname)
pxd_tree = self.parse(pxd_pathname, scope.type_names, pxd = 1,
full_module_name = module_name)
pxd_tree.analyse_declarations(scope)
......
......@@ -562,7 +562,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
for entry in env.c_class_entries:
#print "generate_typeobj_definitions:", entry.name
#print "...visibility =", entry.visibility
if entry.visibility <> 'extern':
if entry.visibility != 'extern':
type = entry.type
scope = type.scope
if scope: # could be None if there was an error
......@@ -681,7 +681,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
% scope.mangle_internal("tp_dealloc"))
py_attrs = []
for entry in scope.var_entries:
if entry.type.is_pyobject and entry.name <> "__weakref__":
if entry.type.is_pyobject and entry.name != "__weakref__":
py_attrs.append(entry)
if py_attrs or scope.lookup_here("__weakref__"):
self.generate_self_cast(scope, code)
......@@ -1520,7 +1520,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
def generate_base_type_import_code(self, env, entry, code):
base_type = entry.type.base_type
if base_type and base_type.module_name <> env.qualified_name:
if base_type and base_type.module_name != env.qualified_name:
self.generate_type_import_code(env, base_type, self.pos, code)
def use_type_import_utility_code(self, env):
......@@ -1569,7 +1569,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
typeobj_cname = type.typeobj_cname
scope = type.scope
if scope: # could be None if there was an error
if entry.visibility <> 'extern':
if entry.visibility != 'extern':
for slot in TypeSlots.slot_table:
slot.generate_dynamic_init_code(scope, code)
code.putln(
......
......@@ -943,7 +943,7 @@ class CFuncDefNode(FuncDefNode):
dll_linkage = None
header = self.return_type.declaration_code(entity,
dll_linkage = dll_linkage)
if visibility <> 'private':
if visibility != 'private':
storage_class = "%s " % Naming.extern_c_macro
else:
storage_class = "static "
......@@ -1094,6 +1094,8 @@ class DefNode(FuncDefNode):
if self.signature_has_generic_args():
if self.star_arg:
env.use_utility_code(get_stararg_utility_code)
elif self.signature_has_generic_args():
env.use_utility_code(raise_argtuple_too_long_utility_code)
if not self.signature_has_nongeneric_args():
env.use_utility_code(get_keyword_string_check_utility_code)
elif self.starstar_arg:
......@@ -1566,9 +1568,8 @@ class DefNode(FuncDefNode):
def generate_positional_args_check(self, code, nargs):
code.putln("if (unlikely(PyTuple_GET_SIZE(%s) > %d)) {" % (
Naming.args_cname, nargs))
error_message = "function takes at most %d positional arguments (%d given)"
code.putln("PyErr_Format(PyExc_TypeError, \"%s\", %d, PyTuple_GET_SIZE(%s));" % (
error_message, nargs, Naming.args_cname))
code.putln("__Pyx_RaiseArgtupleTooLong(%d, PyTuple_GET_SIZE(%s));" % (
nargs, Naming.args_cname))
code.putln("return %s;" % self.error_value())
code.putln("}")
......@@ -3147,7 +3148,7 @@ class TryFinallyStatNode(StatNode):
"__pyx_why = 0; goto %s;" % catch_label)
for i in cases_used:
new_label = new_labels[i]
#if new_label and new_label <> "<try>":
#if new_label and new_label != "<try>":
if new_label == new_error_label and self.preserve_exception:
self.put_error_catcher(code,
new_error_label, i+1, catch_label)
......@@ -3550,6 +3551,7 @@ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb) {
Py_INCREF(type);
}
else {
type = 0;
PyErr_SetString(PyExc_TypeError,
"raise: exception must be an old-style class or instance");
goto raise_error;
......@@ -3646,6 +3648,30 @@ static INLINE int __Pyx_SplitStarArg(
}
"""]
#------------------------------------------------------------------------------------
#
# __Pyx_RaiseArgtupleTooLong raises the correct exception when too
# many positional arguments were found. This handles Py_ssize_t
# formatting correctly.
raise_argtuple_too_long_utility_code = [
"""
static INLINE void __Pyx_RaiseArgtupleTooLong(Py_ssize_t num_expected, Py_ssize_t num_found); /*proto*/
""","""
static INLINE void __Pyx_RaiseArgtupleTooLong(
Py_ssize_t num_expected,
Py_ssize_t num_found)
{
const char* error_message =
#if PY_VERSION_HEX < 0x02050000
"function takes at most %d positional arguments (%d given)";
#else
"function takes at most %zd positional arguments (%zd given)";
#endif
PyErr_Format(PyExc_TypeError, error_message, num_expected, num_found);
}
"""]
#------------------------------------------------------------------------------------
#
# __Pyx_CheckKeywordStrings raises an error if non-string keywords
......
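A hedged illustration of what the new `__Pyx_RaiseArgtupleTooLong` helper (added above) reports at the Python level; plain CPython phrases the equivalent failure differently, so the sketch only approximates the behaviour.

```python
# Rough sketch (plain Python, not compiled code): calling a two-argument
# 'def' with three positional arguments.  A Cython-compiled function using
# the helper above raises
#   TypeError: function takes at most 2 positional arguments (3 given)
# while plain CPython words the same failure differently.
def add(a, b):
    return a + b

try:
    add(1, 2, 3)
except TypeError as exc:
    print(exc)
```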
......@@ -24,7 +24,7 @@ def p_ident_list(s):
while s.sy == 'IDENT':
names.append(s.systring)
s.next()
if s.sy <> ',':
if s.sy != ',':
break
s.next()
return names
......@@ -290,7 +290,7 @@ def p_call(s, function):
s.error("Non-keyword arg following keyword arg",
pos = arg.pos)
positional_args.append(arg)
if s.sy <> ',':
if s.sy != ',':
break
s.next()
if s.sy == '*':
......@@ -376,11 +376,11 @@ def p_subscript(s):
return [ExprNodes.EllipsisNode(pos)]
else:
start = p_slice_element(s, (':',))
if s.sy <> ':':
if s.sy != ':':
return [start]
s.next()
stop = p_slice_element(s, (':', ',', ']'))
if s.sy <> ':':
if s.sy != ':':
return [start, stop]
s.next()
step = p_slice_element(s, (':', ',', ']'))
......@@ -469,9 +469,9 @@ def p_atom(s):
if name == "None":
return ExprNodes.NoneNode(pos)
elif name == "True":
return ExprNodes.BoolNode(pos, value=1)
return ExprNodes.BoolNode(pos, value=True)
elif name == "False":
return ExprNodes.BoolNode(pos, value=0)
return ExprNodes.BoolNode(pos, value=False)
else:
return p_name(s, name)
elif sy == 'NULL':
......@@ -489,7 +489,9 @@ def p_name(s, name):
pass
else:
rep = repr(value)
if isinstance(value, int):
if isinstance(value, bool):
return ExprNodes.BoolNode(pos, value = value)
elif isinstance(value, int):
return ExprNodes.IntNode(pos, value = rep)
elif isinstance(value, long):
return ExprNodes.LongNode(pos, value = rep)
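The ordering of the new `isinstance` checks in `p_name` matters because `bool` is a subclass of `int`; without the `bool` branch first, the compile-time constants `True`/`False` would keep falling through to the `IntNode` case. A quick demonstration (plain Python, not part of the patch):

```python
# bool is a subclass of int, so the bool check has to come first.
print(isinstance(True, int))    # True  -> would match the int branch
print(isinstance(True, bool))   # True  -> matched by the new branch
print(isinstance(1, bool))      # False -> plain ints are unaffected
print(int(True), int(False))    # 1 0   -> same conversion BoolNode.coerce_to uses
```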
......@@ -506,7 +508,7 @@ def p_cat_string_literal(s):
# A sequence of one or more adjacent string literals.
# Returns (kind, value) where kind in ('', 'c', 'r')
kind, value = p_string_literal(s)
if kind <> 'c':
if kind != 'c':
strings = [value]
while s.sy == 'STRING' or s.sy == 'BEGIN_STRING':
next_kind, next_value = p_string_literal(s)
......@@ -563,12 +565,19 @@ def p_string_literal(s):
c = systr[1]
if c in "'\"\\abfnrtv01234567":
chars.append(systr)
elif c == 'x':
chars.append('\\x0' + systr[2:])
elif c == '\n':
pass
elif c == 'u':
chars.append(systr)
elif c in 'ux':
if kind == 'u':
try:
chars.append(systr.decode('unicode_escape'))
except UnicodeDecodeError:
s.error("Invalid unicode escape '%s'" % systr,
pos = pos)
elif c == 'x':
chars.append('\\x0' + systr[2:])
else:
chars.append(systr)
else:
chars.append(r'\\' + systr[1:])
elif sy == 'NEWLINE':
......@@ -582,10 +591,7 @@ def p_string_literal(s):
"Unexpected token %r:%r in string literal" %
(sy, s.systring))
s.next()
if kind == 'u':
value = u''.join(chars)
else:
value = ''.join(chars)
value = ''.join(chars)
#print "p_string_literal: value =", repr(value) ###
return kind, value
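The lexicon change earlier in this commit makes `\uXXXX` (and `\xXX`) escapes reach `p_string_literal` as single tokens, and the parser now decodes them for unicode literals via the `unicode_escape` codec. A small hedged sketch of that codec call, outside the parser (works on both Python 2 and 3 when fed a byte string):

```python
# The same codec call the parser change relies on, used outside the parser.
# repr() keeps the output terminal-encoding agnostic.
print(repr(b'\\u00e9'.decode('unicode_escape')))   # u'\xe9' on Py2, 'é' on Py3
print(repr(b'\\x41'.decode('unicode_escape')))     # u'A' / 'A'

# A malformed escape raises UnicodeDecodeError, which the parser reports
# as "Invalid unicode escape '...'".
try:
    b'\\uzzzz'.decode('unicode_escape')
except UnicodeDecodeError as exc:
    print(exc)
```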
......@@ -606,7 +612,7 @@ def unquote(s):
# Split into double quotes, newlines, escape sequences
# and spans of regular chars
l1 = re.split(r'((?:\\[0-7]{1,3})|(?:\\x[0-9A-Fa-f]{2})|(?:\\.)|(?:\\\n)|(?:\n)|")', s)
print "unquote: l1 =", l1 ###
#print "unquote: l1 =", l1 ###
l2 = []
for item in l1:
if item == '"' or item == '\n':
......@@ -695,12 +701,12 @@ def p_dict_maker(s):
pos = s.position()
s.next()
items = []
while s.sy <> '}':
while s.sy != '}':
key = p_simple_expr(s)
s.expect(':')
value = p_simple_expr(s)
items.append((key, value))
if s.sy <> ',':
if s.sy != ',':
break
s.next()
s.expect('}')
......@@ -718,7 +724,7 @@ def p_simple_expr_list(s):
exprs = []
while s.sy not in expr_terminators:
exprs.append(p_simple_expr(s))
if s.sy <> ',':
if s.sy != ',':
break
s.next()
return exprs
......@@ -830,7 +836,7 @@ def find_parallel_assignment_size(input):
rhs_size = len(rhs.args)
for lhs in input[:-1]:
lhs_size = len(lhs.args)
if lhs_size <> rhs_size:
if lhs_size != rhs_size:
error(lhs.pos, "Unpacking sequence of wrong size (expected %d, got %d)"
% (lhs_size, rhs_size))
return -1
......@@ -1092,10 +1098,10 @@ def p_for_bounds(s):
if not target.is_name:
error(target.pos,
"Target of for-from statement must be a variable name")
elif name2 <> target.name:
elif name2 != target.name:
error(name2_pos,
"Variable name in for-from range does not match target")
if rel1[0] <> rel2[0]:
if rel1[0] != rel2[0]:
error(rel2_pos,
"Relation directions in for-from do not match")
return {'target': target,
......@@ -1129,9 +1135,9 @@ def p_for_target(s):
if s.sy == ',':
s.next()
exprs = [expr]
while s.sy <> 'in':
while s.sy != 'in':
exprs.append(p_bit_expr(s))
if s.sy <> ',':
if s.sy != ',':
break
s.next()
return ExprNodes.TupleNode(pos, args = exprs)
......@@ -1173,7 +1179,7 @@ def p_except_clause(s):
s.next()
exc_type = None
exc_value = None
if s.sy <> ':':
if s.sy != ':':
exc_type = p_simple_expr(s)
if s.sy == ',':
s.next()
......@@ -1293,7 +1299,7 @@ def p_IF_statement(s, level, cdef_flag, visibility, api):
if s.compile_time_eval:
result = body
current_eval = 0
if s.sy <> 'ELIF':
if s.sy != 'ELIF':
break
if s.sy == 'ELSE':
s.next()
......@@ -1344,7 +1350,7 @@ def p_statement(s, level, cdef_flag = 0, visibility = 'private', api = 0):
s.level = level
return p_def_statement(s)
elif s.sy == 'class':
if level <> 'module':
if level != 'module':
s.error("class definition not allowed here")
return p_class_statement(s)
elif s.sy == 'include':
......@@ -1353,7 +1359,7 @@ def p_statement(s, level, cdef_flag = 0, visibility = 'private', api = 0):
return p_include_statement(s, level)
elif level == 'c_class' and s.sy == 'IDENT' and s.systring == 'property':
return p_property_decl(s)
elif s.sy == 'pass' and level <> 'property':
elif s.sy == 'pass' and level != 'property':
return p_pass_statement(s, with_newline = 1)
else:
if level in ('c_class_pxd', 'property'):
......@@ -1539,7 +1545,7 @@ def p_c_declarator(s, empty = 0, is_type = 0, cmethod_flag = 0, assignable = 0,
s.expect(')')
else:
result = p_c_simple_declarator(s, empty, is_type, cmethod_flag, assignable, nonempty)
if not calling_convention_allowed and result.calling_convention and s.sy <> '(':
if not calling_convention_allowed and result.calling_convention and s.sy != '(':
error(s.position(), "%s on something that is not a function"
% result.calling_convention)
while s.sy in ('[', '('):
......@@ -1555,7 +1561,7 @@ def p_c_declarator(s, empty = 0, is_type = 0, cmethod_flag = 0, assignable = 0,
def p_c_array_declarator(s, base):
pos = s.position()
s.next() # '['
if s.sy <> ']':
if s.sy != ']':
dim = p_expr(s)
else:
dim = None
......@@ -1789,7 +1795,7 @@ def p_c_enum_definition(s, pos, level, visibility, typedef_flag = 0):
items = None
s.expect(':')
items = []
if s.sy <> 'NEWLINE':
if s.sy != 'NEWLINE':
p_c_enum_line(s, items)
else:
s.next() # 'NEWLINE'
......@@ -1802,7 +1808,7 @@ def p_c_enum_definition(s, pos, level, visibility, typedef_flag = 0):
in_pxd = level == 'module_pxd')
def p_c_enum_line(s, items):
if s.sy <> 'pass':
if s.sy != 'pass':
p_c_enum_item(s, items)
while s.sy == ',':
s.next()
......@@ -1837,8 +1843,8 @@ def p_c_struct_or_union_definition(s, pos, level, visibility, typedef_flag = 0):
s.expect('NEWLINE')
s.expect_indent()
attributes = []
while s.sy <> 'DEDENT':
if s.sy <> 'pass':
while s.sy != 'DEDENT':
if s.sy != 'pass':
attributes.append(
p_c_func_or_var_declaration(s, level = 'other', pos = s.position()))
else:
......@@ -1857,7 +1863,7 @@ def p_visibility(s, prev_visibility):
visibility = prev_visibility
if s.sy == 'IDENT' and s.systring in ('extern', 'public', 'readonly'):
visibility = s.systring
if prev_visibility <> 'private' and visibility <> prev_visibility:
if prev_visibility != 'private' and visibility != prev_visibility:
s.error("Conflicting visibility options '%s' and '%s'"
% (prev_visibility, visibility))
s.next()
......@@ -1995,7 +2001,7 @@ def p_c_class_definition(s, level, pos,
s.next()
module_path.append(class_name)
class_name = p_ident(s)
if module_path and visibility <> 'extern':
if module_path and visibility != 'extern':
error(pos, "Qualified class name only allowed for 'extern' C class")
if module_path and s.sy == 'IDENT' and s.systring == 'as':
s.next()
......@@ -2067,7 +2073,7 @@ def p_c_class_options(s):
typeobj_name = None
s.expect('[')
while 1:
if s.sy <> 'IDENT':
if s.sy != 'IDENT':
break
if s.systring == 'object':
s.next()
......@@ -2075,7 +2081,7 @@ def p_c_class_options(s):
elif s.systring == 'type':
s.next()
typeobj_name = p_ident(s)
if s.sy <> ',':
if s.sy != ',':
break
s.next()
s.expect(']', "Expected 'object' or 'type'")
......@@ -2091,7 +2097,7 @@ def p_property_decl(s):
def p_doc_string(s):
if s.sy == 'STRING' or s.sy == 'BEGIN_STRING':
_, result = p_cat_string_literal(s)
if s.sy <> 'EOF':
if s.sy != 'EOF':
s.expect_newline("Syntax error in doc string")
return result
else:
......@@ -2106,7 +2112,7 @@ def p_module(s, pxd, full_module_name):
else:
level = 'module'
body = p_statement_list(s, level)
if s.sy <> 'EOF':
if s.sy != 'EOF':
s.error("Syntax error in statement [%s,%s]" % (
repr(s.sy), repr(s.systring)))
return ModuleNode(pos, doc = doc, body = body, full_module_name = full_module_name)
......@@ -2138,7 +2144,7 @@ def print_parse_tree(f, node, level, key = None):
tag = node.__class__.__name__
f.write("%s @ %s\n" % (tag, node.pos))
for name, value in node.__dict__.items():
if name <> 'tag' and name <> 'pos':
if name != 'tag' and name != 'pos':
print_parse_tree(f, value, level+1, name)
return
elif t == ListType:
......
......@@ -630,7 +630,7 @@ class CFuncType(CType):
if not self.is_overridable and other_type.is_overridable:
return 0
nargs = len(self.args)
if nargs <> len(other_type.args):
if nargs != len(other_type.args):
return 0
# When comparing C method signatures, the first argument
# is exempt from compatibility checking (the proper check
......@@ -639,9 +639,9 @@ class CFuncType(CType):
if not self.args[i].type.same_as(
other_type.args[i].type):
return 0
if self.has_varargs <> other_type.has_varargs:
if self.has_varargs != other_type.has_varargs:
return 0
if self.optional_arg_count <> other_type.optional_arg_count:
if self.optional_arg_count != other_type.optional_arg_count:
return 0
if not self.return_type.same_as(other_type.return_type):
return 0
......@@ -695,7 +695,7 @@ class CFuncType(CType):
if not other_type.is_cfunction:
return 0
nargs = len(self.args)
if nargs <> len(other_type.args):
if nargs != len(other_type.args):
return 0
for i in range(as_cmethod, nargs):
if not self.args[i].type.subtype_of_resolved_type(other_type.args[i].type):
......@@ -703,9 +703,9 @@ class CFuncType(CType):
else:
self.args[i].needs_type_test = other_type.args[i].needs_type_test \
or not self.args[i].type.same_as(other_type.args[i].type)
if self.has_varargs <> other_type.has_varargs:
if self.has_varargs != other_type.has_varargs:
return 0
if self.optional_arg_count <> other_type.optional_arg_count:
if self.optional_arg_count != other_type.optional_arg_count:
return 0
if not self.return_type.subtype_of_resolved_type(other_type.return_type):
return 0
......
......@@ -42,7 +42,7 @@ def hash_source_file(path):
f = open(path, "rU")
text = f.read()
except IOError, e:
print "Unable to hash scanner source file (%s)" % e
print("Unable to hash scanner source file (%s)" % e)
return ""
finally:
f.close()
......@@ -69,12 +69,12 @@ def open_pickled_lexicon(expected_hash):
result = f
f = None
else:
print "Lexicon hash mismatch:" ###
print " expected", expected_hash ###
print " got ", actual_hash ###
print("Lexicon hash mismatch:") ###
print(" expected " + expected_hash) ###
print(" got " + actual_hash) ###
except IOError, e:
print "Warning: Unable to read pickled lexicon", lexicon_pickle
print e
print("Warning: Unable to read pickled lexicon " + lexicon_pickle)
print(e)
if f:
f.close()
return result
......@@ -89,37 +89,37 @@ def try_to_unpickle_lexicon():
if f:
if notify_lexicon_unpickling:
t0 = time()
print "Unpickling lexicon..."
print("Unpickling lexicon...")
lexicon = pickle.load(f)
f.close()
if notify_lexicon_unpickling:
t1 = time()
print "Done (%.2f seconds)" % (t1 - t0)
print("Done (%.2f seconds)" % (t1 - t0))
def create_new_lexicon():
global lexicon
t0 = time()
print "Creating lexicon..."
print("Creating lexicon...")
lexicon = make_lexicon()
t1 = time()
print "Done (%.2f seconds)" % (t1 - t0)
print("Done (%.2f seconds)" % (t1 - t0))
def pickle_lexicon():
f = None
try:
f = open(lexicon_pickle, "wb")
except IOError:
print "Warning: Unable to save pickled lexicon in", lexicon_pickle
print("Warning: Unable to save pickled lexicon in " + lexicon_pickle)
if f:
if notify_lexicon_pickling:
t0 = time()
print "Pickling lexicon..."
print("Pickling lexicon...")
pickle.dump(lexicon_hash, f, binary_lexicon_pickle)
pickle.dump(lexicon, f, binary_lexicon_pickle)
f.close()
if notify_lexicon_pickling:
t1 = time()
print "Done (%.2f seconds)" % (t1 - t0)
print("Done (%.2f seconds)" % (t1 - t0))
def get_lexicon():
global lexicon
......@@ -284,9 +284,9 @@ class PyrexScanner(Scanner):
self.indentation_char = c
#print "Scanner.indentation_action: setting indent_char to", repr(c)
else:
if self.indentation_char <> c:
if self.indentation_char != c:
self.error("Mixed use of tabs and spaces")
if text.replace(c, "") <> "":
if text.replace(c, "") != "":
self.error("Mixed use of tabs and spaces")
# Figure out how many indents/dedents to do
current_level = self.current_level()
......@@ -304,7 +304,7 @@ class PyrexScanner(Scanner):
self.indentation_stack.pop()
self.produce('DEDENT', '')
#print "...current level now", self.current_level() ###
if new_level <> self.current_level():
if new_level != self.current_level():
self.error("Inconsistent indentation")
def eof_action(self, text):
......@@ -328,7 +328,7 @@ class PyrexScanner(Scanner):
t = self.sy
else:
t = "%s %s" % (self.sy, self.systring)
print "--- %3d %2d %s" % (line, col, t)
print("--- %3d %2d %s" % (line, col, t))
def put_back(self, sy, systring):
self.unread(self.sy, self.systring)
......@@ -380,5 +380,5 @@ class PyrexScanner(Scanner):
def expect_newline(self, message = "Expected a newline"):
# Expect either a newline or end of file
if self.sy <> 'EOF':
if self.sy != 'EOF':
self.expect('NEWLINE', message)
......@@ -304,12 +304,12 @@ class Scope:
return entry
def check_previous_typedef_flag(self, entry, typedef_flag, pos):
if typedef_flag <> entry.type.typedef_flag:
if typedef_flag != entry.type.typedef_flag:
error(pos, "'%s' previously declared using '%s'" % (
entry.name, ("cdef", "ctypedef")[entry.type.typedef_flag]))
def check_previous_visibility(self, entry, visibility, pos):
if entry.visibility <> visibility:
if entry.visibility != visibility:
error(pos, "'%s' previously declared as '%s'" % (
entry.name, entry.visibility))
......@@ -334,7 +334,7 @@ class Scope:
cname = None, visibility = 'private', is_cdef = 0):
# Add an entry for a variable.
if not cname:
if visibility <> 'private':
if visibility != 'private':
cname = name
else:
cname = self.mangle(Naming.var_prefix, name)
......@@ -361,24 +361,24 @@ class Scope:
# Add an entry for a C function.
entry = self.lookup_here(name)
if entry:
if visibility <> 'private' and visibility <> entry.visibility:
if visibility != 'private' and visibility != entry.visibility:
warning(pos, "Function '%s' previously declared as '%s'" % (name, entry.visibility), 1)
if not entry.type.same_as(type):
warning(pos, "Function signature does not match previous declaration", 1)
entry.type = type
else:
if not cname:
if api or visibility <> 'private':
if api or visibility != 'private':
cname = name
else:
cname = self.mangle(Naming.func_prefix, name)
entry = self.add_cfunction(name, type, pos, cname, visibility)
entry.func_cname = cname
if in_pxd and visibility <> 'extern':
if in_pxd and visibility != 'extern':
entry.defined_in_pxd = 1
if api:
entry.api = 1
if not defining and not in_pxd and visibility <> 'extern':
if not defining and not in_pxd and visibility != 'extern':
error(pos, "Non-extern C function declared but not defined")
return entry
......@@ -442,7 +442,7 @@ class Scope:
# Python identifier, it will be interned.
if not entry.pystring_cname:
value = entry.init
if identifier_pattern.match(value):
if identifier_pattern.match(value) and isinstance(value, str):
entry.pystring_cname = self.intern(value)
entry.is_interned = 1
else:
......@@ -577,12 +577,6 @@ class BuiltinScope(Scope):
else:
Scope.__init__(self, "__builtin__", PreImportScope(), None)
for name, definition in self.builtin_functions.iteritems():
if len(definition) < 4: definition.append(None) # exception_value
if len(definition) < 5: definition.append(False) # exception_check
cname, type, arg_types, exception_value, exception_check = definition
function = CFuncType(type, [CFuncTypeArg("", t, None) for t in arg_types], False, exception_value, exception_check)
self.add_cfunction(name, function, None, cname, False)
for name, definition in self.builtin_entries.iteritems():
cname, type = definition
self.declare_var(name, type, None, cname)
......@@ -612,30 +606,13 @@ class BuiltinScope(Scope):
def builtin_scope(self):
return self
# TODO: merge this into builtin_function_table when error handling in Pyrex
# is fixed. Also handle pyrex types as functions.
builtin_functions = {
"cmp": ["PyObject_Compare", c_int_type, (py_object_type, py_object_type), None, True],
"unicode": ["PyObject_Unicode", py_object_type, (py_object_type, ), 0],
"type": ["PyObject_Type", py_object_type, (py_object_type, ), 0],
# "str": ["PyObject_Str", py_object_type, (py_object_type, ), 0],
# "int": ["PyNumber_Int", py_object_type, (py_object_type, ), 0],
# "long": ["PyNumber_Long", py_object_type, (py_object_type, ), 0],
# "float": ["PyNumber_Float", py_object_type, (py_object_type, ), 0],
# "list": ["PyNumber_List", py_object_type, (py_object_type, ), 0],
# "tuple": ["PySequence_Tuple", py_object_type, (py_object_type, ), 0],
}
builtin_entries = {
"int": ["((PyObject*)&PyInt_Type)", py_object_type],
"long": ["((PyObject*)&PyLong_Type)", py_object_type],
"float": ["((PyObject*)&PyFloat_Type)", py_object_type],
"str": ["((PyObject*)&PyString_Type)", py_object_type],
"unicode":["((PyObject*)&PyUnicode_Type)", py_object_type],
"tuple": ["((PyObject*)&PyTuple_Type)", py_object_type],
"list": ["((PyObject*)&PyList_Type)", py_object_type],
"dict": ["((PyObject*)&PyDict_Type)", py_object_type],
......@@ -672,6 +649,7 @@ class ModuleScope(Scope):
# cimported_modules [ModuleScope] Modules imported with cimport
# intern_map {string : string} Mapping from Python names to interned strs
# interned_names [string] Interned names pending generation of declarations
# interned_nums [int/long] Interned numeric constants
# all_pystring_entries [Entry] Python string consts from all scopes
# types_imported {PyrexType : 1} Set of types for which import code generated
......@@ -871,7 +849,7 @@ class ModuleScope(Scope):
entry = None # Will cause an error when we redeclare it
else:
self.check_previous_typedef_flag(entry, typedef_flag, pos)
if base_type <> type.base_type:
if base_type != type.base_type:
error(pos, "Base type does not match previous declaration")
#
# Make a new entry if needed
......@@ -920,17 +898,17 @@ class ModuleScope(Scope):
entry.defined_in_pxd = 1
if implementing: # So that filenames in runtime exceptions refer to
entry.pos = pos # the .pyx file and not the .pxd file
if visibility <> 'private' and entry.visibility <> visibility:
if visibility != 'private' and entry.visibility != visibility:
error(pos, "Class '%s' previously declared as '%s'"
% (name, entry.visibility))
if api:
entry.api = 1
if objstruct_cname:
if type.objstruct_cname and type.objstruct_cname <> objstruct_cname:
if type.objstruct_cname and type.objstruct_cname != objstruct_cname:
error(pos, "Object struct name differs from previous declaration")
type.objstruct_cname = objstruct_cname
if typeobj_cname:
if type.typeobj_cname and type.typeobj_cname <> typeobj_cname:
if type.typeobj_cname and type.typeobj_cname != typeobj_cname:
error(pos, "Type object name differs from previous declaration")
type.typeobj_cname = typeobj_cname
#
......@@ -974,12 +952,12 @@ class ModuleScope(Scope):
#
debug_check_c_classes = 0
if debug_check_c_classes:
print "Scope.check_c_classes: checking scope", self.qualified_name
print("Scope.check_c_classes: checking scope " + self.qualified_name)
for entry in self.c_class_entries:
if debug_check_c_classes:
print "...entry", entry.name, entry
print "......type =", entry.type
print "......visibility =", entry.visibility
print("...entry %s %s" % (entry.name, entry))
print("......type = " + entry.type)
print("......visibility = " + entry.visibility)
type = entry.type
name = entry.name
visibility = entry.visibility
......@@ -987,7 +965,7 @@ class ModuleScope(Scope):
if not type.scope:
error(entry.pos, "C class '%s' is declared but not defined" % name)
# Generate typeobj_cname
if visibility <> 'extern' and not type.typeobj_cname:
if visibility != 'extern' and not type.typeobj_cname:
type.typeobj_cname = self.mangle(Naming.typeobj_prefix, name)
## Generate typeptr_cname
#type.typeptr_cname = self.mangle(Naming.typeptr_prefix, name)
......@@ -1076,7 +1054,7 @@ class StructOrUnionScope(Scope):
if type.is_pyobject and not allow_pyobject:
error(pos,
"C struct/union member cannot be a Python object")
if visibility <> 'private':
if visibility != 'private':
error(pos,
"C struct/union member cannot be declared %s" % visibility)
return entry
......@@ -1170,7 +1148,7 @@ class CClassScope(ClassScope):
def __init__(self, name, outer_scope, visibility):
ClassScope.__init__(self, name, outer_scope)
if visibility <> 'extern':
if visibility != 'extern':
self.method_table_cname = outer_scope.mangle(Naming.methtab_prefix, name)
self.member_table_cname = outer_scope.mangle(Naming.memtab_prefix, name)
self.getset_table_cname = outer_scope.mangle(Naming.gstab_prefix, name)
......
......@@ -147,7 +147,7 @@ class SlotDescriptor:
def generate_dynamic_init_code(self, scope, code):
if self.is_initialised_dynamically:
value = self.slot_code(scope)
if value <> "0":
if value != "0":
code.putln("%s.%s = %s;" % (
scope.parent_type.typeobj_cname,
self.slot_name,
......
......@@ -6,7 +6,7 @@
def print_call_chain(*args):
import sys
print " ".join(map(str, args))
print(" ".join(map(str, args)))
f = sys._getframe(1)
while f:
name = f.f_code.co_name
......@@ -15,6 +15,6 @@ def print_call_chain(*args):
c = getattr(s, "__class__", None)
if c:
name = "%s.%s" % (c.__name__, name)
print "Called from:", name, f.f_lineno
print("Called from: %s %s" % (name, f.f_lineno))
f = f.f_back
print "-" * 70
print("-" * 70)
......@@ -69,10 +69,10 @@ def c_compile(c_file, verbose_flag = 0, cplus = 0, obj_suffix = ".o"):
compiler = compilers[bool(cplus)]
args = [compiler] + compiler_options + include_options + [c_file, "-o", o_file]
if verbose_flag or verbose:
print " ".join(args)
print(" ".join(args))
#print compiler, args ###
status = os.spawnvp(os.P_WAIT, compiler, args)
if status <> 0:
if status != 0:
raise CCompilerError("C compiler returned status %s" % status)
return o_file
......@@ -87,8 +87,8 @@ def c_link_list(obj_files, verbose_flag = 0, cplus = 0):
linker = linkers[bool(cplus)]
args = [linker] + linker_options + obj_files + ["-o", out_file]
if verbose_flag or verbose:
print " ".join(args)
print(" ".join(args))
status = os.spawnvp(os.P_WAIT, linker, args)
if status <> 0:
if status != 0:
raise CCompilerError("Linker returned status %s" % status)
return out_file
......@@ -124,8 +124,8 @@ def test_c_compile(link = 0):
except PyrexError, e:
#print "Caught a PyrexError:" ###
#print repr(e) ###
print "%s.%s:" % (e.__class__.__module__,
e.__class__.__name__), e
print("%s.%s: %s" % (e.__class__.__module__,
e.__class__.__name__, e))
sys.exit(1)
else:
obj = arg
......
......@@ -111,7 +111,7 @@ class Lexicon:
tables = None # StateTableMachine
def __init__(self, specifications, debug = None, debug_flags = 7, timings = None):
if type(specifications) <> types.ListType:
if type(specifications) != types.ListType:
raise Errors.InvalidScanner("Scanner definition is not a list")
if timings:
from Timing import time
......@@ -176,9 +176,9 @@ class Lexicon:
raise e.__class__("Token number %d: %s" % (token_number, e))
def parse_token_definition(self, token_spec):
if type(token_spec) <> types.TupleType:
if type(token_spec) != types.TupleType:
raise Errors.InvalidToken("Token definition is not a tuple")
if len(token_spec) <> 2:
if len(token_spec) != 2:
raise Errors.InvalidToken("Wrong number of items in token definition")
pattern, action = token_spec
if not isinstance(pattern, Regexps.RE):
......
......@@ -182,7 +182,7 @@ class FastMachine:
code0, code1 = event
if code0 == -maxint:
state['else'] = new_state
elif code1 <> maxint:
elif code1 != maxint:
while code0 < code1:
state[chr(code0)] = new_state
code0 = code0 + 1
......
......@@ -152,12 +152,12 @@ class RE:
self.wrong_type(num, value, "Plex.RE instance")
def check_string(self, num, value):
if type(value) <> type(''):
if type(value) != type(''):
self.wrong_type(num, value, "string")
def check_char(self, num, value):
self.check_string(num, value)
if len(value) <> 1:
if len(value) != 1:
raise Errors.PlexValueError("Invalid value for argument %d of Plex.%s."
"Expected a string of length 1, got: %s" % (
num, self.__class__.__name__, repr(value)))
......@@ -192,7 +192,7 @@ class RE:
## def build_machine(self, m, initial_state, final_state, match_bol, nocase):
## c = self.char
## if match_bol and c <> BOL:
## if match_bol and c != BOL:
## s1 = self.build_opt(m, initial_state, BOL)
## else:
## s1 = initial_state
......
......@@ -122,8 +122,8 @@ class Scanner:
action = self.run_machine_inlined()
if action:
if self.trace:
print "Scanner: read: Performing", action, "%d:%d" % (
self.start_pos, self.cur_pos)
print("Scanner: read: Performing %s %d:%d" % (
action, self.start_pos, self.cur_pos))
base = self.buf_start_pos
text = self.buffer[self.start_pos - base : self.cur_pos - base]
return (text, action)
......@@ -163,8 +163,8 @@ class Scanner:
trace = self.trace
while 1:
if trace: #TRACE#
print "State %d, %d/%d:%s -->" % ( #TRACE#
state['number'], input_state, cur_pos, repr(cur_char)), #TRACE#
print("State %d, %d/%d:%s -->" % ( #TRACE#
state['number'], input_state, cur_pos, repr(cur_char))) #TRACE#
# Begin inlined self.save_for_backup()
#action = state.action #@slow
action = state['action'] #@fast
......@@ -179,7 +179,7 @@ class Scanner:
new_state = c and state.get('else') #@fast
if new_state:
if trace: #TRACE#
print "State %d" % new_state['number'] #TRACE#
print("State %d" % new_state['number']) #TRACE#
state = new_state
# Begin inlined: self.next_char()
if input_state == 1:
......@@ -228,7 +228,7 @@ class Scanner:
# End inlined self.next_char()
else: # not new_state
if trace: #TRACE#
print "blocked" #TRACE#
print("blocked") #TRACE#
# Begin inlined: action = self.back_up()
if backup_state:
(action, cur_pos, cur_line, cur_line_start,
......@@ -245,7 +245,7 @@ class Scanner:
self.next_pos = next_pos
if trace: #TRACE#
if action: #TRACE#
print "Doing", action #TRACE#
print("Doing " + action) #TRACE#
return action
# def transition(self):
......@@ -288,7 +288,7 @@ class Scanner:
def next_char(self):
input_state = self.input_state
if self.trace:
print "Scanner: next:", " "*20, "[%d] %d" % (input_state, self.cur_pos),
print("Scanner: next: %s [%d] %d" % (" "*20, input_state, self.cur_pos))
if input_state == 1:
self.cur_pos = self.next_pos
c = self.read_char()
......@@ -314,7 +314,7 @@ class Scanner:
else: # input_state = 5
self.cur_char = ''
if self.trace:
print "--> [%d] %d %s" % (input_state, self.cur_pos, repr(self.cur_char))
print("--> [%d] %d %s" % (input_state, self.cur_pos, repr(self.cur_char)))
# def read_char(self):
# """
......
......@@ -95,9 +95,9 @@ class REParser:
if self.c == ']':
char_list.append(']')
self.next()
while not self.end and self.c <> ']':
while not self.end and self.c != ']':
c1 = self.get()
if self.c == '-' and self.lookahead(1) <> ']':
if self.c == '-' and self.lookahead(1) != ']':
self.next()
c2 = self.get()
for a in xrange(ord(c1), ord(c2) + 1):
......
......@@ -190,7 +190,7 @@ class TransitionMap:
def check(self):
"""Check data structure integrity."""
if not self.map[-3] < self.map[-1]:
print self
print(self)
assert 0
def dump(self, file):
......
......@@ -49,10 +49,10 @@ def c_compile(c_file, verbose_flag = 0, cplus = 0, obj_suffix = ".o"):
compiler = compilers[bool(cplus)]
args = [compiler] + compiler_options + include_options + [c_file, "-o", o_file]
if verbose_flag or verbose:
print " ".join(args)
print(" ".join(args))
#print compiler, args ###
status = os.spawnvp(os.P_WAIT, compiler, args)
if status <> 0:
if status != 0:
raise CCompilerError("C compiler returned status %s" % status)
return o_file
......@@ -67,8 +67,8 @@ def c_link_list(obj_files, verbose_flag = 0, cplus = 0):
linker = linkers[bool(cplus)]
args = [linker] + linker_options + obj_files + ["-o", out_file]
if verbose_flag or verbose:
print " ".join(args)
print(" ".join(args))
status = os.spawnvp(os.P_WAIT, linker, args)
if status <> 0:
if status != 0:
raise CCompilerError("Linker returned status %s" % status)
return out_file