Commit 8aa7be94 authored by Robert Bradshaw

merge latest cython-devel

parents 6eee297c 736b910d
@@ -137,7 +137,7 @@ def analyse_buffer_options(globalpos, env, posargs, dictargs, defaults=None, nee
     if defaults is None:
         defaults = buffer_defaults
-    posargs, dictargs = Interpreter.interpret_compiletime_options(posargs, dictargs, type_env=env)
+    posargs, dictargs = Interpreter.interpret_compiletime_options(posargs, dictargs, type_env=env, type_args = (0,'dtype'))
     if len(posargs) > buffer_positional_options_count:
         raise CompileError(posargs[-1][1], ERR_BUF_TOO_MANY)
...
@@ -667,7 +667,7 @@ class GlobalState(object):
         decls_writer = self.parts['decls']
         for _, cname, c in c_consts:
             decls_writer.putln('static char %s[] = "%s";' % (
-                cname, c.escaped_value))
+                cname, StringEncoding.split_docstring(c.escaped_value)))
             if c.py_strings is not None:
                 for py_string in c.py_strings.itervalues():
                     py_strings.append((c.cname, len(py_string.cname), py_string))
...
@@ -1048,7 +1048,7 @@ class NewExprNode(AtomicExprNode):
     #  cppclass             string                c++ class to create
     #  template_parameters  None or [ExprNode]    temlate parameters, if any

-    def analyse_types(self, env):
+    def infer_type(self, env):
         entry = env.lookup(self.cppclass)
         if entry is None or not entry.is_cpp_class:
             error(self.pos, "new operator can only be applied to a C++ class")
@@ -1068,6 +1068,10 @@ class NewExprNode(AtomicExprNode):
         self.class_type = type
         self.entry = constructor
         self.type = constructor.type
+        return self.type
+
+    def analyse_types(self, env):
+        self.infer_type(env)

     def generate_result_code(self, code):
         pass
@@ -1803,7 +1807,7 @@ class IndexNode(ExprNode):
         base_type = self.base.analyse_as_type(env)
         if base_type and not base_type.is_pyobject:
             if base_type.is_cpp_class:
-                if isinstance(self.index, TupleExprNode):
+                if isinstance(self.index, TupleNode):
                     template_values = self.index.args
                 else:
                     template_values = [self.index]
...
@@ -17,7 +17,7 @@ class EmptyScope(object):
 empty_scope = EmptyScope()

-def interpret_compiletime_options(optlist, optdict, type_env=None):
+def interpret_compiletime_options(optlist, optdict, type_env=None, type_args=()):
     """
     Tries to interpret a list of compile time option nodes.
     The result will be a tuple (optlist, optdict) but where
@@ -34,21 +34,21 @@ def interpret_compiletime_options(optlist, optdict, type_env=None):
     A CompileError will be raised if there are problems.
     """
-    def interpret(node):
-        if isinstance(node, CBaseTypeNode):
+    def interpret(node, ix):
+        if ix in type_args:
             if type_env:
-                return (node.analyse(type_env), node.pos)
+                return (node.analyse_as_type(type_env), node.pos)
             else:
                 raise CompileError(node.pos, "Type not allowed here.")
         else:
             return (node.compile_time_value(empty_scope), node.pos)
     if optlist:
-        optlist = [interpret(x) for x in optlist]
+        optlist = [interpret(x, ix) for ix, x in enumerate(optlist)]
     if optdict:
         assert isinstance(optdict, DictNode)
         new_optdict = {}
         for item in optdict.key_value_pairs:
-            new_optdict[item.key.value] = interpret(item.value)
+            new_optdict[item.key.value] = interpret(item.value, item.key.value)
         optdict = new_optdict
     return (optlist, new_optdict)
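A minimal standalone sketch (not part of this commit) of the idea behind the new type_args parameter: a positional index or keyword name listed in type_args is analysed as a type, everything else is evaluated as a compile-time constant. The helpers as_type/as_value here are hypothetical stand-ins for the real node methods.

def interpret_options(posargs, dictargs, type_args=(), as_type=str, as_value=eval):
    def interpret(node, ix):
        # positional index or keyword name found in type_args -> treat as a type
        return as_type(node) if ix in type_args else as_value(node)
    posargs = [interpret(node, ix) for ix, node in enumerate(posargs)]
    dictargs = dict((key, interpret(node, key)) for key, node in dictargs.items())
    return posargs, dictargs

# For object[float, ndim=2]: position 0 is a type, ndim is an ordinary constant.
print(interpret_options(["float"], {"ndim": "2"}, type_args=(0, 'dtype')))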
@@ -791,7 +791,8 @@ class TemplatedTypeNode(CBaseTypeNode):
         if base_type.is_error: return base_type

         if base_type.is_cpp_class:
-            if len(self.keyword_args.key_value_pairs) != 0:
+            # Templated class
+            if self.keyword_args and self.keyword_args.key_value_pairs:
                 error(self.pos, "c++ templates cannot take keyword arguments");
                 self.type = PyrexTypes.error_type
             else:
@@ -800,8 +801,8 @@ class TemplatedTypeNode(CBaseTypeNode):
                     template_types.append(template_node.analyse_as_type(env))
                 self.type = base_type.specialize_here(self.pos, template_types)

-        else:
+        elif base_type.is_pyobject:
+            # Buffer
             import Buffer

             options = Buffer.analyse_buffer_options(
@@ -817,6 +818,24 @@ class TemplatedTypeNode(CBaseTypeNode):
                                       for name, value in options.iteritems() ])
             self.type = PyrexTypes.BufferType(base_type, **options)

+        else:
+            # Array
+            empty_declarator = CNameDeclaratorNode(self.pos, name="")
+            if len(self.positional_args) > 1 or self.keyword_args.key_value_pairs:
+                error(self.pos, "invalid array declaration")
+                self.type = PyrexTypes.error_type
+            else:
+                # It would be nice to merge this class with CArrayDeclaratorNode,
+                # but arrays are part of the declaration, not the type...
+                if not self.positional_args:
+                    dimension = None
+                else:
+                    dimension = self.positional_args[0]
+                self.type = CArrayDeclaratorNode(self.pos,
+                    base = empty_declarator,
+                    dimension = dimension).analyse(base_type, env)[1]
+
         return self.type

 class CComplexBaseTypeNode(CBaseTypeNode):
@@ -4873,7 +4892,9 @@ class FromImportStatNode(StatNode):
                     break
             else:
                 entry = env.lookup(target.name)
-                if entry.is_type and entry.type.name == name and entry.type.module_name == self.module.module_name.value:
+                if (entry.is_type and
+                        entry.type.name == name and
+                        entry.type.module_name == self.module.module_name.value):
                     continue # already cimported
                 target.analyse_target_expression(env, None)
             if target.type is py_object_type:
...
@@ -337,8 +337,9 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
         'address': AmpersandNode,
     }

-    special_methods = set(['declare', 'union', 'struct', 'typedef', 'sizeof', 'cast', 'pointer', 'compiled', 'NULL']
-                          + unop_method_nodes.keys())
+    special_methods = set(['declare', 'union', 'struct', 'typedef', 'sizeof',
+                           'cast', 'pointer', 'compiled', 'NULL']
+                          + unop_method_nodes.keys())

     def __init__(self, context, compilation_directive_defaults):
         super(InterpretCompilerDirectives, self).__init__(context)
@@ -373,38 +374,34 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
         node.cython_module_names = self.cython_module_names
         return node

-    # Track cimports of the cython module.
+    # The following four functions track imports and cimports that
+    # begin with "cython"
+    def is_cython_directive(self, name):
+        return (name in Options.directive_types or
+                name in self.special_methods or
+                PyrexTypes.parse_basic_type(name))
+
     def visit_CImportStatNode(self, node):
         if node.module_name == u"cython":
-            if node.as_name:
-                modname = node.as_name
-            else:
-                modname = u"cython"
-            self.cython_module_names.add(modname)
+            self.cython_module_names.add(node.as_name or u"cython")
         elif node.module_name.startswith(u"cython."):
             if node.as_name:
                 self.directive_names[node.as_name] = node.module_name[7:]
             else:
                 self.cython_module_names.add(u"cython")
-        else:
-            return node
+            # if this cimport was a compiler directive, we don't
+            # want to leave the cimport node sitting in the tree
+            return None
+        return node

     def visit_FromCImportStatNode(self, node):
-        if node.module_name.startswith(u"cython."):
-            is_cython_module = True
-            submodule = node.module_name[7:] + u"."
-        elif node.module_name == u"cython":
-            is_cython_module = True
-            submodule = u""
-        else:
-            is_cython_module = False
-        if is_cython_module:
+        if (node.module_name == u"cython") or \
+               node.module_name.startswith(u"cython."):
+            submodule = (node.module_name + u".")[7:]
             newimp = []
             for pos, name, as_name, kind in node.imported_names:
                 full_name = submodule + name
-                if (full_name in Options.directive_types or
-                        full_name in self.special_methods or
-                        PyrexTypes.parse_basic_type(full_name)):
+                if self.is_cython_directive(full_name):
                     if as_name is None:
                         as_name = full_name
                     self.directive_names[as_name] = full_name
@@ -419,21 +416,13 @@ class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
         return node

     def visit_FromImportStatNode(self, node):
-        if node.module.module_name.value.startswith(u"cython."):
-            is_cython_module = True
-            submodule = node.module.module_name.value[7:] + u"."
-        elif node.module.module_name.value == u"cython":
-            is_cython_module = True
-            submodule = u""
-        else:
-            is_cython_module = False
-        if is_cython_module:
+        if (node.module.module_name.value == u"cython") or \
+               node.module.module_name.value.startswith(u"cython."):
+            submodule = (node.module.module_name.value + u".")[7:]
             newimp = []
             for name, name_node in node.items:
                 full_name = submodule + name
-                if (full_name in Options.directive_types or
-                        full_name in self.special_methods or
-                        PyrexTypes.parse_basic_type(full_name)):
+                if self.is_cython_directive(full_name):
                     self.directive_names[name_node.name] = full_name
                 else:
                     newimp.append((name, name_node))
...
@@ -100,13 +100,13 @@ cpdef p_IF_statement(PyrexScanner s, ctx)
 cpdef p_statement(PyrexScanner s, ctx, bint first_statement = *)
 cpdef p_statement_list(PyrexScanner s, ctx, bint first_statement = *)
 cpdef p_suite(PyrexScanner s, ctx = *, bint with_doc = *, bint with_pseudo_doc = *)
-cpdef p_positional_and_keyword_args(PyrexScanner s, end_sy_set, type_positions= *, type_keywords= * )
+cpdef p_positional_and_keyword_args(PyrexScanner s, end_sy_set, templates = *)
-cpdef p_c_base_type(PyrexScanner s, bint self_flag = *, bint nonempty = *)
+cpdef p_c_base_type(PyrexScanner s, bint self_flag = *, bint nonempty = *, templates = *)
 cpdef p_calling_convention(PyrexScanner s)
 cpdef p_c_complex_base_type(PyrexScanner s)
-cpdef p_c_simple_base_type(PyrexScanner s, self_flag, nonempty)
+cpdef p_c_simple_base_type(PyrexScanner s, bint self_flag, bint nonempty, templates = *)
-cpdef p_buffer_access(PyrexScanner s, base_type_node)
+cpdef p_buffer_or_template(PyrexScanner s, base_type_node, templates)
 cpdef bint looking_at_name(PyrexScanner s) except -2
 cpdef bint looking_at_expr(PyrexScanner s) except -2
 cpdef bint looking_at_base_type(PyrexScanner s) except -2
@@ -150,4 +150,4 @@ cpdef p_doc_string(PyrexScanner s)
 cpdef p_code(PyrexScanner s, level= *)
 cpdef p_compiler_directive_comments(PyrexScanner s)
 cpdef p_module(PyrexScanner s, pxd, full_module_name)
-cpdef p_cpp_class_definition(PyrexScanner s, ctx)
+cpdef p_cpp_class_definition(PyrexScanner s, pos, ctx)
@@ -1723,16 +1723,12 @@ def p_suite(s, ctx = Ctx(), with_doc = 0, with_pseudo_doc = 0):
     else:
         return body

-def p_positional_and_keyword_args(s, end_sy_set, type_positions=(), type_keywords=()):
+def p_positional_and_keyword_args(s, end_sy_set, templates = None):
     """
     Parses positional and keyword arguments. end_sy_set
     should contain any s.sy that terminate the argument list.
     Argument expansion (* and **) are not allowed.
-
-    type_positions and type_keywords specifies which argument
-    positions and/or names which should be interpreted as
-    types. Other arguments will be treated as expressions.

     Returns: (positional_args, keyword_args)
     """
     positional_args = []
@@ -1743,35 +1739,33 @@ def p_positional_and_keyword_args(s, end_sy_set, templates = None):
         if s.sy == '*' or s.sy == '**':
             s.error('Argument expansion not allowed here.')
-        was_keyword = False
         parsed_type = False
-        if s.sy == 'IDENT':
-            # Since we can have either types or expressions as positional args,
-            # we use a strategy of looking an extra step forward for a '=' and
-            # if it is a positional arg we backtrack.
+        if s.sy == 'IDENT' and s.peek()[0] == '=':
             ident = s.systring
-            s.next()
-            if s.sy == '=':
-                s.next()
-                # Is keyword arg
-                if ident in type_keywords:
-                    arg = p_c_base_type(s)
-                    parsed_type = True
-                else:
-                    arg = p_simple_expr(s)
-                keyword_node = ExprNodes.IdentifierStringNode(
-                    arg.pos, value = EncodedString(ident))
-                keyword_args.append((keyword_node, arg))
-                was_keyword = True
-            else:
-                s.put_back('IDENT', ident)
-        if not was_keyword:
-            if pos_idx in type_positions:
-                arg = p_c_base_type(s)
-                parsed_type = True
-            else:
-                arg = p_simple_expr(s)
+            s.next() # s.sy is '='
+            s.next()
+            if looking_at_expr(s):
+                arg = p_simple_expr(s)
+            else:
+                base_type = p_c_base_type(s, templates = templates)
+                declarator = p_c_declarator(s, empty = 1)
+                arg = Nodes.CComplexBaseTypeNode(base_type.pos,
+                    base_type = base_type, declarator = declarator)
+                parsed_type = True
+            keyword_node = ExprNodes.IdentifierStringNode(
+                arg.pos, value = EncodedString(ident))
+            keyword_args.append((keyword_node, arg))
+            was_keyword = True
+        else:
+            if looking_at_expr(s):
+                arg = p_simple_expr(s)
+            else:
+                base_type = p_c_base_type(s, templates = templates)
+                declarator = p_c_declarator(s, empty = 1)
+                arg = Nodes.CComplexBaseTypeNode(base_type.pos,
+                    base_type = base_type, declarator = declarator)
+                parsed_type = True
             positional_args.append(arg)
             pos_idx += 1
         if len(keyword_args) > 0:
@@ -1781,9 +1775,7 @@ def p_positional_and_keyword_args(s, end_sy_set, templates = None):
         if s.sy != ',':
             if s.sy not in end_sy_set:
                 if parsed_type:
-                    s.error("Expected: type")
-                else:
-                    s.error("Expected: expression")
+                    s.error("Unmatched %s" % " or ".join(end_sy_set))
             break
         s.next()
     return positional_args, keyword_args
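A minimal standalone sketch (not part of this commit) of the parsing change above: instead of consuming the identifier and calling put_back() when no '=' follows, the rewritten parser decides with a one-token lookahead. The TokenStream class here is a hypothetical stand-in for PyrexScanner.

class TokenStream:
    def __init__(self, tokens):
        self.tokens = list(tokens)
        self.pos = 0
    def sy(self):
        return self.tokens[self.pos][0]
    def peek(self):
        return self.tokens[self.pos + 1]

def at_keyword_argument(s):
    # "IDENT =" starts a keyword argument; nothing is consumed to find out.
    return s.sy() == 'IDENT' and s.peek()[0] == '='

print(at_keyword_argument(TokenStream([('IDENT', 'ndim'), ('=', '='), ('INT', '2')])))  # True
print(at_keyword_argument(TokenStream([('IDENT', 'int'), (']', ']')])))                 # False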
@@ -1875,23 +1867,19 @@ def p_c_simple_base_type(s, self_flag, nonempty, templates = None):
             is_self_arg = self_flag, templates = templates)

-    # Treat trailing [] on type as buffer access if it appears in a context
-    # where declarator names are required (so that it cannot mean int[] or
-    # sizeof(int[SIZE]))...
-    #
-    # (This means that buffers cannot occur where there can be empty declarators,
-    # which is an ok restriction to make.)
-    if nonempty and s.sy == '[':
-        return p_buffer_or_template(s, type_node)
+    if s.sy == '[':
+        return p_buffer_or_template(s, type_node, templates)
     else:
         return type_node

-def p_buffer_or_template(s, base_type_node):
+def p_buffer_or_template(s, base_type_node, templates):
     # s.sy == '['
     pos = s.position()
     s.next()
+    # Note that buffer_positional_options_count=1, so the only positional argument is dtype.
+    # For templated types, all parameters are types.
     positional_args, keyword_args = (
-        p_positional_and_keyword_args(s, (']',), (0,), ('dtype',))
+        p_positional_and_keyword_args(s, (']',), templates)
     )
     s.expect(']')
...
@@ -2227,49 +2227,78 @@ def is_promotion(src_type, dst_type):
 def best_match(args, functions, pos=None):
     """
-    Finds the best function to be called
-    Error if no function fits the call or an ambiguity is find (two or more possible functions)
+    Given a list args of arguments and a list of functions, choose one
+    to call which seems to be the "best" fit for this list of arguments.
+    This function is used, e.g., when deciding which overloaded method
+    to dispatch for C++ classes.
+
+    We first eliminate functions based on arity, and if only one
+    function has the correct arity, we return it. Otherwise, we weight
+    functions based on how much work must be done to convert the
+    arguments, with the following priorities:
+     * identical types or pointers to identical types
+     * promotions
+     * non-Python types
+    That is, we prefer functions where no arguments need converted,
+    and failing that, functions where only promotions are required, and
+    so on.
+
+    If no function is deemed a good fit, or if two or more functions have
+    the same weight, we return None (as there is no best match). If pos
+    is not None, we also generate an error.
     """
     # TODO: args should be a list of types, not a list of Nodes.
     actual_nargs = len(args)
-    possibilities = []
-    bad_types = 0
-    from_type = None
-    target_type = None
+
+    candidates = []
+    errors = []
     for func in functions:
+        error_mesg = ""
         func_type = func.type
         if func_type.is_ptr:
             func_type = func_type.base_type
         # Check function type
         if not func_type.is_cfunction:
             if not func_type.is_error and pos is not None:
-                error(pos, "Calling non-function type '%s'" % func_type)
-            return None
+                error_mesg = "Calling non-function type '%s'" % func_type
+            errors.append((func, error_mesg))
+            continue
         # Check no. of args
         max_nargs = len(func_type.args)
         min_nargs = max_nargs - func_type.optional_arg_count
-        if actual_nargs < min_nargs \
-            or (not func_type.has_varargs and actual_nargs > max_nargs):
+        if actual_nargs < min_nargs or \
+            (not func_type.has_varargs and actual_nargs > max_nargs):
             if max_nargs == min_nargs and not func_type.has_varargs:
                 expectation = max_nargs
             elif actual_nargs < min_nargs:
                 expectation = "at least %s" % min_nargs
             else:
                 expectation = "at most %s" % max_nargs
-            error_str = "Call with wrong number of arguments (expected %s, got %s)" \
-                        % (expectation, actual_nargs)
-            continue
-        if len(functions) == 1:
-            # Optimize the most common case of no overloading...
-            return func
+            error_mesg = "Call with wrong number of arguments (expected %s, got %s)" \
+                         % (expectation, actual_nargs)
+            errors.append((func, error_mesg))
+            continue
+        candidates.append((func, func_type))
+
+    # Optimize the most common case of no overloading...
+    if len(candidates) == 1:
+        return candidates[0][0]
+    elif len(candidates) == 0:
+        if len(errors) == 1 and pos is not None:
+            error(pos, errors[0][1])
+        return None
+
+    possibilities = []
+    bad_types = []
+    for func, func_type in candidates:
         score = [0,0,0]
         for i in range(min(len(args), len(func_type.args))):
             src_type = args[i].type
             dst_type = func_type.args[i].type
             if dst_type.assignable_from(src_type):
-                if src_type == dst_type or (dst_type.is_reference and \
-                        src_type == dst_type.base_type) or \
-                        dst_type.same_as(src_type):
+                if src_type == dst_type or (dst_type.is_reference and \
+                        src_type == dst_type.base_type) \
+                        or dst_type.same_as(src_type):
                     pass # score 0
                 elif is_promotion(src_type, dst_type):
                     score[2] += 1
@@ -2278,13 +2307,13 @@ def best_match(args, functions, pos=None):
                 else:
                     score[0] += 1
             else:
-                bad_types = func
-                from_type = src_type
-                target_type = dst_type
+                error_mesg = "Invalid conversion from '%s' to '%s'"%(src_type,
+                                                                     dst_type)
+                bad_types.append((func, error_mesg))
                 break
         else:
             possibilities.append((score, func)) # so we can sort it
-    if len(possibilities):
+    if possibilities:
         possibilities.sort()
         if len(possibilities) > 1 and possibilities[0][0] == possibilities[1][0]:
             if pos is not None:
@@ -2292,10 +2321,10 @@ def best_match(args, functions, pos=None):
                 return None
         return possibilities[0][1]
     if pos is not None:
-        if bad_types:
-            error(pos, "Invalid conversion from '%s' to '%s'" % (from_type, target_type))
+        if len(bad_types) == 1:
+            error(pos, bad_types[0][1])
         else:
-            error(pos, error_str)
+            error(pos, "no suitable method found")
     return None
...
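A minimal standalone sketch (not part of this commit) of the two-phase overload resolution the docstring above describes: filter by arity, then score the surviving candidates and reject ties. Types are plain strings and PROMOTIONS is a made-up table; the real code scores Cython type objects.

PROMOTIONS = {('int', 'long'), ('int', 'double'), ('float', 'double')}

def best_match(arg_types, signatures):
    candidates = [sig for sig in signatures if len(sig) == len(arg_types)]
    if len(candidates) == 1:
        return candidates[0]          # common case: no real overloading
    scored = []
    for sig in candidates:
        score = [0, 0]                # [other conversions, promotions]
        for src, dst in zip(arg_types, sig):
            if src == dst:
                pass                  # exact match costs nothing
            elif (src, dst) in PROMOTIONS:
                score[1] += 1         # promotions are cheap
            else:
                break                 # not convertible: drop this candidate
        else:
            scored.append((score, sig))
    scored.sort()
    if not scored or (len(scored) > 1 and scored[0][0] == scored[1][0]):
        return None                   # no usable overload, or an ambiguity
    return scored[0][1]

print(best_match(['int', 'double'], [('int', 'double'), ('double', 'double')]))
# ('int', 'double'): the exact match beats the one needing an int -> double promotion.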
@@ -186,6 +186,8 @@ def escape_byte_string(s):
     return join_bytes(l).decode('ISO-8859-1')

 def split_docstring(s):
+    # MSVC can't handle long string literals.
     if len(s) < 2047:
         return s
-    return '\\n\"\"'.join(s.split(r'\n'))
+    else:
+        return '""'.join([s[i:i+2000] for i in range(0, len(s), 2000)])
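A minimal standalone sketch (not part of this commit) of why the chunked split works: adjacent C string literals are concatenated by the compiler, so inserting "" between chunks closes one literal and immediately opens the next, keeping every literal under MSVC's length limit while denoting the same string. A fixed-width cut like this assumes it never lands inside an escape sequence such as \n.

def split_long_literal(escaped, limit=2000):
    if len(escaped) < 2047:
        return escaped
    # '""' closes one C literal and opens the next; the pieces concatenate.
    return '""'.join(escaped[i:i + limit] for i in range(0, len(escaped), limit))

chunks = split_long_literal('x' * 5000)
assert chunks.count('""') == 2 and chunks.replace('""', '') == 'x' * 5000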
@@ -1533,7 +1533,10 @@ class CppClassScope(Scope):
                         api = 0, in_pxd = 0, modifiers = ()):
         if name == self.name.split('::')[-1] and cname is None:
             name = '<init>'
+        prev_entry = self.lookup_here(name)
         entry = self.declare_var(name, type, pos, cname, visibility)
+        if prev_entry:
+            entry.overloaded_alternatives = prev_entry.all_alternatives()

     def declare_inherited_cpp_attributes(self, base_scope):
         # Declare entries for all the C++ attributes of an
...
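A minimal standalone sketch (not part of this commit) of the entry chaining added above: each new declaration of a C++ method name remembers the earlier entries, so overload resolution can later see every alternative. Entry and Scope here are simplified stand-ins for the Symtab classes.

class Entry:
    def __init__(self, name, signature):
        self.name = name
        self.signature = signature
        self.overloaded_alternatives = []
    def all_alternatives(self):
        return [self] + self.overloaded_alternatives

class Scope:
    def __init__(self):
        self.entries = {}
    def declare(self, name, signature):
        prev_entry = self.entries.get(name)
        entry = Entry(name, signature)
        if prev_entry:
            # link the new entry to every previously declared overload
            entry.overloaded_alternatives = prev_entry.all_alternatives()
        self.entries[name] = entry
        return entry

scope = Scope()
scope.declare('set_number', '(void)')
entry = scope.declare('set_number', '(double)')
print([e.signature for e in entry.all_alternatives()])   # ['(double)', '(void)']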
@@ -32,14 +32,6 @@ class TestBufferParsing(CythonTest):
     def test_type_keyword(self):
         self.parse(u"cdef object[foo=foo, dtype=short unsigned int] x")

-    def test_notype_as_expr1(self):
-        self.not_parseable("Expected: expression",
-                           u"cdef object[foo2=short unsigned int] x")
-
-    def test_notype_as_expr2(self):
-        self.not_parseable("Expected: expression",
-                           u"cdef object[int, short unsigned int] x")
-
     def test_pos_after_key(self):
         self.not_parseable("Non-keyword arg following keyword arg",
                            u"cdef object[foo=1, 2] x")
...
@@ -28,8 +28,8 @@ from distutils.core import Extension
 from distutils.command.build_ext import build_ext as _build_ext
 distutils_distro = Distribution()

-TEST_DIRS = ['compile', 'errors', 'run', 'pyregr']
-TEST_RUN_DIRS = ['run', 'pyregr']
+TEST_DIRS = ['compile', 'errors', 'run', 'wrappers', 'pyregr']
+TEST_RUN_DIRS = ['run', 'wrappers', 'pyregr']

 # Lists external modules, and a matcher matching tests
 # which should be excluded if the module is not present.
@@ -200,10 +200,10 @@ class TestBuilder(object):
                                   fork=self.fork)

 class CythonCompileTestCase(unittest.TestCase):
-    def __init__(self, directory, workdir, module, language='c',
+    def __init__(self, test_directory, workdir, module, language='c',
                  expect_errors=False, annotate=False, cleanup_workdir=True,
                  cleanup_sharedlibs=True, cython_only=False, fork=True):
-        self.directory = directory
+        self.test_directory = test_directory
         self.workdir = workdir
         self.module = module
         self.language = language
@@ -257,8 +257,8 @@ class CythonCompileTestCase(unittest.TestCase):
         self.runCompileTest()

     def runCompileTest(self):
-        self.compile(self.directory, self.module, self.workdir,
-                     self.directory, self.expect_errors, self.annotate)
+        self.compile(self.test_directory, self.module, self.workdir,
+                     self.test_directory, self.expect_errors, self.annotate)

     def find_module_source_file(self, source_file):
         if not os.path.exists(source_file):
@@ -269,8 +269,15 @@ class CythonCompileTestCase(unittest.TestCase):
         target = '%s.%s' % (module_name, self.language)
         return target

-    def split_source_and_output(self, directory, module, workdir):
-        source_file = os.path.join(directory, module) + '.pyx'
+    def find_source_files(self, test_directory, module_name):
+        is_related = re.compile('%s_.*[.]%s' % (module_name, self.language)).match
+        return [self.build_target_filename(module_name)] + [
+            os.path.join(test_directory, filename)
+            for filename in os.listdir(test_directory)
+            if is_related(filename) and os.path.isfile(os.path.join(test_directory, filename)) ]
+
+    def split_source_and_output(self, test_directory, module, workdir):
+        source_file = os.path.join(test_directory, module) + '.pyx'
         source_and_output = codecs.open(
             self.find_module_source_file(source_file), 'rU', 'ISO-8859-1')
         out = codecs.open(os.path.join(workdir, module + '.pyx'),
@@ -289,12 +296,12 @@ class CythonCompileTestCase(unittest.TestCase):
         else:
             return geterrors()

-    def run_cython(self, directory, module, targetdir, incdir, annotate):
+    def run_cython(self, test_directory, module, targetdir, incdir, annotate):
         include_dirs = INCLUDE_DIRS[:]
         if incdir:
             include_dirs.append(incdir)
         source = self.find_module_source_file(
-            os.path.join(directory, module + '.pyx'))
+            os.path.join(test_directory, module + '.pyx'))
         target = os.path.join(targetdir, self.build_target_filename(module))
         options = CompilationOptions(
             pyrex_default_options,
@@ -309,7 +316,7 @@ class CythonCompileTestCase(unittest.TestCase):
         cython_compile(source, options=options,
                        full_module_name=module)

-    def run_distutils(self, module, workdir, incdir):
+    def run_distutils(self, test_directory, module, workdir, incdir):
         cwd = os.getcwd()
         os.chdir(workdir)
         try:
@@ -324,7 +331,7 @@ class CythonCompileTestCase(unittest.TestCase):
             ext_include_dirs += get_additional_include_dirs()
             extension = Extension(
                 module,
-                sources = [self.build_target_filename(module)],
+                sources = self.find_source_files(test_directory, module),
                 include_dirs = ext_include_dirs,
                 extra_compile_args = CFLAGS,
                 )
@@ -337,19 +344,19 @@ class CythonCompileTestCase(unittest.TestCase):
         finally:
             os.chdir(cwd)

-    def compile(self, directory, module, workdir, incdir,
+    def compile(self, test_directory, module, workdir, incdir,
                 expect_errors, annotate):
         expected_errors = errors = ()
         if expect_errors:
             expected_errors = self.split_source_and_output(
-                directory, module, workdir)
-            directory = workdir
+                test_directory, module, workdir)
+            test_directory = workdir
         if WITH_CYTHON:
             old_stderr = sys.stderr
             try:
                 sys.stderr = ErrorWriter()
-                self.run_cython(directory, module, workdir, incdir, annotate)
+                self.run_cython(test_directory, module, workdir, incdir, annotate)
                 errors = sys.stderr.geterrors()
             finally:
                 sys.stderr = old_stderr
@@ -373,7 +380,7 @@ class CythonCompileTestCase(unittest.TestCase):
                 raise
             else:
                 if not self.cython_only:
-                    self.run_distutils(module, workdir, incdir)
+                    self.run_distutils(test_directory, module, workdir, incdir)

 class CythonRunTestCase(CythonCompileTestCase):
     def shortDescription(self):
...
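A minimal standalone sketch (not part of this commit) of the naming convention the new find_source_files() relies on: extra sources that belong to a test module are expected to sit next to it as <module>_*.<language> (for example cppwrap_lib.cpp alongside cppwrap.pyx) and get added to the Extension's sources. The directory listing below is hypothetical.

import re

def find_related_sources(filenames, module_name, language='cpp'):
    # same pattern as the runtests change: "<module>_<anything>.<language>"
    is_related = re.compile('%s_.*[.]%s' % (module_name, language)).match
    return [filename for filename in filenames if is_related(filename)]

print(find_related_sources(
    ['cppwrap.pyx', 'cppwrap_lib.cpp', 'cppwrap_lib.h', 'other.cpp'], 'cppwrap'))
# ['cppwrap_lib.cpp']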
@@ -9,4 +9,5 @@ missing_baseclass_in_predecl_T262
 cfunc_call_tuple_args_T408
 cascaded_list_unpacking_T467
 compile.cpp_operators
-cpp_nested_templates
+cppwrap
+cpp_overload_wrapper
@@ -14,7 +14,7 @@ def f():
 _ERRORS = u"""
 1:17: Buffer types only allowed as function local variables
 3:21: Buffer types only allowed as function local variables
-6:27: "fakeoption" is not a buffer option
+6:31: "fakeoption" is not a buffer option
 """
 #TODO:
 #7:22: "ndim" must be non-negative
...
cimport cython
cdef extern from "Python.h":
cdef cython.unicode PyUnicode_DecodeUTF8(char* s, Py_ssize_t size, char* errors)
def test_capi():
"""
>>> print(test_capi())
abc
"""
return PyUnicode_DecodeUTF8("abc", 3, NULL)
-from cython import dereference as deref
+from cython.operator cimport dereference as deref

 cdef extern from "cpp_templates_helper.h":
     cdef cppclass Wrap[T]:
@@ -17,15 +17,15 @@ cdef extern from "cpp_templates_helper.h":
 def test_wrap_pair(int i, double x):
     """
     >>> test_wrap_pair(1, 1.5)
-    (1, 1.5, True, False)
+    (1, 1.5, True)
     >>> test_wrap_pair(2, 2.25)
-    (2, 2.25, True, False)
+    (2, 2.25, True)
     """
     cdef Pair[int, double] *pair
     cdef Wrap[Pair[int, double]] *wrap
     try:
         pair = new Pair[int, double](i, x)
-        warp = new Wrap[Pair[int, double]](deref(pair))
+        wrap = new Wrap[Pair[int, double]](deref(pair))
         return wrap.get().first(), wrap.get().second(), deref(wrap) == deref(wrap)
     finally:
         del pair, wrap
@@ -2,7 +2,7 @@ template <class T>
 class Wrap {
     T value;
 public:
-    Wrap(T v) { value = v; }
+    Wrap(T v) : value(v) { }
     void set(T v) { value = v; }
     T get(void) { return value; }
     bool operator==(Wrap<T> other) { return value == other.value; }
...
cimport cppwrap_lib
cdef class DoubleKeeper:
cdef cppwrap_lib.DoubleKeeper* keeper
def __cinit__(self, number=None):
if number is None:
self.keeper = new cppwrap_lib.DoubleKeeper()
else:
self.keeper = new cppwrap_lib.DoubleKeeper(number)
def __dealloc__(self):
del self.keeper
def set_number(self, number=None):
if number is None:
self.keeper.set_number()
else:
self.keeper.set_number(number)
def get_number(self):
return self.keeper.get_number()
def transmogrify(self, double value):
return self.keeper.transmogrify(value)
def voidfunc():
cppwrap_lib.voidfunc()
def doublefunc(double x, double y, double z):
return cppwrap_lib.doublefunc(x, y, z)
def transmogrify_from_cpp(DoubleKeeper obj not None, double value):
return cppwrap_lib.transmogrify_from_cpp(obj.keeper, value)
#include "cppwrap_lib.h"
void voidfunc (void)
{
}
double doublefunc (double a, double b, double c)
{
return a + b + c;
}
DoubleKeeper::DoubleKeeper ()
: number (1.0)
{
}
DoubleKeeper::DoubleKeeper (double factor)
: number (factor)
{
}
DoubleKeeper::~DoubleKeeper ()
{
}
double DoubleKeeper::get_number () const
{
return number;
}
void DoubleKeeper::set_number (double f)
{
number = f;
}
void DoubleKeeper::set_number ()
{
number = 1.0;
}
double
DoubleKeeper::transmogrify (double value) const
{
return value*number;
}
double
transmogrify_from_cpp (DoubleKeeper const *obj, double value)
{
return obj->transmogrify (value);
}
void voidfunc(void);
double doublefunc (double a, double b, double c);
class DoubleKeeper
{
double number;
public:
DoubleKeeper ();
DoubleKeeper (double number);
virtual ~DoubleKeeper ();
void set_number (double num);
void set_number (void);
double get_number () const;
virtual double transmogrify (double value) const;
};
double transmogrify_from_cpp (DoubleKeeper const *obj, double value);
cdef extern from "testapi.h":
void voidfunc()
double doublefunc(double a, double b, double c)
cdef cppclass DoubleKeeper:
DoubleKeeper()
DoubleKeeper(double factor)
void set_number()
void set_number(double f)
double get_number()
double transmogrify(double value)
double transmogrify_from_cpp (DoubleKeeper *obj, double value)
cimport cppwrap_lib
cdef class DoubleKeeper:
cdef cppwrap_lib.DoubleKeeper* keeper
def __cinit__(self, double number):
self.keeper = new cppwrap_lib.DoubleKeeper(number)
def __dealloc__(self):
del self.keeper
def set_number(self, double number):
self.keeper.set_number(number)
def get_number(self):
return self.keeper.get_number()
def transmogrify(self, double value):
return self.keeper.transmogrify(value)
def voidfunc():
cppwrap_lib.voidfunc()
def doublefunc(double x, double y, double z):
return cppwrap_lib.doublefunc(x, y, z)
def transmogrify_from_cpp(DoubleKeeper obj not None, double value):
return cppwrap_lib.transmogrify_from_cpp(obj.keeper, value)
#include "cppwrap_lib.h"
void voidfunc (void)
{
}
double doublefunc (double a, double b, double c)
{
return a + b + c;
}
DoubleKeeper::DoubleKeeper ()
: number (1.0)
{
}
DoubleKeeper::DoubleKeeper (double factor)
: number (factor)
{
}
DoubleKeeper::~DoubleKeeper ()
{
}
double DoubleKeeper::get_number () const
{
return number;
}
void DoubleKeeper::set_number (double f)
{
number = f;
}
void DoubleKeeper::set_number ()
{
number = 1.0;
}
double
DoubleKeeper::transmogrify (double value) const
{
return value*number;
}
double
transmogrify_from_cpp (DoubleKeeper const *obj, double value)
{
return obj->transmogrify (value);
}
void voidfunc(void);
double doublefunc (double a, double b, double c);
class DoubleKeeper
{
double number;
public:
DoubleKeeper (double number);
virtual ~DoubleKeeper ();
void set_number (double num);
double get_number () const;
virtual double transmogrify (double value) const;
};
double transmogrify_from_cpp (DoubleKeeper const *obj, double value);
cdef extern from "testapi.h":
void voidfunc()
double doublefunc(double a, double b, double c)
cdef cppclass DoubleKeeper:
DoubleKeeper(double factor)
void set_number(double f)
double get_number()
double transmogrify(double value)
double transmogrify_from_cpp (DoubleKeeper *obj, double value)