Commit 8e396c4b authored by scoder

Merge pull request #95 from scoder/_dict_iter_rewrite

Merging the _dict_iter_rewrite branch after discussion on the cython-devel list
parents 286364bf 42f58e7c
......@@ -9752,25 +9752,9 @@ static CYTHON_INLINE int __Pyx_ErrOccurredWithGIL(void) {
#------------------------------------------------------------------------------------
raise_noneattr_error_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE void __Pyx_RaiseNoneAttributeError(const char* attrname);
""",
impl = '''
static CYTHON_INLINE void __Pyx_RaiseNoneAttributeError(const char* attrname) {
PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%s'", attrname);
}
''')
raise_noneindex_error_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE void __Pyx_RaiseNoneIndexingError(void);
""",
impl = '''
static CYTHON_INLINE void __Pyx_RaiseNoneIndexingError(void) {
PyErr_SetString(PyExc_TypeError, "'NoneType' object is unsubscriptable");
}
''')
raise_noneattr_error_utility_code = UtilityCode.load_cached("RaiseNoneAttrError", "ObjectHandling.c")
raise_noneindex_error_utility_code = UtilityCode.load_cached("RaiseNoneIndexingError", "ObjectHandling.c")
raise_none_iter_error_utility_code = UtilityCode.load_cached("RaiseNoneIterError", "ObjectHandling.c")
raise_noneindex_memview_error_utility_code = UtilityCode(
proto = """
......@@ -9782,16 +9766,6 @@ static CYTHON_INLINE void __Pyx_RaiseNoneMemviewIndexingError(void) {
}
''')
raise_none_iter_error_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void);
""",
impl = '''
static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) {
PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable");
}
''')
raise_unbound_local_error_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname);
......@@ -10029,92 +10003,14 @@ impl = """
#------------------------------------------------------------------------------------
raise_too_many_values_to_unpack = UtilityCode(
proto = """
static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected);
""",
impl = '''
static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) {
PyErr_Format(PyExc_ValueError,
"too many values to unpack (expected %"PY_FORMAT_SIZE_T"d)", expected);
}
''')
raise_need_more_values_to_unpack = UtilityCode(
proto = """
static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index);
""",
impl = '''
static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) {
PyErr_Format(PyExc_ValueError,
"need more than %"PY_FORMAT_SIZE_T"d value%s to unpack",
index, (index == 1) ? "" : "s");
}
''')
raise_too_many_values_to_unpack = UtilityCode.load_cached("RaiseTooManyValuesToUnpack", "ObjectHandling.c")
raise_need_more_values_to_unpack = UtilityCode.load_cached("RaiseNeedMoreValuesToUnpack", "ObjectHandling.c")
#------------------------------------------------------------------------------------
tuple_unpacking_error_code = UtilityCode(
proto = """
static void __Pyx_UnpackTupleError(PyObject *, Py_ssize_t index); /*proto*/
""",
impl = """
static void __Pyx_UnpackTupleError(PyObject *t, Py_ssize_t index) {
if (t == Py_None) {
__Pyx_RaiseNoneNotIterableError();
} else if (PyTuple_GET_SIZE(t) < index) {
__Pyx_RaiseNeedMoreValuesError(PyTuple_GET_SIZE(t));
} else {
__Pyx_RaiseTooManyValuesError(index);
}
}
""",
requires = [raise_none_iter_error_utility_code,
raise_need_more_values_to_unpack,
raise_too_many_values_to_unpack]
)
unpacking_utility_code = UtilityCode(
proto = """
static PyObject *__Pyx_UnpackItem(PyObject *, Py_ssize_t index); /*proto*/
""",
impl = """
static PyObject *__Pyx_UnpackItem(PyObject *iter, Py_ssize_t index) {
PyObject *item;
if (!(item = PyIter_Next(iter))) {
if (!PyErr_Occurred()) {
__Pyx_RaiseNeedMoreValuesError(index);
}
}
return item;
}
""",
requires = [raise_need_more_values_to_unpack]
)
iternext_unpacking_end_utility_code = UtilityCode(
proto = """
static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); /*proto*/
""",
impl = """
static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) {
if (unlikely(retval)) {
Py_DECREF(retval);
__Pyx_RaiseTooManyValuesError(expected);
return -1;
} else if (PyErr_Occurred()) {
if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) {
PyErr_Clear();
return 0;
} else {
return -1;
}
}
return 0;
}
""",
requires = [raise_too_many_values_to_unpack]
)
tuple_unpacking_error_code = UtilityCode.load_cached("UnpackTupleError", "ObjectHandling.c")
unpacking_utility_code = UtilityCode.load_cached("UnpackItem", "ObjectHandling.c")
iternext_unpacking_end_utility_code = UtilityCode.load_cached("UnpackItemEndCheck", "ObjectHandling.c")
#------------------------------------------------------------------------------------
......
......@@ -5772,45 +5772,98 @@ class DictIterationNextNode(Node):
# Helper node for calling __Pyx_dict_iter_next() (which wraps PyDict_Next())
# inside of a WhileStatNode and checking the dictionary size for changes.
# Created in Optimize.py.
child_attrs = ['dict_obj', 'expected_size', 'pos_index_addr', 'key_addr', 'value_addr']
child_attrs = ['dict_obj', 'expected_size', 'pos_index_var',
'coerced_key_var', 'coerced_value_var', 'coerced_tuple_var',
'key_target', 'value_target', 'tuple_target', 'is_dict_flag']
def __init__(self, dict_obj, expected_size, pos_index_addr, key_addr, value_addr):
coerced_key_var = key_ref = None
coerced_value_var = value_ref = None
coerced_tuple_var = tuple_ref = None
def __init__(self, dict_obj, expected_size, pos_index_var,
key_target, value_target, tuple_target, is_dict_flag):
Node.__init__(
self, dict_obj.pos,
dict_obj = dict_obj,
expected_size = expected_size,
pos_index_addr = pos_index_addr,
key_addr = key_addr,
value_addr = value_addr,
pos_index_var = pos_index_var,
key_target = key_target,
value_target = value_target,
tuple_target = tuple_target,
is_dict_flag = is_dict_flag,
is_temp = True,
type = PyrexTypes.c_bint_type)
def analyse_expressions(self, env):
import UtilNodes
self.dict_obj.analyse_types(env)
self.expected_size.analyse_types(env)
self.pos_index_addr.analyse_types(env)
self.key_addr.analyse_types(env)
self.value_addr.analyse_types(env)
if self.pos_index_var: self.pos_index_var.analyse_types(env)
if self.key_target:
self.key_target.analyse_target_types(env)
self.key_ref = UtilNodes.ResultRefNode(pos=self.key_target.pos, is_temp=True,
type=PyrexTypes.py_object_type)
self.coerced_key_var = self.key_ref.coerce_to(self.key_target.type, env)
if self.value_target:
self.value_target.analyse_target_types(env)
self.value_ref = UtilNodes.ResultRefNode(pos=self.value_target.pos, is_temp=True,
type=PyrexTypes.py_object_type)
self.coerced_value_var = self.value_ref.coerce_to(self.value_target.type, env)
if self.tuple_target:
self.tuple_target.analyse_target_types(env)
self.tuple_ref = UtilNodes.ResultRefNode(pos=self.tuple_target.pos, is_temp=True,
type=PyrexTypes.py_object_type)
self.coerced_tuple_var = self.tuple_ref.coerce_to(self.tuple_target.type, env)
self.is_dict_flag.analyse_types(env)
def generate_function_definitions(self, env, code):
self.dict_obj.generate_function_definitions(env, code)
def generate_execution_code(self, code):
code.globalstate.use_utility_code(UtilityCode.load_cached("dict_iter", "Optimize.c"))
self.dict_obj.generate_evaluation_code(code)
code.putln("if (unlikely(%s != PyDict_Size(%s))) {" % (
self.expected_size.result(),
self.dict_obj.py_result(),
))
code.putln('PyErr_SetString(PyExc_RuntimeError, "dictionary changed size during iteration"); %s' % (
code.error_goto(self.pos)))
code.putln("}")
self.pos_index_addr.generate_evaluation_code(code)
code.putln("if (!PyDict_Next(%s, %s, %s, %s)) break;" % (
assignments = []
temp_addresses = []
for var, result, target in [(self.key_ref, self.coerced_key_var, self.key_target),
(self.value_ref, self.coerced_value_var, self.value_target),
(self.tuple_ref, self.coerced_tuple_var, self.tuple_target)]:
if target is None:
addr = 'NULL'
else:
temp = code.funcstate.allocate_temp(PyrexTypes.py_object_type, True)
var.result_code = temp
assignments.append((temp, result, target))
addr = '&%s' % temp
temp_addresses.append(addr)
result_temp = code.funcstate.allocate_temp(PyrexTypes.c_int_type, False)
code.putln("%s = __Pyx_dict_iter_next(%s, %s, &%s, %s, %s, %s, %s);" % (
result_temp,
self.dict_obj.py_result(),
self.pos_index_addr.result(),
self.key_addr.result(),
self.value_addr.result()))
self.expected_size.result(),
self.pos_index_var.result(),
temp_addresses[0],
temp_addresses[1],
temp_addresses[2],
self.is_dict_flag.result()
))
code.putln("if (unlikely(%s == 0)) break;" % result_temp)
code.putln(code.error_goto_if("%s == -1" % result_temp, self.pos))
code.funcstate.release_temp(result_temp)
# evaluate all coercions before the assignments
for temp, result, target in assignments:
code.put_gotref(temp)
result.generate_evaluation_code(code)
if not result.type.is_pyobject:
code.put_decref_clear(temp, PyrexTypes.py_object_type)
code.funcstate.release_temp(temp)
for temp, result, target in assignments:
target.generate_assignment_code(result, code)
if result.type.is_pyobject:
code.funcstate.release_temp(temp)
def ForStatNode(pos, **kw):
if 'iterator' in kw:
......
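For orientation, the code that DictIterationNextNode.generate_execution_code() now emits has roughly the C shape sketched below for a "for k, v in d.iteritems()" loop. This is an editorial sketch, not the verbatim generated code: the names iter_obj, orig_length and source_is_dict stand in for the temporaries produced by the __Pyx_dict_iterator() setup call built in the Optimize.py hunk that follows.
static int dict_items_loop_sketch(PyObject *iter_obj, Py_ssize_t orig_length, int source_is_dict) {
    PyObject *key = NULL, *value = NULL;
    Py_ssize_t pos = 0;
    int status;
    while (1) {
        /* pitem is NULL because separate key/value targets exist; for a single
           tuple target the node passes &tuple and NULL for pkey/pvalue instead. */
        status = __Pyx_dict_iter_next(iter_obj, orig_length, &pos,
                                      &key, &value, NULL, source_is_dict);
        if (status == 0) break;       /* iteration exhausted */
        if (status == -1) return -1;  /* exception set, e.g. dict resized during iteration */
        /* key/value hold new references here; the node coerces and assigns them
           to the loop targets (consuming the references) before the body runs. */
        Py_DECREF(key);
        Py_DECREF(value);
    }
    return 0;
}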
......@@ -152,7 +152,7 @@ class IterationTransform(Visitor.VisitorTransform):
# CPython raises an error here: not a sequence
return node
return self._transform_dict_iteration(
node, dict_obj=iterator, keys=True, values=False)
node, dict_obj=iterator, method=None, keys=True, values=False)
# C array (slice) iteration?
if iterator.type.is_ptr or iterator.type.is_array:
......@@ -164,14 +164,17 @@ class IterationTransform(Visitor.VisitorTransform):
if not isinstance(iterator, ExprNodes.SimpleCallNode):
return node
if iterator.args is None:
arg_count = iterator.arg_tuple and len(iterator.arg_tuple.args) or 0
else:
arg_count = len(iterator.args)
if arg_count and iterator.self is not None:
arg_count -= 1
function = iterator.function
# dict iteration?
if isinstance(function, ExprNodes.AttributeNode) and \
function.obj.type == Builtin.dict_type:
if reversed:
# CPython raises an error here: not a sequence
return node
dict_obj = iterator.self or function.obj
if function.is_attribute and not reversed and not arg_count:
base_obj = iterator.self or function.obj
method = function.attribute
is_py3 = self.module_scope.context.language_level >= 3
......@@ -182,10 +185,10 @@ class IterationTransform(Visitor.VisitorTransform):
values = True
elif method == 'iteritems' or (is_py3 and method == 'items'):
keys = values = True
else:
return node
return self._transform_dict_iteration(
node, dict_obj, keys, values)
if keys or values:
return self._transform_dict_iteration(
node, base_obj, method, keys, values)
# enumerate/reversed ?
if iterator.self is None and function.is_name and \
......@@ -607,9 +610,7 @@ class IterationTransform(Visitor.VisitorTransform):
return for_node
def _transform_dict_iteration(self, node, dict_obj, keys, values):
py_object_ptr = PyrexTypes.py_object_type
def _transform_dict_iteration(self, node, dict_obj, method, keys, values):
temps = []
temp = UtilNodes.TempHandle(PyrexTypes.py_object_type)
temps.append(temp)
......@@ -617,36 +618,8 @@ class IterationTransform(Visitor.VisitorTransform):
temp = UtilNodes.TempHandle(PyrexTypes.c_py_ssize_t_type)
temps.append(temp)
pos_temp = temp.ref(node.pos)
pos_temp_addr = ExprNodes.AmpersandNode(
node.pos, operand=pos_temp,
type=PyrexTypes.c_ptr_type(PyrexTypes.c_py_ssize_t_type))
target_temps = []
if keys:
temp = UtilNodes.TempHandle(
py_object_ptr, needs_cleanup=False) # ref will be stolen
target_temps.append(temp)
key_temp = temp.ref(node.target.pos)
key_temp_addr = ExprNodes.AmpersandNode(
node.target.pos, operand=key_temp,
type=PyrexTypes.c_ptr_type(py_object_ptr))
else:
key_temp_addr = key_temp = ExprNodes.NullNode(
pos=node.target.pos)
if values:
temp = UtilNodes.TempHandle(
py_object_ptr, needs_cleanup=False) # ref will be stolen
target_temps.append(temp)
value_temp = temp.ref(node.target.pos)
value_temp_addr = ExprNodes.AmpersandNode(
node.target.pos, operand=value_temp,
type=PyrexTypes.c_ptr_type(py_object_ptr))
else:
value_temp_addr = value_temp = ExprNodes.NullNode(
pos=node.target.pos)
key_target = value_target = node.target
tuple_target = None
key_target = value_target = tuple_target = None
if keys and values:
if node.target.is_sequence_constructor:
if len(node.target.args) == 2:
......@@ -656,67 +629,10 @@ class IterationTransform(Visitor.VisitorTransform):
return node
else:
tuple_target = node.target
def coerce_object_to(obj_node, dest_type):
if dest_type.is_pyobject:
if dest_type != obj_node.type:
if dest_type.is_extension_type or dest_type.is_builtin_type:
obj_node = ExprNodes.PyTypeTestNode(
obj_node, dest_type, self.current_scope, notnone=True)
result = ExprNodes.TypecastNode(
obj_node.pos,
operand = obj_node,
type = dest_type)
return (result, None)
else:
temp = UtilNodes.TempHandle(dest_type)
target_temps.append(temp)
temp_result = temp.ref(obj_node.pos)
class CoercedTempNode(ExprNodes.CoerceFromPyTypeNode):
def result(self):
return temp_result.result()
def generate_execution_code(self, code):
self.generate_result_code(code)
return (temp_result, CoercedTempNode(dest_type, obj_node, self.current_scope))
if tuple_target:
tuple_result = ExprNodes.TupleNode(
pos = tuple_target.pos,
args = [key_temp, value_temp],
is_temp = 1,
type = Builtin.tuple_type,
)
body_init_stats = [
Nodes.SingleAssignmentNode(
pos = tuple_target.pos,
lhs = tuple_target,
rhs = tuple_result)
]
elif keys:
key_target = node.target
else:
# execute all coercions before the assignments
coercion_stats = []
assign_stats = []
if keys:
temp_result, coercion = coerce_object_to(
key_temp, key_target.type)
if coercion:
coercion_stats.append(coercion)
assign_stats.append(
Nodes.SingleAssignmentNode(
pos = key_temp.pos,
lhs = key_target,
rhs = temp_result))
if values:
temp_result, coercion = coerce_object_to(
value_temp, value_target.type)
if coercion:
coercion_stats.append(coercion)
assign_stats.append(
Nodes.SingleAssignmentNode(
pos = value_temp.pos,
lhs = value_target,
rhs = temp_result))
body_init_stats = coercion_stats + assign_stats
value_target = node.target
if isinstance(node.body, Nodes.StatListNode):
body = node.body
......@@ -727,24 +643,39 @@ class IterationTransform(Visitor.VisitorTransform):
# keep original length to guard against dict modification
dict_len_temp = UtilNodes.TempHandle(PyrexTypes.c_py_ssize_t_type)
temps.append(dict_len_temp)
dict_len_temp_addr = ExprNodes.AmpersandNode(
node.pos, operand=dict_len_temp.ref(dict_obj.pos),
type=PyrexTypes.c_ptr_type(dict_len_temp.type))
temp = UtilNodes.TempHandle(PyrexTypes.c_int_type)
temps.append(temp)
is_dict_temp = temp.ref(node.pos)
is_dict_temp_addr = ExprNodes.AmpersandNode(
node.pos, operand=is_dict_temp,
type=PyrexTypes.c_ptr_type(temp.type))
iter_next_node = Nodes.DictIterationNextNode(
dict_temp, dict_len_temp.ref(dict_obj.pos), pos_temp,
key_target, value_target, tuple_target,
is_dict_temp)
iter_next_node.analyse_expressions(self.current_scope)
body.stats[0:0] = [iter_next_node]
if method:
method_node = ExprNodes.StringNode(
dict_obj.pos, is_identifier=True, value=method)
dict_obj = dict_obj.as_none_safe_node(
"'NoneType' object has no attribute '%s'",
error = "PyExc_AttributeError",
format_args = [method])
else:
method_node = ExprNodes.NullNode(dict_obj.pos)
dict_obj = dict_obj.as_none_safe_node("'NoneType' object is not iterable")
body_init_stats.insert(0, Nodes.DictIterationNextNode(
dict_temp,
dict_len_temp.ref(dict_obj.pos),
pos_temp_addr, key_temp_addr, value_temp_addr
))
body.stats[0:0] = [UtilNodes.TempsBlockNode(
node.pos,
temps = target_temps,
body = Nodes.StatListNode(pos = node.pos,
stats = body_init_stats)
)]
def flag_node(value):
value = value and 1 or 0
return ExprNodes.IntNode(node.pos, value=str(value), constant_result=value)
result_code = [
Nodes.SingleAssignmentNode(
pos = dict_obj.pos,
lhs = dict_temp,
rhs = dict_obj),
Nodes.SingleAssignmentNode(
pos = node.pos,
lhs = pos_temp,
......@@ -752,16 +683,16 @@ class IterationTransform(Visitor.VisitorTransform):
constant_result=0)),
Nodes.SingleAssignmentNode(
pos = dict_obj.pos,
lhs = dict_len_temp.ref(dict_obj.pos),
rhs = ExprNodes.SimpleCallNode(
pos = dict_obj.pos,
type = PyrexTypes.c_py_ssize_t_type,
function = ExprNodes.NameNode(
pos = dict_obj.pos,
name = self.PyDict_Size_name,
type = self.PyDict_Size_func_type,
entry = self.PyDict_Size_entry),
args = [dict_temp],
lhs = dict_temp,
rhs = ExprNodes.PythonCapiCallNode(
dict_obj.pos,
"__Pyx_dict_iterator",
self.PyDict_Iterator_func_type,
utility_code = UtilityCode.load_cached("dict_iter", "Optimize.c"),
args = [dict_obj, flag_node(dict_obj.type is Builtin.dict_type),
method_node, dict_len_temp_addr, is_dict_temp_addr,
],
is_temp=True,
)),
Nodes.WhileStatNode(
pos = node.pos,
......@@ -778,6 +709,15 @@ class IterationTransform(Visitor.VisitorTransform):
stats = result_code
))
PyDict_Iterator_func_type = PyrexTypes.CFuncType(
PyrexTypes.py_object_type, [
PyrexTypes.CFuncTypeArg("dict", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("is_dict", PyrexTypes.c_int_type, None),
PyrexTypes.CFuncTypeArg("method_name", PyrexTypes.py_object_type, None),
PyrexTypes.CFuncTypeArg("p_orig_length", PyrexTypes.c_py_ssize_t_ptr_type, None),
PyrexTypes.CFuncTypeArg("p_is_dict", PyrexTypes.c_int_ptr_type, None),
])
class SwitchTransform(Visitor.VisitorTransform):
"""
......
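PyDict_Iterator_func_type above matches the __Pyx_dict_iterator() helper defined later in Optimize.c. As a hedged sketch (placeholder names, not the generated temps, and error handling omitted), the two argument patterns the transform builds look like this: a statically typed dict passes the is_dict flag and no method name, while the optimistic case passes an interned method name such as "iteritems" (name_iteritems below is a hypothetical stand-in for that StringNode result).
static void dict_iterator_setup_sketch(PyObject *d, PyObject *obj, PyObject *name_iteritems) {
    Py_ssize_t orig_length;
    int source_is_dict;
    /* for k in d:  -- d is typed as dict, so flag_node() yields 1 and no method name */
    PyObject *it1 = __Pyx_dict_iterator(d, 1, NULL, &orig_length, &source_is_dict);
    Py_XDECREF(it1);
    /* for k, v in obj.iteritems():  -- untyped object, method name passed through */
    PyObject *it2 = __Pyx_dict_iterator(obj, 0, name_iteritems, &orig_length, &source_is_dict);
    Py_XDECREF(it2);
}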
......@@ -120,7 +120,7 @@ class ResultRefNode(AtomicExprNode):
subexprs = []
lhs_of_first_assignment = False
def __init__(self, expression=None, pos=None, type=None, may_hold_none=True):
def __init__(self, expression=None, pos=None, type=None, may_hold_none=True, is_temp=False):
self.expression = expression
self.pos = None
self.may_hold_none = may_hold_none
......@@ -132,6 +132,8 @@ class ResultRefNode(AtomicExprNode):
self.pos = pos
if type is not None:
self.type = type
if is_temp:
self.is_temp = True
assert self.pos is not None
def clone_node(self):
......
/////////////// RaiseNoneAttrError.proto ///////////////
static CYTHON_INLINE void __Pyx_RaiseNoneAttributeError(const char* attrname);
/////////////// RaiseNoneAttrError ///////////////
static CYTHON_INLINE void __Pyx_RaiseNoneAttributeError(const char* attrname) {
PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%s'", attrname);
}
/////////////// RaiseNoneIndexingError.proto ///////////////
static CYTHON_INLINE void __Pyx_RaiseNoneIndexingError(void);
/////////////// RaiseNoneIndexingError ///////////////
static CYTHON_INLINE void __Pyx_RaiseNoneIndexingError(void) {
PyErr_SetString(PyExc_TypeError, "'NoneType' object is unsubscriptable");
}
/////////////// RaiseNoneIterError.proto ///////////////
static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void);
/////////////// RaiseNoneIterError ///////////////
static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) {
PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable");
}
/////////////// RaiseTooManyValuesToUnpack.proto ///////////////
static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected);
/////////////// RaiseTooManyValuesToUnpack ///////////////
static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) {
PyErr_Format(PyExc_ValueError,
"too many values to unpack (expected %"PY_FORMAT_SIZE_T"d)", expected);
}
/////////////// RaiseNeedMoreValuesToUnpack.proto ///////////////
static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index);
/////////////// RaiseNeedMoreValuesToUnpack ///////////////
static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) {
PyErr_Format(PyExc_ValueError,
"need more than %"PY_FORMAT_SIZE_T"d value%s to unpack",
index, (index == 1) ? "" : "s");
}
/////////////// UnpackTupleError.proto ///////////////
static void __Pyx_UnpackTupleError(PyObject *, Py_ssize_t index); /*proto*/
/////////////// UnpackTupleError ///////////////
//@requires: RaiseNoneIterError
//@requires: RaiseNeedMoreValuesToUnpack
//@requires: RaiseTooManyValuesToUnpack
static void __Pyx_UnpackTupleError(PyObject *t, Py_ssize_t index) {
if (t == Py_None) {
__Pyx_RaiseNoneNotIterableError();
} else if (PyTuple_GET_SIZE(t) < index) {
__Pyx_RaiseNeedMoreValuesError(PyTuple_GET_SIZE(t));
} else {
__Pyx_RaiseTooManyValuesError(index);
}
}
/////////////// UnpackItem.proto ///////////////
static PyObject *__Pyx_UnpackItem(PyObject *, Py_ssize_t index); /*proto*/
/////////////// UnpackItem ///////////////
//@requires: RaiseNeedMoreValuesToUnpack
static PyObject *__Pyx_UnpackItem(PyObject *iter, Py_ssize_t index) {
PyObject *item;
if (!(item = PyIter_Next(iter))) {
if (!PyErr_Occurred()) {
__Pyx_RaiseNeedMoreValuesError(index);
}
}
return item;
}
/////////////// UnpackItemEndCheck.proto ///////////////
static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); /*proto*/
/////////////// UnpackItemEndCheck ///////////////
//@requires: RaiseTooManyValuesToUnpack
//@requires: IterFinish
static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) {
if (unlikely(retval)) {
Py_DECREF(retval);
__Pyx_RaiseTooManyValuesError(expected);
return -1;
} else {
return __Pyx_IterFinish();
}
return 0;
}
/////////////// UnpackTuple2.proto ///////////////
static CYTHON_INLINE int __Pyx_unpack_tuple2(PyObject* tuple, PyObject** value1, PyObject** value2,
int is_tuple, int has_known_size, int decref_tuple);
/////////////// UnpackTuple2 ///////////////
//@requires: UnpackItem
//@requires: UnpackItemEndCheck
//@requires: UnpackTupleError
static CYTHON_INLINE int __Pyx_unpack_tuple2(PyObject* tuple, PyObject** pvalue1, PyObject** pvalue2,
int is_tuple, int has_known_size, int decref_tuple) {
PyObject *value1 = NULL, *value2 = NULL, *iter = NULL;
if (!is_tuple && unlikely(!PyTuple_Check(tuple))) {
iter = PyObject_GetIter(tuple);
if (unlikely(!iter)) goto bad;
if (decref_tuple) { Py_DECREF(tuple); tuple = NULL; }
value1 = __Pyx_UnpackItem(iter, 0);
if (unlikely(!value1)) goto bad;
value2 = __Pyx_UnpackItem(iter, 1);
if (unlikely(!value2)) goto bad;
if (!has_known_size && unlikely(__Pyx_IternextUnpackEndCheck(PyIter_Next(iter), 2))) goto bad;
Py_DECREF(iter);
} else {
if (!has_known_size && unlikely(PyTuple_GET_SIZE(tuple) != 2)) {
__Pyx_UnpackTupleError(tuple, 2);
goto bad;
}
value1 = PyTuple_GET_ITEM(tuple, 0);
value2 = PyTuple_GET_ITEM(tuple, 1);
Py_INCREF(value1);
Py_INCREF(value2);
if (decref_tuple) { Py_DECREF(tuple); }
}
*pvalue1 = value1;
*pvalue2 = value2;
return 0;
bad:
Py_XDECREF(iter);
Py_XDECREF(value1);
Py_XDECREF(value2);
if (decref_tuple) { Py_XDECREF(tuple); }
return -1;
}
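/* Editorial usage sketch, not part of the utility code: unpack an arbitrary
   2-item sequence or iterable into two new references, mirroring the call made
   in __Pyx_dict_iter_next() in Optimize.c.  split_pair_sketch() is a
   hypothetical helper; pair is a borrowed reference. */
static int split_pair_sketch(PyObject *pair, PyObject **first, PyObject **second) {
    /* is_tuple=0: not statically known to be a tuple;
       has_known_size=0: the length has not been checked yet;
       decref_tuple=0: the caller keeps its own reference to pair. */
    return __Pyx_unpack_tuple2(pair, first, second, 0, 0, 0);
}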
/////////////// IterFinish.proto ///////////////
static CYTHON_INLINE int __Pyx_IterFinish(void); /*proto*/
/////////////// IterFinish ///////////////
// When PyIter_Next(iter) has returned NULL in order to signal termination,
// this function does the right cleanup and returns 0 on success. If it
// detects an error that occurred in the iterator, it returns -1.
static CYTHON_INLINE int __Pyx_IterFinish(void) {
#if CYTHON_COMPILING_IN_CPYTHON
PyThreadState *tstate = PyThreadState_GET();
PyObject* exc_type = tstate->curexc_type;
if (unlikely(exc_type)) {
if (likely(exc_type == PyExc_StopIteration) || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)) {
PyObject *exc_value, *exc_tb;
exc_value = tstate->curexc_value;
exc_tb = tstate->curexc_traceback;
tstate->curexc_type = 0;
tstate->curexc_value = 0;
tstate->curexc_traceback = 0;
Py_DECREF(exc_type);
Py_XDECREF(exc_value);
Py_XDECREF(exc_tb);
return 0;
} else {
return -1;
}
}
return 0;
#else
if (unlikely(PyErr_Occurred())) {
if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) {
PyErr_Clear();
return 0;
} else {
return -1;
}
}
return 0;
#endif
}
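A brief usage sketch for __Pyx_IterFinish() (an editorial example, not part of the utility file): it is called exactly when PyIter_Next() has returned NULL, to tell normal exhaustion apart from a pending exception.
static int drain_iterator_sketch(PyObject *iter) {
    PyObject *item;
    while ((item = PyIter_Next(iter))) {
        /* ... process item ... */
        Py_DECREF(item);
    }
    /* NULL from PyIter_Next(): either the iterator is exhausted or an error
       occurred; __Pyx_IterFinish() returns 0 for the former, -1 for the latter. */
    return __Pyx_IterFinish();
}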
......@@ -299,6 +299,111 @@ static CYTHON_INLINE PyObject* __Pyx_PyDict_Clear(PyObject* d) {
return Py_None;
}
/////////////// dict_iter.proto ///////////////
static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* dict, int is_dict, PyObject* method_name,
Py_ssize_t* p_orig_length, int* p_is_dict);
static CYTHON_INLINE int __Pyx_dict_iter_next(PyObject* dict_or_iter, Py_ssize_t orig_length, Py_ssize_t* ppos,
PyObject** pkey, PyObject** pvalue, PyObject** pitem, int is_dict);
/////////////// dict_iter ///////////////
//@requires: ObjectHandling.c::UnpackTuple2
//@requires: ObjectHandling.c::IterFinish
static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* iterable, int is_dict, PyObject* method_name,
Py_ssize_t* p_orig_length, int* p_source_is_dict) {
is_dict = is_dict || likely(PyDict_CheckExact(iterable));
*p_source_is_dict = is_dict;
#if !CYTHON_COMPILING_IN_PYPY
if (is_dict) {
*p_orig_length = PyDict_Size(iterable);
Py_INCREF(iterable);
return iterable;
}
#endif
*p_orig_length = 0;
if (method_name) {
PyObject* iter;
iterable = PyObject_CallMethodObjArgs(iterable, method_name, NULL);
if (!iterable)
return NULL;
#if !CYTHON_COMPILING_IN_PYPY
if (PyTuple_CheckExact(iterable) || PyList_CheckExact(iterable))
return iterable;
#endif
iter = PyObject_GetIter(iterable);
Py_DECREF(iterable);
return iter;
}
return PyObject_GetIter(iterable);
}
static CYTHON_INLINE int __Pyx_dict_iter_next(PyObject* iter_obj, Py_ssize_t orig_length, Py_ssize_t* ppos,
PyObject** pkey, PyObject** pvalue, PyObject** pitem, int source_is_dict) {
PyObject* next_item;
#if !CYTHON_COMPILING_IN_PYPY
if (source_is_dict) {
PyObject *key, *value;
if (unlikely(orig_length != PyDict_Size(iter_obj))) {
PyErr_SetString(PyExc_RuntimeError, "dictionary changed size during iteration");
return -1;
}
if (unlikely(!PyDict_Next(iter_obj, ppos, &key, &value))) {
return 0;
}
if (pitem) {
PyObject* tuple = PyTuple_New(2);
if (unlikely(!tuple)) {
return -1;
}
Py_INCREF(key);
Py_INCREF(value);
PyTuple_SET_ITEM(tuple, 0, key);
PyTuple_SET_ITEM(tuple, 1, value);
*pitem = tuple;
} else {
if (pkey) {
Py_INCREF(key);
*pkey = key;
}
if (pvalue) {
Py_INCREF(value);
*pvalue = value;
}
}
return 1;
} else if (PyTuple_CheckExact(iter_obj)) {
Py_ssize_t pos = *ppos;
if (unlikely(pos >= PyTuple_GET_SIZE(iter_obj))) return 0;
*ppos = pos + 1;
next_item = PyTuple_GET_ITEM(iter_obj, pos);
Py_INCREF(next_item);
} else if (PyList_CheckExact(iter_obj)) {
Py_ssize_t pos = *ppos;
if (unlikely(pos >= PyList_GET_SIZE(iter_obj))) return 0;
*ppos = pos + 1;
next_item = PyList_GET_ITEM(iter_obj, pos);
Py_INCREF(next_item);
} else
#endif
{
next_item = PyIter_Next(iter_obj);
if (unlikely(!next_item)) {
return __Pyx_IterFinish();
}
}
if (pitem) {
*pitem = next_item;
} else if (pkey && pvalue) {
if (__Pyx_unpack_tuple2(next_item, pkey, pvalue, source_is_dict, source_is_dict, 1))
return -1;
} else if (pkey) {
*pkey = next_item;
} else {
*pvalue = next_item;
}
return 1;
}
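/* Editorial usage sketch, not part of the utility code: the "optimistic" path
   that the iteration transform enables for untyped objects.  count_keys_sketch()
   and its method_name argument are hypothetical; in generated code method_name
   is an interned identifier such as "iterkeys", and real dicts still take the
   PyDict_Next() fast path via source_is_dict. */
static int count_keys_sketch(PyObject *obj, PyObject *method_name, Py_ssize_t *count) {
    Py_ssize_t pos = 0, orig_length = 0;
    int source_is_dict = 0, status;
    PyObject *key;
    PyObject *iter_obj = __Pyx_dict_iterator(obj, 0, method_name,
                                             &orig_length, &source_is_dict);
    if (!iter_obj) return -1;
    *count = 0;
    while ((status = __Pyx_dict_iter_next(iter_obj, orig_length, &pos,
                                          &key, NULL, NULL, source_is_dict)) == 1) {
        (*count)++;
        Py_DECREF(key);  /* each key comes back as a new reference */
    }
    Py_DECREF(iter_obj);
    return status;  /* 0 when exhausted normally, -1 if an exception is set */
}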
/////////////// pyobject_as_double.proto ///////////////
......
......@@ -36,9 +36,34 @@ def iteritems(dict d):
@cython.test_assert_path_exists(
"//WhileStatNode",
"//WhileStatNode//DictIterationNextNode")
def iteritems_dict(dict d):
def optimistic_iteritems(d):
"""
>>> iteritems_dict(d)
>>> optimistic_iteritems(d)
[(10, 0), (11, 1), (12, 2), (13, 3)]
>>> optimistic_iteritems({})
[]
>>> class mydict(object):
... def __init__(self, t): self.t = t
... def iteritems(self): return self.t(d.items())
>>> optimistic_iteritems(mydict(list))
[(10, 0), (11, 1), (12, 2), (13, 3)]
>>> optimistic_iteritems(mydict(tuple))
[(10, 0), (11, 1), (12, 2), (13, 3)]
>>> optimistic_iteritems(mydict(iter))
[(10, 0), (11, 1), (12, 2), (13, 3)]
"""
l = []
for k,v in d.iteritems():
l.append((k,v))
l.sort()
return l
@cython.test_assert_path_exists(
"//WhileStatNode",
"//WhileStatNode//DictIterationNextNode")
def iteritems_dict():
"""
>>> iteritems_dict()
[(11, 1), (12, 2), (13, 3)]
"""
l = []
......@@ -56,6 +81,51 @@ def iteritems_int(dict d):
[(10, 0), (11, 1), (12, 2), (13, 3)]
>>> iteritems_int({})
[]
>>> iteritems_int({'a': 1})
Traceback (most recent call last):
TypeError: an integer is required
>>> iteritems_int({1: 'b'})
Traceback (most recent call last):
TypeError: an integer is required
>>> iteritems_int({'a': 'b'})
Traceback (most recent call last):
TypeError: an integer is required
"""
cdef int k,v
l = []
for k,v in d.iteritems():
l.append((k,v))
l.sort()
return l
@cython.test_assert_path_exists(
"//WhileStatNode",
"//WhileStatNode//DictIterationNextNode")
def optimistic_iteritems_int(d):
"""
>>> optimistic_iteritems_int(d)
[(10, 0), (11, 1), (12, 2), (13, 3)]
>>> optimistic_iteritems_int({})
[]
>>> class mydict(object):
... def __init__(self, t): self.t = t
... def iteritems(self): return self.t(d.items())
>>> optimistic_iteritems_int(mydict(list))
[(10, 0), (11, 1), (12, 2), (13, 3)]
>>> optimistic_iteritems_int(mydict(tuple))
[(10, 0), (11, 1), (12, 2), (13, 3)]
>>> optimistic_iteritems_int(mydict(iter))
[(10, 0), (11, 1), (12, 2), (13, 3)]
>>> optimistic_iteritems_int({'a': 1})
Traceback (most recent call last):
TypeError: an integer is required
>>> optimistic_iteritems_int({1: 'b'})
Traceback (most recent call last):
TypeError: an integer is required
>>> optimistic_iteritems_int({'a': 'b'})
Traceback (most recent call last):
TypeError: an integer is required
"""
cdef int k,v
l = []
......@@ -104,6 +174,57 @@ def iterkeys(dict d):
l.sort()
return l
@cython.test_fail_if_path_exists(
"//WhileStatNode",
"//WhileStatNode//DictIterationNextNode")
def iterkeys_argerror(dict d):
"""
>>> try: iterkeys_argerror(d)
... except (TypeError, AttributeError): pass
"""
for k in d.iterkeys(1):
print k
@cython.test_assert_path_exists(
"//WhileStatNode",
"//WhileStatNode//DictIterationNextNode")
def optimistic_iterkeys(d):
"""
>>> optimistic_iterkeys(d)
[10, 11, 12, 13]
>>> optimistic_iterkeys({})
[]
>>> class mydict(object):
... def __init__(self, t): self.t = t
... def iterkeys(self): return self.t(d)
>>> optimistic_iterkeys(mydict(lambda x:x))
[10, 11, 12, 13]
>>> optimistic_iterkeys(mydict(lambda x:x.keys()))
[10, 11, 12, 13]
>>> optimistic_iterkeys(mydict(list))
[10, 11, 12, 13]
>>> optimistic_iterkeys(mydict(tuple))
[10, 11, 12, 13]
>>> optimistic_iterkeys(mydict(iter))
[10, 11, 12, 13]
"""
l = []
for k in d.iterkeys():
l.append(k)
l.sort()
return l
@cython.test_fail_if_path_exists(
"//WhileStatNode",
"//WhileStatNode//DictIterationNextNode")
def optimistic_iterkeys_argerror(d):
"""
>>> try: optimistic_iterkeys_argerror(d)
... except (TypeError, AttributeError): pass
"""
for k in d.iterkeys(1):
print k
@cython.test_assert_path_exists(
"//WhileStatNode",
"//WhileStatNode//DictIterationNextNode")
......@@ -113,6 +234,9 @@ def iterkeys_int(dict d):
[10, 11, 12, 13]
>>> iterkeys_int({})
[]
>>> iterkeys_int({'a': 'b'})
Traceback (most recent call last):
TypeError: an integer is required
"""
cdef int k
l = []
......@@ -146,6 +270,9 @@ def iterdict_int(dict d):
[10, 11, 12, 13]
>>> iterdict_int({})
[]
>>> iterdict_int({'a': 'b'})
Traceback (most recent call last):
TypeError: an integer is required
"""
cdef int k
l = []
......@@ -202,6 +329,33 @@ def itervalues(dict d):
l.sort()
return l
@cython.test_assert_path_exists(
"//WhileStatNode",
"//WhileStatNode//DictIterationNextNode")
def optimistic_itervalues(d):
"""
>>> optimistic_itervalues(d)
[0, 1, 2, 3]
>>> optimistic_itervalues({})
[]
>>> class mydict(object):
... def __init__(self, t): self.t = t
... def itervalues(self): return self.t(d.values())
>>> optimistic_itervalues(mydict(lambda x:x))
[0, 1, 2, 3]
>>> optimistic_itervalues(mydict(list))
[0, 1, 2, 3]
>>> optimistic_itervalues(mydict(tuple))
[0, 1, 2, 3]
>>> optimistic_itervalues(mydict(iter))
[0, 1, 2, 3]
"""
l = []
for v in d.itervalues():
l.append(v)
l.sort()
return l
@cython.test_assert_path_exists(
"//WhileStatNode",
"//WhileStatNode//DictIterationNextNode")
......@@ -211,6 +365,9 @@ def itervalues_int(dict d):
[0, 1, 2, 3]
>>> itervalues_int({})
[]
>>> itervalues_int({'a': 'b'})
Traceback (most recent call last):
TypeError: an integer is required
"""
cdef int v
l = []
......@@ -276,3 +433,40 @@ def iterdict_change_size(dict d):
if count > 5:
break # safety
return "DONE"
@cython.test_assert_path_exists(
"//WhileStatNode",
"//WhileStatNode//DictIterationNextNode")
def optimistic_iterdict_change_size(d):
"""
>>> count, i = 0, -1
>>> d = {1:2, 10:20}
>>> for i in d:
... d[i+1] = 5
... count += 1
... if count > 5:
... break # safety
Traceback (most recent call last):
RuntimeError: dictionary changed size during iteration
>>> optimistic_iterdict_change_size({1:2, 10:20})
Traceback (most recent call last):
RuntimeError: dictionary changed size during iteration
>>> print( optimistic_iterdict_change_size({}) )
DONE
>>> class mydict(object):
... _d = {1:2, 10:20}
... def iterkeys(self): return self._d
... def __setitem__(self, key, value): self._d[key] = value
>>> optimistic_iterdict_change_size(mydict())
Traceback (most recent call last):
RuntimeError: dictionary changed size during iteration
"""
cdef int count = 0
i = -1
for i in d.iterkeys():
d[i+1] = 5
count += 1
if count > 5:
break # safety
return "DONE"