nexedi / cython · Commits

Commit e01f6d7b — authored Dec 02, 2015 by Kevin R. Thornton
Merge pull request #2 from cython/master
Merge upstream
Parents: afb995d7, 0c62e665

Showing 39 changed files with 663 additions and 144 deletions (+663 / -144)
CHANGES.rst                                               +10    -0
Cython/Build/Dependencies.py                              +26    -4
Cython/Build/Inline.py                                    +12   -10
Cython/Compiler/Buffer.py                                  +1    -1
Cython/Compiler/Code.py                                    +1    -1
Cython/Compiler/ExprNodes.py                               +1    -2
Cython/Compiler/Nodes.py                                  +14    -8
Cython/Compiler/ParseTreeTransforms.py                   +130   -25
Cython/Compiler/Pipeline.py                                +1    -1
Cython/Compiler/PyrexTypes.py                              +1    -1
Cython/Compiler/TreeFragment.py                           +20    -4
Cython/Includes/cpython/exc.pxd                            +1    -1
Cython/Includes/cpython/unicode.pxd                       +19   -16
Cython/Includes/libcpp/algorithm.pxd                       +4    -0
Cython/Includes/libcpp/limits.pxd                         +61    -0
Cython/Shadow.py                                          +16    -2
Cython/TestUtils.py                                       +10    -3
Cython/Utility/MemoryView.pyx                             +38    -5
docs/src/reference/compilation.rst                         +1    -1
docs/src/reference/extension_types.rst                     +4    -4
docs/src/reference/language_basics.rst                     +2    -2
docs/src/tutorial/numpy.rst                                +8   -31
docs/src/tutorial/profiling_tutorial.rst                   +1    -1
docs/src/tutorial/pure.rst                                 +9    -0
docs/src/userguide/extension_types.rst                     +2    -2
docs/src/userguide/memoryviews.rst                         +1    -1
docs/src/userguide/parallelism.rst                         +1    -1
docs/src/userguide/wrapping_CPlusPlus.rst                  +1    -1
pyximport/pyxbuild.py                                      +6    -3
pyximport/pyximport.py                                    +30   -13
runtests.py                                                +2    -0
tests/errors/cdef_class_properties_decorated.pyx          +42    -0
tests/run/cdef_class_property_decorator_T264.pyx          +51    -0
tests/run/libcpp_all.pyx                                  +16    -0
tests/run/locals.pyx                                      +15    -0
tests/run/pure.pyx                                        +20    -0
tests/run/pure_cdef_class_property_decorator_T264.pxd      +6    -0
tests/run/pure_cdef_class_property_decorator_T264.py      +51    -0
tests/run/sequential_parallel.pyx                         +28    -0
CHANGES.rst
@@ -59,6 +59,16 @@ Other changes
 -------------

+0.23.5 (2015-xx-yy)
+===================
+
+Bugs fixed
+----------
+
+* Fix prange() to behave identically to range().  The end condition was
+  miscalculated when the range was not exactly divisible by the step.
+
 0.23.4 (2015-10-10)
 ===================
Cython/Build/Dependencies.py
@@ -64,6 +64,7 @@ else:
        return filename
    basestring = str

def extended_iglob(pattern):
    if '{' in pattern:
        m = re.match('(.*){([^}]+)}(.*)', pattern)

@@ -118,6 +119,7 @@ def file_hash(filename):
    f.close()
    return m.hexdigest()

def parse_list(s):
    """
    >>> parse_list("a b c")

@@ -143,6 +145,7 @@ def parse_list(s):
        return literal
    return [unquote(item) for item in s.split(delimiter) if item.strip()]

transitive_str = object()
transitive_list = object()

@@ -163,6 +166,7 @@ distutils_settings = {
    'language': transitive_str,
}

@cython.locals(start=cython.Py_ssize_t, end=cython.Py_ssize_t)
def line_iter(source):
    if isinstance(source, basestring):

@@ -178,6 +182,7 @@ def line_iter(source):
        for line in source:
            yield line

class DistutilsInfo(object):

    def __init__(self, source=None, exn=None):

@@ -254,6 +259,7 @@ class DistutilsInfo(object):
                value = getattr(extension, key) + list(value)
            setattr(extension, key, value)

@cython.locals(start=cython.Py_ssize_t, q=cython.Py_ssize_t,
               single_q=cython.Py_ssize_t, double_q=cython.Py_ssize_t,
               hash_mark=cython.Py_ssize_t, end=cython.Py_ssize_t,

@@ -349,9 +355,11 @@ dependency_regex = re.compile(r"(?:^from +([0-9a-zA-Z_.]+) +cimport)|"
                              r"(?:^cdef +extern +from +['\"]([^'\"]+)['\"])|"
                              r"(?:^include +['\"]([^'\"]+)['\"])", re.M)

def normalize_existing(base_path, rel_paths):
    return normalize_existing0(os.path.dirname(base_path), tuple(set(rel_paths)))

@cached_function
def normalize_existing0(base_dir, rel_paths):
    normalized = []

@@ -363,6 +371,7 @@ def normalize_existing0(base_dir, rel_paths):
            normalized.append(rel)
    return normalized

def resolve_depends(depends, include_dirs):
    include_dirs = tuple(include_dirs)
    resolved = []

@@ -372,6 +381,7 @@ def resolve_depends(depends, include_dirs):
            resolved.append(path)
    return resolved

@cached_function
def resolve_depend(depend, include_dirs):
    if depend[0] == '<' and depend[-1] == '>':

@@ -382,6 +392,7 @@ def resolve_depend(depend, include_dirs):
            return os.path.normpath(path)
    return None

@cached_function
def package(filename):
    dir = os.path.dirname(os.path.abspath(str(filename)))

@@ -390,6 +401,7 @@ def package(filename):
    else:
        return ()

@cached_function
def fully_qualified_name(filename):
    module = os.path.splitext(os.path.basename(filename))[0]

@@ -398,7 +410,7 @@ def fully_qualified_name(filename):

@cached_function
def parse_dependencies(source_filename):
-   # Actual parsing is way to slow, so we use regular expressions.
+   # Actual parsing is way too slow, so we use regular expressions.
    # The only catch is that we must strip comments and string
    # literals ahead of time.
    fh = Utils.open_source_file(source_filename, error_handling='ignore')

@@ -435,6 +447,8 @@ class DependencyTree(object):
        self._transitive_cache = {}

    def parse_dependencies(self, source_filename):
+       if path_exists(source_filename):
+           source_filename = os.path.normpath(source_filename)
        return parse_dependencies(source_filename)

    @cached_method

@@ -598,7 +612,9 @@ class DependencyTree(object):
        finally:
            del stack[node]

_dep_tree = None

def create_dependency_tree(ctx=None, quiet=False):
    global _dep_tree
    if _dep_tree is None:

@@ -609,8 +625,10 @@ def create_dependency_tree(ctx=None, quiet=False):

# This may be useful for advanced users?
-def create_extension_list(patterns, exclude=[], ctx=None, aliases=None, quiet=False, language=None,
+def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=False, language=None,
                          exclude_failures=False):
+   if exclude is None:
+       exclude = []
    if not isinstance(patterns, (list, tuple)):
        patterns = [patterns]
    explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)])

@@ -709,7 +727,7 @@ def create_extension_list(patterns, exclude=[], ctx=None, aliases=None, quiet=Fa

# This is the user-exposed entry point.
-def cythonize(module_list, exclude=[], nthreads=0, aliases=None, quiet=False, force=False, language=None,
+def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, force=False, language=None,
              exclude_failures=False, **options):
    """
    Compile a set of source modules into C/C++ files and return a list of distutils

@@ -737,6 +755,8 @@ def cythonize(module_list, exclude=[], nthreads=0, aliases=None, quiet=False, fo
    Additional compilation options can be passed as keyword arguments.
    """
+   if exclude is None:
+       exclude = []
    if 'include_path' not in options:
        options['include_path'] = ['.']
    if 'common_utility_include_dir' in options:

@@ -935,7 +955,9 @@ if os.environ.get('XML_RESULTS'):
            output.close()
        return with_record
else:
-   record_results = lambda x: x
+   def record_results(func):
+       return func

# TODO: Share context? Issue: pyx processing leaks into pxd module
@record_results
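Note on the change above: the recurring edit in this file (and in several files below) replaces mutable default arguments such as exclude=[] with a None sentinel that is filled in inside the function. A minimal illustration of the pitfall being avoided — the function and variable names here are illustrative, not taken from the commit:

    def buggy(item, acc=[]):        # the list is created once, at definition time
        acc.append(item)
        return acc

    def fixed(item, acc=None):      # a fresh list per call unless one is passed in
        if acc is None:
            acc = []
        acc.append(item)
        return acc

    buggy(1); buggy(2)              # second call returns [1, 2]: state leaks across calls
    fixed(1); fixed(2)              # each call returns a one-element list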
Cython/Build/Inline.py
@@ -48,6 +48,7 @@ class UnboundSymbols(EnvTransform, SkipDeclarations):
        super(UnboundSymbols, self).__call__(node)
        return self.unbound

@cached_function
def unbound_symbols(code, context=None):
    code = to_unicode(code)

@@ -67,6 +68,7 @@ def unbound_symbols(code, context=None):
        import __builtin__ as builtins
    return UnboundSymbols()(tree) - set(dir(builtins))

def unsafe_type(arg, context=None):
    py_type = type(arg)
    if py_type is int:

@@ -74,6 +76,7 @@ def unsafe_type(arg, context=None):
    else:
        return safe_type(arg, context)

def safe_type(arg, context=None):
    py_type = type(arg)
    if py_type in [list, tuple, dict, str]:

@@ -97,6 +100,7 @@ def safe_type(arg, context=None):
            return '%s.%s' % (base_type.__module__, base_type.__name__)
    return 'object'

def _get_build_extension():
    dist = Distribution()
    # Ensure the build respects distutils configuration by parsing

@@ -107,19 +111,16 @@ def _get_build_extension():
    build_extension.finalize_options()
    return build_extension

@cached_function
def _create_context(cython_include_dirs):
    return Context(list(cython_include_dirs), default_options)

-def cython_inline(code, get_type=unsafe_type, lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
-                  cython_include_dirs=['.'], force=False, quiet=False, locals=None, globals=None, **kwds):
+def cython_inline(code, get_type=unsafe_type, lib_dir=os.path.join(get_cython_cache_dir(), 'inline'),
+                  cython_include_dirs=None, force=False, quiet=False, locals=None, globals=None, **kwds):
+   if cython_include_dirs is None:
+       cython_include_dirs = ['.']
    if get_type is None:
        get_type = lambda x: 'object'
    code = to_unicode(code)

@@ -263,7 +264,6 @@ def extract_func_code(code):
    return '\n'.join(module), '    ' + '\n'.join(function)

try:
    from inspect import getcallargs
except ImportError:

@@ -294,6 +294,7 @@ except ImportError:
                raise TypeError("Missing argument: %s" % name)
        return all

def get_body(source):
    ix = source.index(':')
    if source[:5] == 'lambda':

@@ -301,6 +302,7 @@ def get_body(source):
    else:
        return source[ix+1:]

# Lots to be done here... It would be especially cool if compiled functions
# could invoke each other quickly.
class RuntimeCompiledFunction(object):
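Note on the change above: cython_inline() compiles and runs a Cython snippet on the fly; the edit only changes how its default include path is created, not how it is called. A rough usage sketch, assuming a working C compiler is available (the snippet itself is illustrative):

    from Cython.Build.Inline import cython_inline

    # Variables are passed as keyword arguments; get_type infers their Cython types.
    result = cython_inline("return a + b", a=1, b=2)
    print(result)   # 3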
Cython/Compiler/Buffer.py
@@ -574,7 +574,7 @@ class GetAndReleaseBufferUtilityCode(object):
    def __hash__(self):
        return 24342342

-   def get_tree(self):
+   def get_tree(self, **kwargs):
        pass

    def put_code(self, output):
        code = output['utility_code_def']
Cython/Compiler/Code.py
@@ -318,7 +318,7 @@ class UtilityCodeBase(object):
    def __str__(self):
        return "<%s(%s)>" % (type(self).__name__, self.name)

-   def get_tree(self):
+   def get_tree(self, **kwargs):
        pass
Cython/Compiler/ExprNodes.py
@@ -9625,8 +9625,7 @@ class TypecastNode(ExprNode):
                return CoerceIntToBytesNode(self.operand, env)
            elif self.operand.type.can_coerce_to_pyobject(env):
                self.result_ctype = py_object_type
-               base_type = self.base_type.analyse(env)
-               self.operand = self.operand.coerce_to(base_type, env)
+               self.operand = self.operand.coerce_to(self.type, env)
        else:
            if self.operand.type.is_ptr:
                if not (self.operand.type.base_type.is_void or self.operand.type.base_type.is_struct):
Cython/Compiler/Nodes.py
@@ -649,7 +649,9 @@ class CFuncDeclaratorNode(CDeclaratorNode):
        else:
            return None

-   def analyse(self, return_type, env, nonempty=0, directive_locals={}):
+   def analyse(self, return_type, env, nonempty=0, directive_locals=None):
+       if directive_locals is None:
+           directive_locals = {}
        if nonempty:
            nonempty -= 1
        func_type_args = []

@@ -8186,8 +8188,8 @@ class ParallelStatNode(StatNode, ParallelNode):
        code.set_all_labels(self.old_loop_labels + (self.old_return_label, self.old_error_label))

-   def end_parallel_control_flow_block(self, code, break_=False, continue_=False):
+   def end_parallel_control_flow_block(self, code, break_=False, continue_=False, return_=False):
        """
        This ends the parallel control flow block and based on how the parallel
        section was exited, takes the corresponding action. The break_ and

@@ -8244,8 +8246,9 @@ class ParallelStatNode(StatNode, ParallelNode):
            code.put("        case 2: ")
            code.put_goto(code.break_label)

-           code.put("        case 3: ")
-           code.put_goto(code.return_label)
+           if return_:
+               code.put("        case 3: ")
+               code.put_goto(code.return_label)

        if self.error_label_used:
            code.globalstate.use_utility_code(restore_exception_utility_code)

@@ -8323,10 +8326,12 @@ class ParallelWithBlockNode(ParallelStatNode):
        continue_ = code.label_used(code.continue_label)
        break_ = code.label_used(code.break_label)
+       return_ = code.label_used(code.return_label)

        self.restore_labels(code)
-       self.end_parallel_control_flow_block(code, break_=break_, continue_=continue_)
+       self.end_parallel_control_flow_block(code, break_=break_,
+                                            continue_=continue_, return_=return_)

        self.release_closure_privates(code)

@@ -8528,6 +8533,7 @@ class ParallelRangeNode(ParallelStatNode):
        # the start, stop , step, temps and target cnames
        fmt_dict = {
            'target': target_index_cname,
+           'target_type': self.target.type.empty_declaration_code()
        }

        # Setup start, stop and step, allocating temps if needed

@@ -8556,7 +8562,7 @@ class ParallelRangeNode(ParallelStatNode):
        self.control_flow_var_code_point = code.insertion_point()

        # Note: nsteps is private in an outer scope if present
-       code.putln("%(nsteps)s = (%(stop)s - %(start)s) / %(step)s;" % fmt_dict)
+       code.putln("%(nsteps)s = (%(stop)s - %(start)s + %(step)s - %(step)s/abs(%(step)s)) / %(step)s;" % fmt_dict)

        # The target iteration variable might not be initialized, do it only if
        # we are executing at least 1 iteration, otherwise we should leave the

@@ -8670,7 +8676,7 @@ class ParallelRangeNode(ParallelStatNode):
        # at least it doesn't spoil indentation
        code.begin_block()

-       code.putln("%(target)s = %(start)s + %(step)s * %(i)s;" % fmt_dict)
+       code.putln("%(target)s = (%(target_type)s)(%(start)s + %(step)s * %(i)s);" % fmt_dict)

        self.initialize_privates_to_nan(code, exclude=self.target.entry)

        if self.is_parallel:
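Note on the nsteps change above: the new expression is what makes prange() visit the same indices as range() when the span is not an exact multiple of the step. A quick check of both formulas in plain Python (the concrete numbers are just an example; Python's // and C's truncating / agree here because all operands are positive):

    start, stop, step = 0, 10, 3
    old_nsteps = (stop - start) // step                               # 3 -> indices 0, 3, 6 (drops 9)
    new_nsteps = (stop - start + step - step // abs(step)) // step    # 4 -> indices 0, 3, 6, 9
    assert new_nsteps == len(range(start, stop, step))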
Cython/Compiler/ParseTreeTransforms.py
@@ -1271,37 +1271,118 @@ class WithTransform(CythonTransform, SkipDeclarations):

class DecoratorTransform(ScopeTrackingTransform, SkipDeclarations):
-   """Originally, this was the only place where decorators were
-   transformed into the corresponding calling code.  Now, this is
-   done directly in DefNode and PyClassDefNode to avoid reassignments
-   to the function/class name - except for cdef class methods.  For
-   those, the reassignment is required as methods are originally
-   defined in the PyMethodDef struct.
-
-   The IndirectionNode allows DefNode to override the decorator
-   """
-
-   def visit_DefNode(self, func_node):
-       scope_type = self.scope_type
-       func_node = self.visit_FuncDefNode(func_node)
-       if scope_type != 'cclass' or not func_node.decorators:
-           return func_node
-       return self.handle_decorators(func_node, func_node.decorators, func_node.name)
-
-   def handle_decorators(self, node, decorators, name):
-       decorator_result = ExprNodes.NameNode(node.pos, name=name)
+   """
+   Transforms method decorators in cdef classes into nested calls or properties.
+
+   Python-style decorator properties are transformed into a PropertyNode
+   with up to the three getter, setter and deleter DefNodes.
+   The functional style isn't supported yet.
+   """
+   _properties = None
+
+   _map_property_attribute = {
+       'getter': '__get__',
+       'setter': '__set__',
+       'deleter': '__del__',
+   }.get
+
+   def visit_CClassDefNode(self, node):
+       if self._properties is None:
+           self._properties = []
+       self._properties.append({})
+       super(DecoratorTransform, self).visit_CClassDefNode(node)
+       self._properties.pop()
+       return node
+
+   def visit_PropertyNode(self, node):
+       # Suppress warning for our code until we can convert all our uses over.
+       if isinstance(node.pos[0], str) or True:
+           warning(node.pos, "'property %s:' syntax is deprecated, use '@property'" % node.name, 2)
+       return node
+
+   def visit_DefNode(self, node):
+       scope_type = self.scope_type
+       node = self.visit_FuncDefNode(node)
+       if scope_type != 'cclass' or not node.decorators:
+           return node
+
+       # transform @property decorators
+       properties = self._properties[-1]
+       for decorator_node in node.decorators[::-1]:
+           decorator = decorator_node.decorator
+           if decorator.is_name and decorator.name == 'property':
+               if len(node.decorators) > 1:
+                   return self._reject_decorated_property(node, decorator_node)
+               name = node.name
+               node.name = '__get__'
+               node.decorators.remove(decorator_node)
+               stat_list = [node]
+               if name in properties:
+                   prop = properties[name]
+                   prop.pos = node.pos
+                   prop.doc = node.doc
+                   prop.body.stats = stat_list
+                   return []
+               prop = Nodes.PropertyNode(node.pos, name=name)
+               prop.doc = node.doc
+               prop.body = Nodes.StatListNode(node.pos, stats=stat_list)
+               properties[name] = prop
+               return [prop]
+           elif decorator.is_attribute and decorator.obj.name in properties:
+               handler_name = self._map_property_attribute(decorator.attribute)
+               if handler_name:
+                   assert decorator.obj.name == node.name
+                   if len(node.decorators) > 1:
+                       return self._reject_decorated_property(node, decorator_node)
+                   return self._add_to_property(properties, node, handler_name, decorator_node)
+
+       # transform normal decorators
+       return self.chain_decorators(node, node.decorators, node.name)
+
+   @staticmethod
+   def _reject_decorated_property(node, decorator_node):
+       # restrict transformation to outermost decorator as wrapped properties will probably not work
+       for deco in node.decorators:
+           if deco != decorator_node:
+               error(deco.pos, "Property methods with additional decorators are not supported")
+       return node
+
+   @staticmethod
+   def _add_to_property(properties, node, name, decorator):
+       prop = properties[node.name]
+       node.name = name
+       node.decorators.remove(decorator)
+       stats = prop.body.stats
+       for i, stat in enumerate(stats):
+           if stat.name == name:
+               stats[i] = node
+               break
+       else:
+           stats.append(node)
+       return []
+
+   @staticmethod
+   def chain_decorators(node, decorators, name):
+       """
+       Decorators are applied directly in DefNode and PyClassDefNode to avoid
+       reassignments to the function/class name - except for cdef class methods.
+       For those, the reassignment is required as methods are originally
+       defined in the PyMethodDef struct.
+
+       The IndirectionNode allows DefNode to override the decorator.
+       """
+       decorator_result = ExprNodes.NameNode(node.pos, name=name)
        for decorator in decorators[::-1]:
            decorator_result = ExprNodes.SimpleCallNode(
                decorator.pos,
                function=decorator.decorator,
                args=[decorator_result])

        name_node = ExprNodes.NameNode(node.pos, name=name)
        reassignment = Nodes.SingleAssignmentNode(
            node.pos,
            lhs=name_node,
            rhs=decorator_result)

        reassignment = Nodes.IndirectionNode([reassignment])
        node.decorator_indirection = reassignment

@@ -1500,7 +1581,7 @@ if VALUE is not None:
        if decorators:
            transform = DecoratorTransform(self.context)
            def_node = node.node
-           _, reassignments = transform.handle_decorators(
+           _, reassignments = transform.chain_decorators(
                def_node, decorators, def_node.name)
            reassignments.analyse_declarations(env)
            node = [node, reassignments]

@@ -2703,11 +2784,13 @@ class TransformBuiltinMethods(EnvTransform):
                    node.function.pos, operand1=node.args[0], operand2=node.args[1])
            elif function == u'cast':
                if len(node.args) != 2:
-                   error(node.function.pos, u"cast() takes exactly two arguments")
+                   error(node.function.pos,
+                         u"cast() takes exactly two arguments and an optional typecheck keyword")
                else:
                    type = node.args[0].analyse_as_type(self.current_env())
                    if type:
-                       node = ExprNodes.TypecastNode(node.function.pos, type=type, operand=node.args[1])
+                       node = ExprNodes.TypecastNode(
+                           node.function.pos, type=type, operand=node.args[1], typecheck=False)
                    else:
                        error(node.args[0].pos, "Not a type")
            elif function == u'sizeof':

@@ -2753,6 +2836,28 @@ class TransformBuiltinMethods(EnvTransform):
                return self._inject_super(node, func_name)
        return node

+   def visit_GeneralCallNode(self, node):
+       function = node.function.as_cython_attribute()
+       if function:
+           args = node.positional_args.args
+           kwargs = node.keyword_args.compile_time_value(None)
+           if function == u'cast':
+               if (len(args) != 2 or len(kwargs) > 1 or
+                       (len(kwargs) == 1 and 'typecheck' not in kwargs)):
+                   error(node.function.pos,
+                         u"cast() takes exactly two arguments and an optional typecheck keyword")
+               else:
+                   type = args[0].analyse_as_type(self.current_env())
+                   if type:
+                       typecheck = kwargs.get('typecheck', False)
+                       node = ExprNodes.TypecastNode(
+                           node.function.pos, type=type, operand=args[1], typecheck=typecheck)
+                   else:
+                       error(args[0].pos, "Not a type")
+
+       self.visitchildren(node)
+       return node

class ReplaceFusedTypeChecks(VisitorTransform):
    """
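Note on the DecoratorTransform change above: it is what lets Python-style @property decorators work on cdef class methods. The user-facing effect, sketched here for illustration (it mirrors the tests added later in this commit; the class and attribute names are made up):

    cdef class Point:
        cdef double _x

        @property
        def x(self):              # becomes the __get__ of a generated PropertyNode
            return self._x

        @x.setter
        def x(self, value):       # becomes the __set__ of the same property
            self._x = value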
Cython/Compiler/Pipeline.py
@@ -127,7 +127,7 @@ def inject_utility_code_stage_factory(context):
            for dep in utilcode.requires:
                if dep not in added and dep not in module_node.scope.utility_code_list:
                    module_node.scope.utility_code_list.append(dep)
-           tree = utilcode.get_tree()
+           tree = utilcode.get_tree(cython_scope=context.cython_scope)
            if tree:
                module_node.merge_in(tree.body, tree.scope, merge_scope=True)
        return module_node
Cython/Compiler/PyrexTypes.py
@@ -3202,7 +3202,7 @@ class ToPyStructUtilityCode(object):
    def __hash__(self):
        return hash(self.header)

-   def get_tree(self):
+   def get_tree(self, **kwargs):
        pass

    def put_code(self, output):
Cython/Compiler/TreeFragment.py
@@ -39,7 +39,7 @@ class StringParseContext(Main.Context):
        return ModuleScope(module_name, parent_module=None, context=self)

-def parse_from_strings(name, code, pxds={}, level=None, initial_pos=None,
+def parse_from_strings(name, code, pxds=None, level=None, initial_pos=None,
                       context=None, allow_struct_enum_decorator=False):
    """
    Utility method to parse a (unicode) string of code. This is mostly

@@ -86,6 +86,7 @@ def parse_from_strings(name, code, pxds={}, level=None, initial_pos=None,
    tree.scope = scope
    return tree

class TreeCopier(VisitorTransform):
    def visit_Node(self, node):
        if node is None:

@@ -95,6 +96,7 @@ class TreeCopier(VisitorTransform):
            self.visitchildren(c)
            return c

class ApplyPositionAndCopy(TreeCopier):
    def __init__(self, pos):
        super(ApplyPositionAndCopy, self).__init__()

@@ -105,6 +107,7 @@ class ApplyPositionAndCopy(TreeCopier):
        copy.pos = self.pos
        return copy

class TemplateTransform(VisitorTransform):
    """
    Makes a copy of a template tree while doing substitutions.

@@ -212,9 +215,16 @@ def strip_common_indent(lines):

class TreeFragment(object):
-   def __init__(self, code, name=None, pxds={}, temps=[], pipeline=[], level=None, initial_pos=None):
+   def __init__(self, code, name=None, pxds=None, temps=None, pipeline=None, level=None, initial_pos=None):
+       if pxds is None:
+           pxds = {}
+       if temps is None:
+           temps = []
+       if pipeline is None:
+           pipeline = []
        if not name:
            name = "(tree fragment)"
        if isinstance(code, _unicode):
            def fmt(x): return u"\n".join(strip_common_indent(x.split(u"\n")))

@@ -233,7 +243,8 @@ class TreeFragment(object):
                t = transform(t)
            self.root = t
        elif isinstance(code, Node):
-           if pxds != {}:
-               raise NotImplementedError()
+           if pxds:
+               raise NotImplementedError()
            self.root = code
        else:
            raise ValueError("Unrecognized code format (accepts unicode and Node)")

@@ -242,11 +253,16 @@ class TreeFragment(object):
    def copy(self):
        return copy_code_tree(self.root)

-   def substitute(self, nodes={}, temps=[], pos=None):
+   def substitute(self, nodes=None, temps=None, pos=None):
+       if nodes is None:
+           nodes = {}
+       if temps is None:
+           temps = []
        return TemplateTransform()(self.root,
                                   substitutions=nodes,
                                   temps=self.temps + temps, pos=pos)

class SetPosTransform(VisitorTransform):
    def __init__(self, pos):
        super(SetPosTransform, self).__init__()
Cython/Includes/cpython/exc.pxd
@@ -50,7 +50,7 @@ cdef extern from "Python.h":
    # return value to a specific exception; use
    # PyErr_ExceptionMatches() instead, shown below. (The comparison
    # could easily fail since the exception may be an instance instead
-   # of a class, in the case of a class exception, or it may the a
+   # of a class, in the case of a class exception, or it may be a
    # subclass of the expected exception.)
    bint PyErr_ExceptionMatches(object exc)
Cython/Includes/cpython/unicode.pxd
@@ -25,57 +25,60 @@ cdef extern from *:
    char* PyUnicode_AS_DATA(object o)

    # Return 1 or 0 depending on whether ch is a whitespace character.
-   bint Py_UNICODE_ISSPACE(Py_UNICODE ch)
+   bint Py_UNICODE_ISSPACE(Py_UCS4 ch)

    # Return 1 or 0 depending on whether ch is a lowercase character.
-   bint Py_UNICODE_ISLOWER(Py_UNICODE ch)
+   bint Py_UNICODE_ISLOWER(Py_UCS4 ch)

    # Return 1 or 0 depending on whether ch is an uppercase character.
-   bint Py_UNICODE_ISUPPER(Py_UNICODE ch)
+   bint Py_UNICODE_ISUPPER(Py_UCS4 ch)

    # Return 1 or 0 depending on whether ch is a titlecase character.
-   bint Py_UNICODE_ISTITLE(Py_UNICODE ch)
+   bint Py_UNICODE_ISTITLE(Py_UCS4 ch)

    # Return 1 or 0 depending on whether ch is a linebreak character.
-   bint Py_UNICODE_ISLINEBREAK(Py_UNICODE ch)
+   bint Py_UNICODE_ISLINEBREAK(Py_UCS4 ch)

    # Return 1 or 0 depending on whether ch is a decimal character.
-   bint Py_UNICODE_ISDECIMAL(Py_UNICODE ch)
+   bint Py_UNICODE_ISDECIMAL(Py_UCS4 ch)

    # Return 1 or 0 depending on whether ch is a digit character.
-   bint Py_UNICODE_ISDIGIT(Py_UNICODE ch)
+   bint Py_UNICODE_ISDIGIT(Py_UCS4 ch)

    # Return 1 or 0 depending on whether ch is a numeric character.
-   bint Py_UNICODE_ISNUMERIC(Py_UNICODE ch)
+   bint Py_UNICODE_ISNUMERIC(Py_UCS4 ch)

    # Return 1 or 0 depending on whether ch is an alphabetic character.
-   bint Py_UNICODE_ISALPHA(Py_UNICODE ch)
+   bint Py_UNICODE_ISALPHA(Py_UCS4 ch)

    # Return 1 or 0 depending on whether ch is an alphanumeric character.
-   bint Py_UNICODE_ISALNUM(Py_UNICODE ch)
+   bint Py_UNICODE_ISALNUM(Py_UCS4 ch)

    # Return the character ch converted to lower case.
-   Py_UNICODE Py_UNICODE_TOLOWER(Py_UNICODE ch)
+   # Used to return a Py_UNICODE value before Py3.3.
+   Py_UCS4 Py_UNICODE_TOLOWER(Py_UCS4 ch)

    # Return the character ch converted to upper case.
-   Py_UNICODE Py_UNICODE_TOUPPER(Py_UNICODE ch)
+   # Used to return a Py_UNICODE value before Py3.3.
+   Py_UCS4 Py_UNICODE_TOUPPER(Py_UCS4 ch)

    # Return the character ch converted to title case.
-   Py_UNICODE Py_UNICODE_TOTITLE(Py_UNICODE ch)
+   # Used to return a Py_UNICODE value before Py3.3.
+   Py_UCS4 Py_UNICODE_TOTITLE(Py_UCS4 ch)

    # Return the character ch converted to a decimal positive
    # integer. Return -1 if this is not possible. This macro does not
    # raise exceptions.
-   int Py_UNICODE_TODECIMAL(Py_UNICODE ch)
+   int Py_UNICODE_TODECIMAL(Py_UCS4 ch)

    # Return the character ch converted to a single digit
    # integer. Return -1 if this is not possible. This macro does not
    # raise exceptions.
-   int Py_UNICODE_TODIGIT(Py_UNICODE ch)
+   int Py_UNICODE_TODIGIT(Py_UCS4 ch)

    # Return the character ch converted to a double. Return -1.0 if
    # this is not possible. This macro does not raise exceptions.
-   double Py_UNICODE_TONUMERIC(Py_UNICODE ch)
+   double Py_UNICODE_TONUMERIC(Py_UCS4 ch)

    # To create Unicode objects and access their basic sequence
    # properties, use these APIs:
Cython/Includes/libcpp/algorithm.pxd
@@ -26,3 +26,7 @@ cdef extern from "<algorithm>" namespace "std" nogil:
    void sort_heap[Iter](Iter first, Iter last)
    void sort_heap[Iter, Compare](Iter first, Iter last, Compare comp)

+   # Copy
+   OutputIter copy[InputIter, OutputIter](InputIter, InputIter, OutputIter)
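Note on the addition above: it merely declares std::copy for use from Cython. A hypothetical usage sketch (the vector construction and sizes are illustrative, not part of the commit):

    from libcpp.vector cimport vector
    from libcpp.algorithm cimport copy

    def copy_vector(src_list):
        cdef vector[int] src = src_list
        cdef vector[int] dst = vector[int](src.size())     # pre-size the destination
        copy(src.begin(), src.end(), dst.begin())          # element-wise copy via std::copy
        return dst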
Cython/Includes/libcpp/limits.pxd (new file, 0 → 100644)
cdef extern from "limits" namespace "std" nogil:
    enum float_round_style:
        round_indeterminate       = -1
        round_toward_zero         = 0
        round_to_nearest          = 1
        round_toward_infinity     = 2
        round_toward_neg_infinity = 3

    enum float_denorm_style:
        denorm_indeterminate = -1
        denorm_absent        = 0
        denorm_present       = 1

    # The static methods can be called as, e.g. numeric_limits[int].round_error(), etc.
    # The const data members should be declared as static.  Cython currently doesn't allow that
    # and/or I can't figure it out, so you must instantiate an object to access, e.g.
    # cdef numeric_limits[double] lm
    # print lm.round_style
    cdef cppclass numeric_limits[T]:
        const bint is_specialized
        @staticmethod
        T min()
        @staticmethod
        T max()
        const int digits
        const int digits10
        const bint is_signed
        const bint is_integer
        const bint is_exact
        const int radix
        @staticmethod
        T epsilon()
        @staticmethod
        T round_error()

        const int min_exponent
        const int min_exponent10
        const int max_exponent
        const int max_exponent10

        const bint has_infinity
        const bint has_quiet_NaN
        const bint has_signaling_NaN
        const float_denorm_style has_denorm
        const bint has_denorm_loss
        @staticmethod
        T infinity()
        @staticmethod
        T quiet_NaN()
        @staticmethod
        T signaling_NaN()
        @staticmethod
        T denorm_min()

        const bint is_iec559
        const bint is_bounded
        const bint is_modulo

        const bint traps
        const bint tinyness_before
        const float_round_style round_style
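As the comment in the new file notes, the static members are called through the class while the const data members need an instance. A short usage sketch along those lines (the same static-method pattern appears in tests/run/libcpp_all.pyx further down this page):

    from libcpp.limits cimport numeric_limits

    cdef double dmax = numeric_limits[double].max()   # static method via the class
    cdef numeric_limits[double] lm
    print(lm.digits10)                                # const data member via an instance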
Cython/Shadow.py
@@ -102,9 +102,21 @@ class _EmptyDecoratorAndManager(object):
    def __exit__(self, exc_type, exc_value, traceback):
        pass

+class _Optimization(object):
+   pass

cclass = ccall = cfunc = _EmptyDecoratorAndManager()

-returns = wraparound = boundscheck = profile = freelist = lambda arg: _EmptyDecoratorAndManager()
+returns = wraparound = boundscheck = initializedcheck = nonecheck = \
+   overflowcheck = embedsignature = cdivision = cdivision_warnings = \
+   always_allows_keywords = profile = linetrace = infer_type = \
+   type_version_tag = unraisable_tracebacks = freelist = \
+   lambda arg: _EmptyDecoratorAndManager()
+
+optimization = _Optimization()
+
+overflowcheck.fold = optimization.use_switch = \
+   optimization.unpack_method_calls = lambda arg: _EmptyDecoratorAndManager()

final = internal = type_version_tag = no_gc_clear = _empty_decorator

@@ -136,7 +148,9 @@ def cmod(a, b):

# Emulated language constructs

-def cast(type, *args):
+def cast(type, *args, **kwargs):
+   kwargs.pop('typecheck', None)
+   assert not kwargs
    if hasattr(type, '__call__'):
        return type(*args)
    else:
Cython/TestUtils.py
@@ -84,10 +84,15 @@ class CythonTest(unittest.TestCase):
        self.assertNotEqual(TreePath.find_first(result_tree, path), None,
                            "Path '%s' not found in result tree" % path)

-   def fragment(self, code, pxds={}, pipeline=[]):
+   def fragment(self, code, pxds=None, pipeline=None):
        "Simply create a tree fragment using the name of the test-case in parse errors."
+       if pxds is None:
+           pxds = {}
+       if pipeline is None:
+           pipeline = []
        name = self.id()
-       if name.startswith("__main__."): name = name[len("__main__."):]
+       if name.startswith("__main__."):
+           name = name[len("__main__."):]
        name = name.replace(".", "_")
        return TreeFragment(code, name, pxds, pipeline=pipeline)

@@ -139,7 +144,9 @@ class TransformTest(CythonTest):
    Plans: One could have a pxd dictionary parameter to run_pipeline.
    """

-   def run_pipeline(self, pipeline, pyx, pxds={}):
+   def run_pipeline(self, pipeline, pyx, pxds=None):
+       if pxds is None:
+           pxds = {}
        tree = self.fragment(pyx, pxds).root
        # Run pipeline
        for T in pipeline:
Cython/Utility/MemoryView.pyx
@@ -300,6 +300,24 @@ cdef void *align_pointer(void *memory, size_t alignment) nogil:
    return <void *> aligned_p

+# pre-allocate thread locks for reuse
+## note that this could be implemented in a more beautiful way in "normal" Cython,
+## but this code gets merged into the user module and not everything works there.
+DEF THREAD_LOCKS_PREALLOCATED = 8
+cdef int __pyx_memoryview_thread_locks_used = 0
+cdef PyThread_type_lock[THREAD_LOCKS_PREALLOCATED] __pyx_memoryview_thread_locks = [
+   PyThread_allocate_lock(),
+   PyThread_allocate_lock(),
+   PyThread_allocate_lock(),
+   PyThread_allocate_lock(),
+   PyThread_allocate_lock(),
+   PyThread_allocate_lock(),
+   PyThread_allocate_lock(),
+   PyThread_allocate_lock(),
+]

@cname('__pyx_memoryview')
cdef class memoryview(object):

@@ -325,12 +343,17 @@ cdef class memoryview(object):
            (<__pyx_buffer *> &self.view).obj = Py_None
            Py_INCREF(Py_None)

-       self.lock = PyThread_allocate_lock()
-       if self.lock == NULL:
-           raise MemoryError
+       global __pyx_memoryview_thread_locks_used
+       if __pyx_memoryview_thread_locks_used < THREAD_LOCKS_PREALLOCATED:
+           self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used]
+           __pyx_memoryview_thread_locks_used += 1
+       if self.lock is NULL:
+           self.lock = PyThread_allocate_lock()
+           if self.lock is NULL:
+               raise MemoryError

        if flags & PyBUF_FORMAT:
-           self.dtype_is_object = self.view.format == b'O'
+           self.dtype_is_object = (self.view.format[0] == b'O' and self.view.format[1] == b'\0')
        else:
            self.dtype_is_object = dtype_is_object

@@ -342,8 +365,18 @@ cdef class memoryview(object):
        if self.obj is not None:
            __Pyx_ReleaseBuffer(&self.view)

+       cdef int i
+       global __pyx_memoryview_thread_locks_used
        if self.lock != NULL:
-           PyThread_free_lock(self.lock)
+           for i in range(__pyx_memoryview_thread_locks_used):
+               if __pyx_memoryview_thread_locks[i] is self.lock:
+                   __pyx_memoryview_thread_locks_used -= 1
+                   if i != __pyx_memoryview_thread_locks_used:
+                       __pyx_memoryview_thread_locks[i], __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used] = \
+                           (__pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used], __pyx_memoryview_thread_locks[i])
+                   break
+           else:
+               PyThread_free_lock(self.lock)

cdef char *get_item_pointer(memoryview self, object index) except NULL:
    cdef Py_ssize_t dim
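Note on the lock-reuse change above: it amounts to a small object pool — a fixed array of pre-allocated locks is handed out while available, and only the overflow is allocated (and later freed) dynamically. A rough pure-Python sketch of the same idea, with illustrative names and pool size rather than the module's own:

    import threading

    _POOL_SIZE = 8
    _lock_pool = [threading.Lock() for _ in range(_POOL_SIZE)]
    _locks_used = 0

    def acquire_pool_lock():
        """Hand out a pre-allocated lock if one is left, otherwise create a new one."""
        global _locks_used
        if _locks_used < _POOL_SIZE:
            lock = _lock_pool[_locks_used]
            _locks_used += 1
            return lock
        return threading.Lock()

    def release_pool_lock(lock):
        """Return a pooled lock by swapping it back into the unused region of the array."""
        global _locks_used
        for i in range(_locks_used):
            if _lock_pool[i] is lock:
                _locks_used -= 1
                _lock_pool[i], _lock_pool[_locks_used] = _lock_pool[_locks_used], _lock_pool[i]
                return
        # not from the pool: nothing to do, garbage collection reclaims it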
docs/src/reference/compilation.rst
@@ -415,7 +415,7 @@ Configurable optimisations
``optimize.unpack_method_calls`` (True / False)
    Cython can generate code that optimistically checks for Python method objects
    at call time and unpacks the underlying function to call it directly.  This
-   can substantially speed up method calls, especially for bultins, but may also
+   can substantially speed up method calls, especially for builtins, but may also
    have a slight negative performance impact in some cases where the guess goes
    completely wrong.
    Disabling this option can also reduce the code size.  Default is True.
docs/src/reference/extension_types.rst
@@ -314,11 +314,11 @@ Subclassing
* If the base type is a built-in type, it must have been previously declared as an ``extern`` extension type.
* ``cimport`` can be used to import the base type, if the extern declared base type is in a ``.pxd`` definition file.
-* In Cython, multiple inheritance is not permitted.. singlular inheritance only
+* In Cython, multiple inheritance is not permitted.. singular inheritance only
-* Cython extenstion types can also be sub-classed in Python.
+* Cython extension types can also be sub-classed in Python.
-    * Here multiple inhertance is permissible as is normal for Python.
+    * Here multiple inheritance is permissible as is normal for Python.
    * Even multiple extension types may be inherited, but C-layout of all the base classes must be compatible.

@@ -448,7 +448,7 @@ External
            print "Imag:", c.cval.imag

.. note:: Some important things in the example:

-    #. ``ctypedef`` has been used because because Python's header file has the struct decalared with::
+    #. ``ctypedef`` has been used because Python's header file has the struct declared with::

        ctypedef struct {
            ...
docs/src/reference/language_basics.rst
@@ -66,7 +66,7 @@ cimport
* Use the **cimport** statement, as you would Python's import statement, to access these files
  from other definition or implementation files.
-* **cimport** does not need to be called in ``.pyx`` file for for ``.pxd`` file that has the
+* **cimport** does not need to be called in ``.pyx`` file for ``.pxd`` file that has the
  same name, as they are already in the same namespace.
* For cimport to find the stated definition file, the path to the file must be appended to the
  ``-I`` option of the **Cython compile command**.

@@ -705,7 +705,7 @@ Error and Exception Handling
.. note:: Python Objects
    * Declared exception values are **not** need.
-   * Remember that Cython assumes that a function function without a declared return value, returns a Python object.
+   * Remember that Cython assumes that a function without a declared return value, returns a Python object.
    * Exceptions on such functions are implicitly propagated by returning ``NULL``

.. note:: C++
docs/src/tutorial/numpy.rst
@@ -73,7 +73,7 @@ run a Python session to test both the Python version (imported from
        [2, 2, 2],
        [1, 1, 1]])
    In [4]: import convolve1
    In [4]: convolve1.naive_convolve(np.array([[1, 1, 1]], dtype=np.int),
    ...     np.array([[1],[2],[1]], dtype=np.int))
    Out [4]:
    array([[1, 1, 1],

@@ -196,7 +196,7 @@ These are the needed changes::
    def naive_convolve(np.ndarray[DTYPE_t, ndim=2] f, np.ndarray[DTYPE_t, ndim=2] g):
    ...
    cdef np.ndarray[DTYPE_t, ndim=2] h = ...

Usage:

.. sourcecode:: ipython

@@ -227,42 +227,20 @@ The array lookups are still slowed down by two factors:
    ...
    cimport cython
-   @cython.boundscheck(False) # turn of bounds-checking for entire function
+   @cython.boundscheck(False) # turn off bounds-checking for entire function
+   @cython.wraparound(False)  # turn off negative index wrapping for entire function
    def naive_convolve(np.ndarray[DTYPE_t, ndim=2] f, np.ndarray[DTYPE_t, ndim=2] g):
    ...

Now bounds checking is not performed (and, as a side-effect, if you ''do''
happen to access out of bounds you will in the best case crash your program
and in the worst case corrupt data). It is possible to switch bounds-checking
mode in many ways, see :ref:`compiler-directives` for more
information.

-Negative indices are dealt with by ensuring Cython that the indices will be
-positive, by casting the variables to unsigned integer types (if you do have
-negative values, then this casting will create a very large positive value
-instead and you will attempt to access out-of-bounds values). Casting is done
-with a special ``<>``-syntax. The code below is changed to use either
-unsigned ints or casting as appropriate::
-
-   ...
-   cdef int s, t                                                                            # changed
-   cdef unsigned int x, y, v, w                                                             # changed
-   cdef int s_from, s_to, t_from, t_to
-   cdef DTYPE_t value
-   for x in range(xmax):
-       for y in range(ymax):
-           s_from = max(smid - x, -smid)
-           s_to = min((xmax - x) - smid, smid + 1)
-           t_from = max(tmid - y, -tmid)
-           t_to = min((ymax - y) - tmid, tmid + 1)
-           value = 0
-           for s in range(s_from, s_to):
-               for t in range(t_from, t_to):
-                   v = <unsigned int>(x - smid + s) # changed
-                   w = <unsigned int>(y - tmid + t) # changed
-                   value += g[<unsigned int>(smid - s), <unsigned int>(tmid - t)] * f[v, w] # changed
-           h[x, y] = value
-   ...
+Also, we've disabled the check to wrap negative indices (e.g. g[-1] giving
+the last value). As with disabling bounds checking, bad things will happen
+if we try to actually use negative indices with this disabled.

The function call overhead now starts to play a role, so we compare the latter
two examples with larger N:

@@ -310,4 +288,3 @@ There is some speed penalty to this though (as one makes more assumptions
compile-time if the type is set to :obj:`np.ndarray`, specifically it is
assumed that the data is stored in pure strided mode and not in indirect
mode).
docs/src/tutorial/profiling_tutorial.rst
@@ -164,7 +164,7 @@ write a short script to profile our code::
Running this on my box gives the following output::

-   TODO: how to display this not as code but verbatimly?
+   TODO: how to display this not as code but verbatim?

    Sat Nov  7 17:40:54 2009    Profile.prof
docs/src/tutorial/pure.rst
@@ -287,6 +287,15 @@ Further Cython functions and declarations
    T = cython.typedef(cython.p_int)   # ctypedef int* T

+* ``cast`` will (unsafely) reinterpret an expression type. ``cython.cast(T, t)``
+  is equivalent to ``<T>t``. The first attribute must be a type, the second is
+  the expression to cast. Specifying the optional keyword argument
+  ``typecheck=True`` has the semantics of ``<T?>t``.
+
+  ::
+
+    t1 = cython.cast(T, t)
+    t2 = cython.cast(T, t, typecheck=True)

.. _magic_attributes_pxd:
docs/src/userguide/extension_types.rst
@@ -132,7 +132,7 @@ you can use a cast to write::
This may be dangerous if :meth:`quest()` is not actually a :class:`Shrubbery`, as it
will try to access width as a C struct member which may not exist. At the C level,
rather than raising an :class:`AttributeError`, either an nonsensical result will be
-returned (interpreting whatever data is at at that address as an int) or a segfault
+returned (interpreting whatever data is at that address as an int) or a segfault
may result from trying to access invalid memory. Instead, one can write::

    print (<Shrubbery?>quest()).width

@@ -649,7 +649,7 @@ When you declare::
the name Spam serves both these roles. There may be other names by which you
can refer to the constructor, but only Spam can be used as a type name. For
-example, if you were to explicity import MyModule, you could use
+example, if you were to explicitly import MyModule, you could use
``MyModule.Spam()`` to create a Spam instance, but you wouldn't be able to use
:class:`MyModule.Spam` as a type name.
docs/src/userguide/memoryviews.rst
@@ -649,7 +649,7 @@ None Slices
===========

Although memoryview slices are not objects they can be set to None and they can
-be be checked for being None as well::
+be checked for being None as well::

    def func(double[:] myarray = None):
        print(myarray is None)
docs/src/userguide/parallelism.rst
@@ -94,7 +94,7 @@ It currently supports OpenMP, but later on more backends might be supported.
    The ``chunksize`` argument indicates the chunksize to be used for dividing the iterations among threads.
    This is only valid for ``static``, ``dynamic`` and ``guided`` scheduling, and is optional. Different chunksizes
-   may give substatially different performance results, depending on the schedule, the load balance it provides,
+   may give substantially different performance results, depending on the schedule, the load balance it provides,
    the scheduling overhead and the amount of false sharing (if any).

Example with a reduction::
docs/src/userguide/wrapping_CPlusPlus.rst
@@ -166,7 +166,7 @@ version 0.17, Cython also allows to pass external source files into the
And in the .pyx source file, write this into the first comment block, before
any source code, to compile it in C++ mode and link it statically against the
-:file:`Rectange.cpp` code file::
+:file:`Rectangle.cpp` code file::

    # distutils: language = c++
    # distutils: sources = Rectangle.cpp
pyximport/pyxbuild.py
@@ -19,9 +19,9 @@ DEBUG = 0
_reloads = {}

-def pyx_to_dll(filename, ext=None, force_rebuild=0, build_in_temp=False, pyxbuild_dir=None,
-               setup_args={}, reload_support=False, inplace=False):
+def pyx_to_dll(filename, ext=None, force_rebuild=0, build_in_temp=False, pyxbuild_dir=None,
+               setup_args=None, reload_support=False, inplace=False):
    """Compile a PYX file to a DLL and return the name of the generated .so
       or .dll ."""
    assert os.path.exists(filename), "Could not find %s" % os.path.abspath(filename)

@@ -35,6 +35,8 @@ def pyx_to_dll(filename, ext = None, force_rebuild = 0,
        filename = filename[:-len(extension)] + '.c'
        ext = Extension(name=modname, sources=[filename])

+   if setup_args is None:
+       setup_args = {}
    if not pyxbuild_dir:
        pyxbuild_dir = os.path.join(path, "_pyxbld")

@@ -151,6 +153,7 @@ def pyx_to_dll(filename, ext = None, force_rebuild = 0,
            sys.stderr.write(error + "\n")
        raise

if __name__ == "__main__":
    pyx_to_dll("dummy.pyx")
    from . import test
pyximport/pyximport.py
@@ -62,19 +62,23 @@ PYXBLD_EXT = ".pyxbld"
DEBUG_IMPORT = False

def _print(message, args):
    if args:
        message = message % args
    print(message)

def _debug(message, *args):
    if DEBUG_IMPORT:
        _print(message, args)

def _info(message, *args):
    _print(message, args)

# Performance problem: for every PYX file that is imported, we will
# invoke the whole distutils infrastructure even if the module is
# already built. It might be more efficient to only do it when the
# mod time of the .pyx is newer than the mod time of the .so but

@@ -84,6 +88,7 @@ def _info(message, *args):
def _load_pyrex(name, filename):
    "Load a pyrex file given a name and filename."

def get_distutils_extension(modname, pyxfilename, language_level=None):
#    try:
#        import hashlib

@@ -103,6 +108,7 @@ def get_distutils_extension(modname, pyxfilename, language_level=None):
        extension_mod.cython_directives = {'language_level': language_level}
    return extension_mod, setup_args

def handle_special_build(modname, pyxfilename):
    special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT
    ext = None

@@ -116,9 +122,8 @@ def handle_special_build(modname, pyxfilename):
        make_ext = getattr(mod, 'make_ext', None)
        if make_ext:
            ext = make_ext(modname, pyxfilename)
-           assert ext and ext.sources, ("make_ext in %s did not return Extension"
-                                        % special_build)
-       make_setup_args = getattr(mod, 'make_setup_args', None)
+           assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build
+       make_setup_args = getattr(mod, 'make_setup_args', None)
        if make_setup_args:
            setup_args = make_setup_args()
            assert isinstance(setup_args, dict), ("make_setup_args in %s did not return a dict"

@@ -129,6 +134,7 @@ def handle_special_build(modname, pyxfilename):
                       for source in ext.sources]
    return ext, setup_args

def handle_dependencies(pyxfilename):
    testing = '_test_files' in globals()
    dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT

@@ -166,16 +172,16 @@ def handle_dependencies(pyxfilename):
        if testing:
            _test_files.append(file)

def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None):
-   assert os.path.exists(pyxfilename), ("Path does not exist: %s" % pyxfilename)
+   assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename
    handle_dependencies(pyxfilename)

    extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level)
    build_in_temp = pyxargs.build_in_temp
    sargs = pyxargs.setup_args.copy()
    sargs.update(setup_args)
    build_in_temp = sargs.pop('build_in_temp', build_in_temp)

    from . import pyxbuild
    so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod,

@@ -189,7 +195,7 @@ def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_l
        junkpath = os.path.join(os.path.dirname(so_path), name + "_*")
        #very dangerous with --inplace ? yes, indeed, trying to eat my files ;)
        junkstuff = glob.glob(junkpath)
        for path in junkstuff:
            if path != so_path:
                try:
                    os.remove(path)
                except IOError:

@@ -197,6 +203,7 @@ def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_l
    return so_path

def load_module(name, pyxfilename, pyxbuild_dir=None, is_package=False,
                build_inplace=False, language_level=None, so_path=None):
    try:

@@ -314,6 +321,7 @@ class PyxImporter(object):
        _debug("%s not found" % fullname)
        return None

class PyImporter(PyxImporter):
    """A meta-path importer for normal .py files.
    """

@@ -384,6 +392,7 @@ class PyImporter(PyxImporter):
            self.blocked_modules.pop()
        return importer

class LibLoader(object):
    def __init__(self):
        self._libs = {}

@@ -404,6 +413,7 @@ class LibLoader(object):
_lib_loader = LibLoader()

class PyxLoader(object):
    def __init__(self, fullname, path, init_path=None, pyxbuild_dir=None,
                 inplace=False, language_level=None):

@@ -442,7 +452,8 @@ class PyxArgs(object):
    build_in_temp = True
    setup_args = {}   #None

##pyxargs=None

def _have_importers():
    has_py_importer = False

@@ -456,8 +467,9 @@ def _have_importers():
    return has_py_importer, has_pyx_importer

-def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
-           setup_args={}, reload_support=False,
+def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
+           setup_args=None, reload_support=False,
            load_py_module_on_import_failure=False, inplace=False,
            language_level=None):
    """Main entry point. Call this to install the .pyx import hook in

@@ -504,6 +516,8 @@ def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
    The default is to use the language level of the current Python
    runtime for .py files and Py2 for .pyx files.
    """
+   if setup_args is None:
+       setup_args = {}
    if not build_dir:
        build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld')

@@ -532,6 +546,7 @@ def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
    return py_importer, pyx_importer

def uninstall(py_importer, pyx_importer):
    """
    Uninstall an import hook.

@@ -546,6 +561,7 @@ def uninstall(py_importer, pyx_importer):
    except ValueError:
        pass

# MAIN

def show_docs():

@@ -559,5 +575,6 @@ def show_docs():
        pass
    help(__main__)

if __name__ == '__main__':
    show_docs()
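Note on the install() change above: with setup_args=None as the default, each call to pyximport.install() now gets its own configuration dict instead of sharing one mutable default. Typical usage is unchanged; a minimal sketch (the include path and the imported module name are illustrative):

    import numpy
    import pyximport

    # Install the .pyx import hook; setup_args is forwarded to distutils.
    pyximport.install(setup_args={"include_dirs": [numpy.get_include()]})

    import my_cython_module   # hypothetical module, compiled on first import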
runtests.py
@@ -2019,6 +2019,8 @@ def runtests(options, cmd_args, coverage=None):
    if options.system_pyregr and languages:
        sys_pyregr_dir = os.path.join(sys.prefix, 'lib', 'python'+sys.version[:3], 'test')
+       if not os.path.isdir(sys_pyregr_dir):
+           sys_pyregr_dir = os.path.join(os.path.dirname(sys.executable), 'Lib', 'test')  # source build
        if os.path.isdir(sys_pyregr_dir):
            filetests = TestBuilder(ROOTDIR, WORKDIR, selectors, exclude_selectors,
                                    options.annotate_source, options.cleanup_workdir,
tests/errors/cdef_class_properties_decorated.pyx (new file, 0 → 100644)
# mode: error
# ticket: 264
# tag: property, decorator

from functools import wraps

def wrap_func(f):
    @wraps(f)
    def wrap(*args, **kwargs):
        print("WRAPPED")
        return f(*args, **kwargs)
    return wrap

cdef class Prop:
    @property
    @wrap_func
    def prop1(self):
        return 1

    @property
    def prop2(self):
        return 2

    @wrap_func
    @prop2.setter
    def prop2(self, value):
        pass

    @prop2.setter
    @wrap_func
    def prop2(self, value):
        pass

_ERRORS = """
19:4: Property methods with additional decorators are not supported
27:4: Property methods with additional decorators are not supported
33:4: Property methods with additional decorators are not supported
"""
tests/run/cdef_class_property_decorator_T264.pyx (new file, 0 → 100644)
# mode: run
# ticket: 264
# tag: property, decorator

cdef class Prop:
    """
    >>> p = Prop()
    >>> p.prop
    GETTING 'None'
    >>> p.prop = 1
    SETTING '1' (previously: 'None')
    >>> p.prop
    GETTING '1'
    1
    >>> p.prop = 2
    SETTING '2' (previously: '1')
    >>> p.prop
    GETTING '2'
    2
    >>> del p.prop
    DELETING '2'
    >>> p.prop
    GETTING 'None'
    """
    cdef _value

    def __init__(self):
        self._value = None

    @property
    def prop(self):
        print("FAIL")
        return 0

    @prop.getter
    def prop(self):
        print("FAIL")

    @property
    def prop(self):
        print("GETTING '%s'" % self._value)
        return self._value

    @prop.setter
    def prop(self, value):
        print("SETTING '%s' (previously: '%s')" % (value, self._value))
        self._value = value

    @prop.deleter
    def prop(self):
        print("DELETING '%s'" % self._value)
        self._value = None
tests/run/libcpp_all.pyx
@@ -13,6 +13,7 @@ cimport libcpp.set
cimport libcpp.stack
cimport libcpp.vector
cimport libcpp.complex
+cimport libcpp.limits

from libcpp.deque cimport *
from libcpp.list cimport *

@@ -23,6 +24,7 @@ from libcpp.set cimport *
from libcpp.stack cimport *
from libcpp.vector cimport *
from libcpp.complex cimport *
+from libcpp.limits cimport *

cdef libcpp.deque.deque[int] d1 = deque[int]()
cdef libcpp.list.list[int] l1 = list[int]()

@@ -91,3 +93,17 @@ cdef const_vector_to_list(const vector[double]& cv):
        lst.append(cython.operator.dereference(iter))
        cython.operator.preincrement(iter)
    return lst

+cdef double dmax = numeric_limits[double].max()
+cdef double dmin = numeric_limits[double].min()
+cdef double deps = numeric_limits[double].epsilon()
+cdef double dqnan = numeric_limits[double].quiet_NaN()
+cdef double dsnan = numeric_limits[double].signaling_NaN()
+cdef double dinf = numeric_limits[double].infinity()
+
+cdef int imax = numeric_limits[int].max()
+cdef int imin = numeric_limits[int].min()
+cdef int ieps = numeric_limits[int].epsilon()
+cdef int iqnan = numeric_limits[int].quiet_NaN()
+cdef int isnan = numeric_limits[int].signaling_NaN()
+cdef int iinf = numeric_limits[int].infinity()
tests/run/locals.pyx
@@ -88,3 +88,18 @@ def locals_ctype_inferred():
    cdef int *p = NULL
    b = p
    return 'b' in locals()

+def pass_on_locals(f):
+   """
+   >>> def print_locals(l, **kwargs):
+   ...     print(sorted(l))
+
+   >>> pass_on_locals(print_locals)
+   ['f']
+   ['f']
+   ['f']
+   """
+   f(locals())
+   f(l=locals())
+   f(l=locals(), a=1)
tests/run/pure.pyx
@@ -49,6 +49,26 @@ def test_cast(x):
    n = cython.cast(cython.int, x)
    return n

+@cython.locals(as_list=list)
+def test_cast_object(x, typecheck):
+   """
+   >>> test_cast_object([1, 2, 3], True)
+   [1, 2, 3]
+   >>> test_cast_object([1, 2, 3], False)
+   [1, 2, 3]
+   >>> test_cast_object((1, 2, 3), True)
+   Traceback (most recent call last):
+   ...
+   TypeError: Expected list, got tuple
+   >>> test_cast_object((1, 2, 3), False)
+   (1, 2, 3)
+   """
+   if typecheck:
+       as_list = cython.cast(list, x, typecheck=True)
+   else:
+       as_list = cython.cast(list, x, typecheck=False)
+   return as_list

@cython.locals(x=cython.int, y=cython.p_int)
def test_address(x):
    """
tests/run/pure_cdef_class_property_decorator_T264.pxd (new file, 0 → 100644)
# mode: run
# ticket: 264
# tag: property, decorator

cdef class Prop:
    cdef _value
tests/run/pure_cdef_class_property_decorator_T264.py (new file, 0 → 100644)
# mode: run
# ticket: 264
# tag: property, decorator

class Prop(object):
    """
    >>> p = Prop()
    >>> p.prop
    GETTING 'None'
    >>> p.prop = 1
    SETTING '1' (previously: 'None')
    >>> p.prop
    GETTING '1'
    1
    >>> p.prop = 2
    SETTING '2' (previously: '1')
    >>> p.prop
    GETTING '2'
    2
    >>> del p.prop
    DELETING '2'
    >>> p.prop
    GETTING 'None'
    """

    def __init__(self):
        self._value = None

    @property
    def prop(self):
        print("FAIL")
        return 0

    @prop.getter
    def prop(self):
        print("FAIL")

    @property
    def prop(self):
        print("GETTING '%s'" % self._value)
        return self._value

    @prop.setter
    def prop(self, value):
        print("SETTING '%s' (previously: '%s')" % (value, self._value))
        self._value = value

    @prop.deleter
    def prop(self):
        print("DELETING '%s'" % self._value)
        self._value = None
tests/run/sequential_parallel.pyx
@@ -46,6 +46,34 @@ def test_descending_prange():

    return sum

+def test_prange_matches_range(int start, int stop, int step):
+   """
+   >>> test_prange_matches_range(0, 8, 3)
+   >>> test_prange_matches_range(0, 9, 3)
+   >>> test_prange_matches_range(0, 10, 3)
+
+   >>> test_prange_matches_range(0, 10, -3)
+
+   >>> test_prange_matches_range(0, -10, -3)
+   >>> test_prange_matches_range(1, -10, -3)
+   >>> test_prange_matches_range(2, -10, -3)
+   >>> test_prange_matches_range(3, -10, -3)
+   """
+   cdef int i, range_last, prange_last
+   prange_set = set()
+   for i in prange(start, stop, step, nogil=True, num_threads=3):
+       prange_last = i
+       with gil:
+           prange_set.add(i)
+   range_set = set(range(start, stop, step))
+   assert range_set == prange_set, "missing: %s extra %s" % (
+       sorted(range_set - prange_set), sorted(prange_set - range_set))
+   for ii in range(start, stop, step):
+       range_last = ii
+   if range_set:
+       assert prange_last == i
+       assert range_last == prange_last

def test_propagation():
    """
    >>> test_propagation()