Commit 5f531449 (nexedi/cython), authored Jan 14, 2011 by Robert Bradshaw
Undo EOL whitespace additions.
Parent: c5e16a3f
Showing 8 changed files, with 548 additions and 548 deletions:
    Cython/Compiler/Main.py                              +28   -28
    Cython/Compiler/ParseTreeTransforms.py               +97   -97
    Cython/Compiler/Tests/TestParseTreeTransforms.py     +15   -15
    Cython/Debugger/Tests/test_libcython_in_gdb.py        +2    -2
    Cython/Debugger/libcython.py                        +203  -203
    Cython/Debugger/libpython.py                        +178  -178
    runtests.py                                          +24   -24
    setup.py                                              +1    -1

Because this commit only strips trailing (end-of-line) whitespace, the removed and added lines of every hunk are textually identical; each hunk below is therefore shown once, without +/- markers.
Cython/Compiler/Main.py  (+28 -28)

@@ -20,7 +20,7 @@ import Code
import Errors
# Do not import Parsing here, import it when needed, because Parsing imports
# Nodes, which globally needs debug command line options initialized to set a
# conditional metaclass. These options are processed by CmdLine called from
# main() in this file.
# import Parsing
import Version

@@ -72,7 +72,7 @@ class Context(object):
    #  include_directories   [string]
    #  future_directives     [object]
    #  language_level        int     currently 2 or 3 for Python 2/3

    def __init__(self, include_directories, compiler_directives, cpp=False, language_level=2):
        import Builtin, CythonScope
        self.modules = {"__builtin__" : Builtin.builtin_scope}

@@ -89,7 +89,7 @@ class Context(object):
        self.include_directories = include_directories + [standard_include_path]
        self.set_language_level(language_level)
        self.gdb_debug_outputwriter = None

    def set_language_level(self, level):

@@ -124,12 +124,12 @@ class Context(object):
        else:
            _check_c_declarations = check_c_declarations
            _specific_post_parse = None

        if py and not pxd:
            _align_function_definitions = AlignFunctionDefinitions(self)
        else:
            _align_function_definitions = None

        return [
            NormalizeTree(self),
            PostParse(self),

@@ -194,7 +194,7 @@ class Context(object):
            debug_transform = [DebugTransform(self, options, result)]
        else:
            debug_transform = []

        return list(itertools.chain(
            [create_parse(self)],
            self.create_pipeline(pxd=False, py=py),

@@ -218,7 +218,7 @@ class Context(object):
        return [parse_pxd] + self.create_pipeline(pxd=True) + [
            ExtractPxdCode(self),
            ]

    def create_py_pipeline(self, options, result):
        return self.create_pyx_pipeline(options, result, py=True)

@@ -227,7 +227,7 @@ class Context(object):
        pipeline = self.create_pxd_pipeline(scope, module_name)
        result = self.run_pipeline(pipeline, source_desc)
        return result

    def nonfatal_error(self, exc):
        return Errors.report_error(exc)

@@ -257,7 +257,7 @@ class Context(object):
                error = err
        return (error, data)

    def find_module(self, module_name,
                    relative_to = None, pos = None, need_pxd = 1):
        # Finds and returns the module scope corresponding to
        # the given relative or absolute module name. If this

@@ -327,7 +327,7 @@ class Context(object):
            except CompileError:
                pass
        return scope

    def find_pxd_file(self, qualified_name, pos):
        # Search include path for the .pxd file corresponding to the
        # given fully-qualified module name.

@@ -362,7 +362,7 @@ class Context(object):
        # Search include path for the .pyx file corresponding to the
        # given fully-qualified module name, as for find_pxd_file().
        return self.search_include_directories(qualified_name, ".pyx", pos)

    def find_include_file(self, filename, pos):
        # Search list of include directories for filename.
        # Reports an error and returns None if not found.

@@ -371,7 +371,7 @@ class Context(object):
        if not path:
            error(pos, "'%s' not found" % filename)
        return path

    def search_include_directories(self, qualified_name, suffix, pos,
                                   include=False):
        # Search the list of include directories for the given

@@ -452,15 +452,15 @@ class Context(object):
            if dep_path and Utils.file_newer_than(dep_path, c_time):
                return 1
        return 0

    def find_cimported_module_names(self, source_path):
        return [ name for kind, name in self.read_dependency_file(source_path)
                 if kind == "cimport" ]

    def is_package_dir(self, dir_path):
        # Return true if the given directory is a package directory.
        for filename in ("__init__.py",
                         "__init__.pyx",
                         "__init__.pxd"):
            path = os.path.join(dir_path, filename)
            if Utils.path_exists(path):

@@ -486,7 +486,7 @@ class Context(object):
        # Find a top-level module, creating a new one if needed.
        scope = self.lookup_submodule(name)
        if not scope:
            scope = ModuleScope(name,
                parent_module = None, context = self)
            self.modules[name] = scope
        return scope

@@ -606,7 +606,7 @@ def run_pipeline(source, options, full_module_name = None):
    # Set up result object
    result = create_default_resultobj(source, options)

    # Get pipeline
    if source_ext.lower() == '.py':
        pipeline = context.create_py_pipeline(options, result)

@@ -617,7 +617,7 @@ def run_pipeline(source, options, full_module_name = None):
    err, enddata = context.run_pipeline(pipeline, source)
    context.teardown_errors(err, options, result)
    return result

#------------------------------------------------------------------------
#

@@ -638,7 +638,7 @@ class CompilationSource(object):
class CompilationOptions(object):
    """
    Options to the Cython compiler:

    show_version      boolean   Display version number
    use_listing_file  boolean   Generate a .lis file
    errors_to_stderr  boolean   Echo errors to stderr when using .lis

@@ -653,10 +653,10 @@ class CompilationOptions(object):
    compiler_directives      dict     Overrides for pragma options (see Options.py)
    evaluate_tree_assertions boolean  Test support: evaluate parse tree assertions
    language_level           integer  The Python language level: 2 or 3
    cplus                    boolean  Compile as c++ code
    """

    def __init__(self, defaults = None, **kw):
        self.include_path = []
        if defaults:

@@ -675,7 +675,7 @@ class CompilationOptions(object):
class CompilationResult(object):
    """
    Results from the Cython compiler:

    c_file           string or None   The generated C source file
    h_file           string or None   The generated C header file
    i_file           string or None   The generated .pxi file

@@ -686,7 +686,7 @@ class CompilationResult(object):
    num_errors       integer          Number of compilation errors
    compilation_source CompilationSource
    """

    def __init__(self):
        self.c_file = None
        self.h_file = None

@@ -703,10 +703,10 @@ class CompilationResultSet(dict):
    Results from compiling multiple Pyrex source files. A mapping
    from source file paths to CompilationResult instances. Also
    has the following attributes:

    num_errors   integer   Total number of compilation errors
    """

    num_errors = 0

    def add(self, source, result):

@@ -717,7 +717,7 @@ class CompilationResultSet(dict):
def compile_single(source, options, full_module_name = None):
    """
    compile_single(source, options, full_module_name)

    Compile the given Pyrex implementation file and return a CompilationResult.
    Always compiles a single file; does not perform timestamp checking or
    recursion.

@@ -728,7 +728,7 @@ def compile_single(source, options, full_module_name = None):
def compile_multiple(sources, options):
    """
    compile_multiple(sources, options)

    Compiles the given sequence of Pyrex implementation files and returns
    a CompilationResultSet. Performs timestamp checking and/or recursion
    if these are specified in the options.

@@ -766,7 +766,7 @@ def compile_multiple(sources, options):
def compile(source, options = None, full_module_name = None, **kwds):
    """
    compile(source [, options], [, <option> = <value>]...)

    Compile one or more Pyrex implementation files, with optional timestamp
    checking and recursing on dependecies. The source argument may be a string
    or a sequence of strings If it is a string and no recursion or timestamp
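The docstrings in the hunks above describe the public compile() entry point together with CompilationOptions and CompilationResult. As a minimal, hedged usage sketch (not part of this commit; it mirrors the call pattern runtests.py uses later in this diff, and the file name, module name and include directory are placeholders):

    # Sketch only: drive Cython's compile() as documented above.
    from Cython.Compiler.Main import CompilationOptions, default_options
    from Cython.Compiler.Main import compile as cython_compile

    options = CompilationOptions(
        default_options,             # start from the default option set
        include_path=['include'],    # placeholder include directory
    )
    result = cython_compile('example.pyx', options=options,
                            full_module_name='example')

    # Per the CompilationResult docstring, the result exposes the generated
    # files and an error count.
    print result.c_file, result.num_errors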
Cython/Compiler/ParseTreeTransforms.py  (+97 -97)

This diff is collapsed in the page view and is not reproduced here.
Cython/Compiler/Tests/TestParseTreeTransforms.py  (+15 -15)

@@ -17,7 +17,7 @@ class TestNormalizeTree(TransformTest):
  body: ExprStatNode
    expr: NameNode
""", self.treetypes(t))

    def test_wrap_singlestat(self):
        t = self.run_pipeline([NormalizeTree(None)], u"if x: y")
        self.assertLines(u"""

@@ -83,7 +83,7 @@ class TestNormalizeTree(TransformTest):
  stats[0]: ExprStatNode
    expr: NameNode
""", self.treetypes(t))

    def test_pass_eliminated(self):
        t = self.run_pipeline([NormalizeTree(None)], u"pass")

@@ -142,7 +142,7 @@ class TestWithTransform(object): # (TransformTest): # Disabled!
        $0_2(None, None, None)
        """, t)

# TODO: Re-enable once they're more robust.
if sys.version_info[:2] >= (2, 5) and False:

@@ -153,15 +153,15 @@ else:
    DebuggerTestCase = object

class TestDebugTransform(DebuggerTestCase):

    def elem_hasattrs(self, elem, attrs):
        # we shall supporteth python 2.3 !
        return all([attr in elem.attrib for attr in attrs])

    def test_debug_info(self):
        try:
            assert os.path.exists(self.debug_dest)

            t = DebugWriter.etree.parse(self.debug_dest)
            # the xpath of the standard ElementTree is primitive, don't use
            # anything fancy

@@ -171,23 +171,23 @@ class TestDebugTransform(DebuggerTestCase):
            xml_globals = dict(
                [(e.attrib['name'], e.attrib['type']) for e in L])
            self.assertEqual(len(L), len(xml_globals))

            L = list(t.find('/Module/Functions'))
            assert L
            xml_funcs = dict([(e.attrib['qualified_name'], e) for e in L])
            self.assertEqual(len(L), len(xml_funcs))

            # test globals
            self.assertEqual('CObject', xml_globals.get('c_var'))
            self.assertEqual('PythonObject', xml_globals.get('python_var'))

            # test functions
            funcnames = ('codefile.spam', 'codefile.ham', 'codefile.eggs',
                         'codefile.closure', 'codefile.inner')
            required_xml_attrs = 'name', 'cname', 'qualified_name'
            assert all([f in xml_funcs for f in funcnames])
            spam, ham, eggs = [xml_funcs[funcname] for funcname in funcnames]

            self.assertEqual(spam.attrib['name'], 'spam')
            self.assertNotEqual('spam', spam.attrib['cname'])
            assert self.elem_hasattrs(spam, required_xml_attrs)

@@ -199,12 +199,12 @@ class TestDebugTransform(DebuggerTestCase):
            names = [e.attrib['name'] for e in spam_locals]
            self.assertEqual(list('abcd'), names)
            assert self.elem_hasattrs(spam_locals[0], required_xml_attrs)

            # test arguments of functions
            spam_arguments = list(spam.find('Arguments'))
            assert spam_arguments
            self.assertEqual(1, len(list(spam_arguments)))

            # test step-into functions
            step_into = spam.find('StepIntoFunctions')
            spam_stepinto = [x.attrib['name'] for x in step_into]

@@ -215,10 +215,10 @@ class TestDebugTransform(DebuggerTestCase):
        except:
            print open(self.debug_dest).read()
            raise

if __name__ == "__main__":
    import unittest
Cython/Debugger/Tests/test_libcython_in_gdb.py  (+2 -2)

@@ -385,7 +385,7 @@ class TestClosure(DebugTestCase):
    def test_inner(self):
        self.break_and_run_func('inner')
        self.assertEqual('', gdb.execute('cy locals', to_string=True))

        # Allow the Cython-generated code to initialize the scope variable
        gdb.execute('cy step')

@@ -400,7 +400,7 @@ class TestClosure(DebugTestCase):
        # Initialize scope with 'a' uninitialized
        gdb.execute('cy step')
        self.assertEqual('', gdb.execute('cy locals', to_string=True))

        # Initialize 'a' to 1
        gdb.execute('cy step')
        print_result = gdb.execute('cy print a', to_string=True).strip()
Cython/Debugger/libcython.py  (+203 -203)

This diff is collapsed in the page view and is not reproduced here.

Cython/Debugger/libpython.py  (+178 -178)

This diff is collapsed in the page view and is not reproduced here.
runtests.py  (+24 -24)

@@ -85,7 +85,7 @@ class build_ext(_build_ext):
    def build_extension(self, ext):
        if ext.language == 'c++':
            try:
                try: # Py2.7+ & Py3.2+
                    compiler_obj = self.compiler_obj
                except AttributeError:
                    compiler_obj = self.compiler

@@ -353,17 +353,17 @@ class CythonCompileTestCase(unittest.TestCase):
        source = self.find_module_source_file(
            os.path.join(test_directory, module + '.pyx'))
        target = os.path.join(targetdir, self.build_target_filename(module))

        if extra_compile_options is None:
            extra_compile_options = {}

        try:
            CompilationOptions
        except NameError:
            from Cython.Compiler.Main import CompilationOptions
            from Cython.Compiler.Main import compile as cython_compile
            from Cython.Compiler.Main import default_options

        options = CompilationOptions(
            default_options,
            include_path = include_dirs,

@@ -379,7 +379,7 @@ class CythonCompileTestCase(unittest.TestCase):
        cython_compile(source, options=options,
                       full_module_name=module)

    def run_distutils(self, test_directory, module, workdir, incdir,
                      extra_extension_args=None):
        cwd = os.getcwd()
        os.chdir(workdir)

@@ -394,10 +394,10 @@ class CythonCompileTestCase(unittest.TestCase):
                if match(module):
                    ext_include_dirs += get_additional_include_dirs()
            self.copy_related_files(test_directory, workdir, module)

            if extra_extension_args is None:
                extra_extension_args = {}

            extension = Extension(
                module,
                sources = self.find_source_files(workdir, module),

@@ -676,9 +676,9 @@ class CythonPyregrTestCase(CythonRunTestCase):
            result.addSkip(self, 'ok')

# Someone wrapped this in a:
# 'try: import gdb; ... except: include_debugger = False' thing, but don't do
# this, it doesn't work as gdb is a builtin module in GDB. The tests themselves
# are doing the skipping. If there's a problem with the tests, please file an
# issue.
include_debugger = sys.version_info[:2] > (2, 5)

@@ -690,7 +690,7 @@ def collect_unittests(path, module_prefix, suite, selectors):
            return dirname == "Tests"

    loader = unittest.TestLoader()

    if include_debugger:
        skipped_dirs = []
    else:

@@ -729,7 +729,7 @@ def collect_doctests(path, module_prefix, suite, selectors):
        return dirname not in ("Mac", "Distutils", "Plex")
    def file_matches(filename):
        filename, ext = os.path.splitext(filename)
        blacklist = ['libcython', 'libpython', 'test_libcython_in_gdb',
                     'TestLibCython']
        return (ext == '.py' and not
                '~' in filename and not

@@ -766,7 +766,7 @@ class EndToEndTest(unittest.TestCase):
    directory structure and its header gives a list of commands to run.
    """
    cython_root = os.path.dirname(os.path.abspath(__file__))

    def __init__(self, treefile, workdir, cleanup_workdir=True):
        self.treefile = treefile
        self.workdir = os.path.join(workdir, os.path.splitext(treefile)[0])

@@ -797,7 +797,7 @@ class EndToEndTest(unittest.TestCase):
        if self.cleanup_workdir:
            shutil.rmtree(self.workdir)
        os.chdir(self.old_dir)

    def runTest(self):
        commands = (self.commands
            .replace("CYTHON", "PYTHON %s" % os.path.join(self.cython_root, 'cython.py'))

@@ -832,15 +832,15 @@ class EndToEndTest(unittest.TestCase):
# TODO: Windows support.

class EmbedTest(unittest.TestCase):

    working_dir = "Demos/embed"

    def setUp(self):
        self.old_dir = os.getcwd()
        os.chdir(self.working_dir)
        os.system(
            "make PYTHON='%s' clean > /dev/null" % sys.executable)

    def tearDown(self):
        try:
            os.system(

@@ -848,7 +848,7 @@ class EmbedTest(unittest.TestCase):
        except:
            pass
        os.chdir(self.old_dir)

    def test_embed(self):
        from distutils import sysconfig
        libname = sysconfig.get_config_var('LIBRARY')

@@ -912,7 +912,7 @@ class FileListExcluder:
                self.excludes[line.split()[0]] = True
        finally:
            f.close()

    def __call__(self, testname):
        return testname in self.excludes or testname.split('.')[-1] in self.excludes

@@ -996,7 +996,7 @@ def main():
                      help="do not run the file based tests")
    parser.add_option("--no-pyregr", dest="pyregr",
                      action="store_false", default=True,
                      help="do not run the regression tests of CPython in tests/pyregr/")
    parser.add_option("--cython-only", dest="cython_only",
                      action="store_true", default=False,
                      help="only compile pyx to c, do not run C compiler or run the tests")

@@ -1146,16 +1146,16 @@ def main():
    # Chech which external modules are not present and exclude tests
    # which depends on them (by prefix)
    missing_dep_excluder = MissingDependencyExcluder(EXT_DEP_MODULES)
    version_dep_excluder = VersionDependencyExcluder(VER_DEP_MODULES)
    exclude_selectors = [missing_dep_excluder, version_dep_excluder] # want to pring msg at exit

    if options.exclude:
        exclude_selectors += [ re.compile(r, re.I|re.U).search for r in options.exclude ]

    if not test_bugs:
        exclude_selectors += [ FileListExcluder("tests/bugs.txt") ]

    if sys.platform in ['win32', 'cygwin'] and sys.version_info < (2, 6):
        exclude_selectors += [ lambda x: x == "run.specialfloat" ]

@@ -1206,7 +1206,7 @@ def main():
        ignored_modules = ('Options', 'Version', 'DebugFlags', 'CmdLine')
        modules = [ module for name, module in sys.modules.items()
                    if module is not None and
                    name.startswith('Cython.Compiler.') and
                    name[len('Cython.Compiler.'):] not in ignored_modules ]

        if options.coverage:
            coverage.report(modules, show_missing=0)
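The main() hunks above treat a test selector as nothing more than a callable applied to a test name: regexes from options.exclude become bound .search methods, FileListExcluder instances are callable, and a plain lambda handles the win32 special case. A small hedged sketch of that convention (illustrative names only, not part of the commit):

    # Sketch only: the exclude_selectors convention from main() above.
    import re

    exclude_selectors = [
        re.compile(r'^run\.special', re.I | re.U).search,  # regex-based, like options.exclude
        lambda testname: testname == "run.specialfloat",   # exact match, like the win32 case
    ]

    def is_excluded(testname):
        # A test is skipped as soon as any selector matches its name.
        return any(select(testname) for select in exclude_selectors)

    print is_excluded("run.specialfloat")   # True
    print is_excluded("run.dictcomp")       # False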
setup.py  (+1 -1)

@@ -66,7 +66,7 @@ else:
        'Cython' : [ p[7:] for p in pxd_include_patterns ],
    }

# This dict is used for passing extra arguments that are setuptools
# specific to setup
setuptools_extra_args = {}