Commit 75562e57 authored by Kevin Modzelewski, committed by GitHub

Merge pull request #1363 from kmod/cpython_tests

Take a pass over the CPython tests
parents 9042baf8 a2c2b89b
...@@ -333,7 +333,7 @@ endmacro() ...@@ -333,7 +333,7 @@ endmacro()
add_pyston_test(defaults tests --order-by-mtime -t50) add_pyston_test(defaults tests --order-by-mtime -t50)
add_pyston_test(force_llvm tests -a=-n -t90) add_pyston_test(force_llvm tests -a=-n -t90)
if(${CMAKE_BUILD_TYPE} STREQUAL "Release") if(${CMAKE_BUILD_TYPE} STREQUAL "Release")
add_pyston_test(max_compilation_tier tests -a=-O -t50) add_pyston_test(max_compilation_tier tests -a=-L -t50)
endif() endif()
add_pyston_test(defaults cpython --exit-code-only --skip-failing -t100) add_pyston_test(defaults cpython --exit-code-only --skip-failing -t100)
add_pyston_test(defaults integration --exit-code-only --skip-failing -t900) add_pyston_test(defaults integration --exit-code-only --skip-failing -t900)
......
...@@ -31,8 +31,8 @@ GTEST_DIR := $(DEPS_DIR)/gtest-1.7.0 ...@@ -31,8 +31,8 @@ GTEST_DIR := $(DEPS_DIR)/gtest-1.7.0
USE_DEBUG_LIBUNWIND := 0 USE_DEBUG_LIBUNWIND := 0
MAX_MEM_KB := 500000 MAX_MEM_KB := 1500000
MAX_DBG_MEM_KB := 500000 MAX_DBG_MEM_KB := 1500000
TEST_THREADS := 1 TEST_THREADS := 1
...@@ -411,10 +411,10 @@ $1_unittest: ...@@ -411,10 +411,10 @@ $1_unittest:
$(NINJA) -C $(CMAKE_DIR_DBG) $1_unittest $(NINJAFLAGS) $(NINJA) -C $(CMAKE_DIR_DBG) $1_unittest $(NINJAFLAGS)
ln -sf $(CMAKE_DIR_DBG)/$1_unittest . ln -sf $(CMAKE_DIR_DBG)/$1_unittest .
dbg_$1_unittests: $1_unittest dbg_$1_unittests: $1_unittest
zsh -c 'ulimit -m $(MAX_MEM_KB); time $(GDB) $(GDB_CMDS) --args ./$1_unittest --gtest_break_on_failure $(ARGS)' zsh -c 'ulimit -v $(MAX_MEM_KB); time $(GDB) $(GDB_CMDS) --args ./$1_unittest --gtest_break_on_failure $(ARGS)'
unittests:: $1_unittest unittests:: $1_unittest
run_$1_unittests: $1_unittest run_$1_unittests: $1_unittest
zsh -c 'ulimit -m $(MAX_MEM_KB); time ./$1_unittest $(ARGS)' zsh -c 'ulimit -v $(MAX_MEM_KB); time ./$1_unittest $(ARGS)'
run_unittests:: run_$1_unittests run_unittests:: run_$1_unittests
) )
endef endef
...@@ -759,23 +759,23 @@ define make_target ...@@ -759,23 +759,23 @@ define make_target
$(eval \ $(eval \
.PHONY: test$1 check$1 .PHONY: test$1 check$1
check$1 test$1: $(PYTHON_EXE_DEPS) pyston$1 check$1 test$1: $(PYTHON_EXE_DEPS) pyston$1
$(PYTHON) $(TOOLS_DIR)/tester.py -R pyston$1 -j$(TEST_THREADS) -a=-S -k $(TESTS_DIR) $(ARGS) $(PYTHON) $(TOOLS_DIR)/tester.py -q -R pyston$1 -j$(TEST_THREADS) -a=-S -k $(TESTS_DIR) $(ARGS)
@# we pass -I to cpython tests and skip failing ones because they are sloooow otherwise @# we pass -I to cpython tests and skip failing ones because they are sloooow otherwise
$(PYTHON) $(TOOLS_DIR)/tester.py -R pyston$1 -j$(TEST_THREADS) -a=-S -k --exit-code-only --skip-failing -t50 $(TEST_DIR)/cpython $(ARGS) $(PYTHON) $(TOOLS_DIR)/tester.py -q -R pyston$1 -j$(TEST_THREADS) -a=-S -k --exit-code-only --skip-failing -t50 $(TEST_DIR)/cpython $(ARGS)
$(PYTHON) $(TOOLS_DIR)/tester.py -R pyston$1 -j$(TEST_THREADS) -k -a=-S --exit-code-only --skip-failing -t600 $(TEST_DIR)/integration $(ARGS) $(PYTHON) $(TOOLS_DIR)/tester.py -q -R pyston$1 -j$(TEST_THREADS) -k -a=-S --exit-code-only --skip-failing -t600 $(TEST_DIR)/integration $(ARGS)
$(PYTHON) $(TOOLS_DIR)/tester.py -R pyston$1 -j$(TEST_THREADS) -a=-n -a=-S -t50 -k $(TESTS_DIR) $(ARGS) $(PYTHON) $(TOOLS_DIR)/tester.py -q -R pyston$1 -j$(TEST_THREADS) -a=-n -a=-S -t50 -k $(TESTS_DIR) $(ARGS)
$(PYTHON) $(TOOLS_DIR)/tester.py -R pyston$1 -j$(TEST_THREADS) -a=-O -a=-S -k $(TESTS_DIR) $(ARGS) $(PYTHON) $(TOOLS_DIR)/tester.py -q -R pyston$1 -j$(TEST_THREADS) -a=-L -a=-S -k $(TESTS_DIR) $(ARGS)
.PHONY: run$1 dbg$1 .PHONY: run$1 dbg$1
run$1: pyston$1 $$(RUN_DEPS) run$1: pyston$1 $$(RUN_DEPS)
PYTHONPATH=test/test_extension:$${PYTHONPATH} ./pyston$1 $$(ARGS) PYTHONPATH=test/test_extension:$${PYTHONPATH} ./pyston$1 $$(ARGS)
dbg$1: pyston$1 $$(RUN_DEPS) dbg$1: pyston$1 $$(RUN_DEPS)
PYTHONPATH=test/test_extension:$${PYTHONPATH} zsh -c 'ulimit -m $$(MAX_DBG_MEM_KB); $$(GDB) $$(GDB_CMDS) --args ./pyston$1 $$(ARGS)' PYTHONPATH=test/test_extension:$${PYTHONPATH} zsh -c 'ulimit -v $$(MAX_DBG_MEM_KB); $$(GDB) $$(GDB_CMDS) --args ./pyston$1 $$(ARGS)'
nosearch_run$1_%: %.py pyston$1 $$(RUN_DEPS) nosearch_run$1_%: %.py pyston$1 $$(RUN_DEPS)
$(VERB) PYTHONPATH=test/test_extension:$${PYTHONPATH} zsh -c 'ulimit -m $$(MAX_MEM_KB); time ./pyston$1 $$(ARGS) $$<' $(VERB) PYTHONPATH=test/test_extension:$${PYTHONPATH} zsh -c 'ulimit -v $$(MAX_MEM_KB); time ./pyston$1 $$(ARGS) $$<'
$$(call make_search,run$1_%) $$(call make_search,run$1_%)
nosearch_dbg$1_%: %.py pyston$1 $$(RUN_DEPS) nosearch_dbg$1_%: %.py pyston$1 $$(RUN_DEPS)
$(VERB) PYTHONPATH=test/test_extension:$${PYTHONPATH} zsh -c 'ulimit -m $$(MAX_DBG_MEM_KB); $$(GDB) $$(GDB_CMDS) --args ./pyston$1 $$(ARGS) $$<' $(VERB) PYTHONPATH=test/test_extension:$${PYTHONPATH} zsh -c 'ulimit -v $$(MAX_DBG_MEM_KB); $$(GDB) $$(GDB_CMDS) --args ./pyston$1 $$(ARGS) $$<'
$$(call make_search,dbg$1_%) $$(call make_search,dbg$1_%)
ifneq ($$(ENABLE_VALGRIND),0) ifneq ($$(ENABLE_VALGRIND),0)
...@@ -905,7 +905,7 @@ opreportcg: ...@@ -905,7 +905,7 @@ opreportcg:
.PHONY: watch_% watch wdbg_% .PHONY: watch_% watch wdbg_%
watch_%: watch_%:
@ ( ulimit -t 60; ulimit -m $(MAK_MEM_KB); \ @ ( ulimit -t 60; ulimit -v $(MAK_MEM_KB); \
TARGET=$(dir $@)$(patsubst watch_%,%,$(notdir $@)); \ TARGET=$(dir $@)$(patsubst watch_%,%,$(notdir $@)); \
clear; $(MAKE) $$TARGET $(WATCH_ARGS); true; \ clear; $(MAKE) $$TARGET $(WATCH_ARGS); true; \
while inotifywait -q -e modify -e attrib -e move -e move_self -e create -e delete -e delete_self \ while inotifywait -q -e modify -e attrib -e move -e move_self -e create -e delete -e delete_self \
...@@ -921,7 +921,7 @@ wdbg_%: ...@@ -921,7 +921,7 @@ wdbg_%:
$(MAKE) $(patsubst wdbg_%,watch_dbg_%,$@) GDB_POST_CMDS="--ex quit" $(MAKE) $(patsubst wdbg_%,watch_dbg_%,$@) GDB_POST_CMDS="--ex quit"
.PHONY: head_% .PHONY: head_%
HEAD := 40 HEAD ?= 40
HEAD_SKIP := 6 HEAD_SKIP := 6
head_%: head_%:
@ bash -c "set -o pipefail; script -e -q -c '$(MAKE) $(dir $@)$(patsubst head_%,%,$(notdir $@))' /dev/null | tail -n+$(HEAD_SKIP) | head -n$(HEAD)" @ bash -c "set -o pipefail; script -e -q -c '$(MAKE) $(dir $@)$(patsubst head_%,%,$(notdir $@))' /dev/null | tail -n+$(HEAD_SKIP) | head -n$(HEAD)"
...@@ -953,7 +953,7 @@ test_cpp_ll: ...@@ -953,7 +953,7 @@ test_cpp_ll:
.PHONY: bench_exceptions .PHONY: bench_exceptions
bench_exceptions: bench_exceptions:
$(CLANGPP_EXE) $(TEST_DIR)/bench_exceptions.cpp -o bench_exceptions -O3 -std=c++11 $(CLANGPP_EXE) $(TEST_DIR)/bench_exceptions.cpp -o bench_exceptions -O3 -std=c++11
zsh -c 'ulimit -m $(MAX_MEM_KB); time ./bench_exceptions' zsh -c 'ulimit -v $(MAX_MEM_KB); time ./bench_exceptions'
rm bench_exceptions rm bench_exceptions
TEST_EXT_MODULE_NAMES := basic_test descr_test slots_test type_test api_test TEST_EXT_MODULE_NAMES := basic_test descr_test slots_test type_test api_test
......
# Copy any changed stdlib files to the destination: # Copy any changed stdlib files to the destination:
file(GLOB_RECURSE STDLIB_SRCS Lib/*.py) file(GLOB_RECURSE STDLIB_SRCS Lib/*.py)
file(GLOB STDLIB_MORE_SRCS Lib/lib2to3/*.txt)
set(STDLIB_SRCS ${STDLIB_MORE_SRCS} ${STDLIB_SRCS})
file(GLOB STDLIB_MORE_SRCS Lib/email/test/data/*)
set(STDLIB_SRCS ${STDLIB_MORE_SRCS} ${STDLIB_SRCS})
file(GLOB_RECURSE STD_INCLUDES Include/*.h) file(GLOB_RECURSE STD_INCLUDES Include/*.h)
set(STDLIB_TARGETS "") set(STDLIB_TARGETS "")
foreach(STDLIB_FILE ${STDLIB_SRCS}) foreach(STDLIB_FILE ${STDLIB_SRCS})
...@@ -181,6 +188,7 @@ add_custom_command(OUTPUT ${STDMODULES} ...@@ -181,6 +188,7 @@ add_custom_command(OUTPUT ${STDMODULES}
DEPENDS DEPENDS
pyston pyston
copy_stdlib copy_stdlib
setup.py
Modules/_multiprocessing/multiprocessing.c Modules/_multiprocessing/multiprocessing.c
Modules/_multiprocessing/semaphore.c Modules/_multiprocessing/semaphore.c
Modules/_multiprocessing/socket_connection.c Modules/_multiprocessing/socket_connection.c
......
...@@ -15,8 +15,9 @@ a thread-local object and use its attributes: ...@@ -15,8 +15,9 @@ a thread-local object and use its attributes:
42 42
You can also access the local-object's dictionary: You can also access the local-object's dictionary:
(Pyston change: changed this to dict(mydata.__dict__) to make more portable)
>>> mydata.__dict__ >>> dict(mydata.__dict__)
{'number': 42} {'number': 42}
>>> mydata.__dict__.setdefault('widgets', []) >>> mydata.__dict__.setdefault('widgets', [])
[] []
......
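The doctest tweak above avoids depending on the concrete type of mydata.__dict__, which in Pyston may be an attribute-wrapper proxy rather than a plain dict. A minimal sketch, not from the commit, of why converting with dict() keeps the expected output stable:

import threading

mydata = threading.local()
mydata.number = 42

# repr(mydata.__dict__) depends on whatever mapping type the runtime uses for
# instance dictionaries; converting to a plain dict first yields the literal
# form the doctest expects on any implementation.
print dict(mydata.__dict__)   # {'number': 42}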
# -*- coding: uft-8 -*- # -*- coding: uft-8 -*-
# skip-if: True
#coding: utf8 #coding: utf8
# skip-if: True
print '我' print '我'
...@@ -7,8 +7,7 @@ import re ...@@ -7,8 +7,7 @@ import re
import os.path import os.path
import tempfile import tempfile
import subprocess import subprocess
# Pyston change: we can't import this currently import py_compile
# import py_compile
import contextlib import contextlib
import shutil import shutil
try: try:
......
# expected: fail
import ConfigParser import ConfigParser
import StringIO import StringIO
import os import os
......
# expected: fail
import test.test_support, unittest import test.test_support, unittest
import os import os
......
# expected: fail
"""This test checks for correct fork() behavior. """This test checks for correct fork() behavior.
""" """
......
# expected: fail
# Check that multiple features can be enabled. # Check that multiple features can be enabled.
from __future__ import unicode_literals, print_function from __future__ import unicode_literals, print_function
import sys import sys
import unittest import unittest
from . import test_support # Pyston change: changed to absolute import
from test import test_support
class TestMultipleFeatures(unittest.TestCase): class TestMultipleFeatures(unittest.TestCase):
......
# expected: fail
from test import test_support as support from test import test_support as support
# If we end up with a significant number of tests that don't require # If we end up with a significant number of tests that don't require
# threading, this test module should be split. Right now we skip # threading, this test module should be split. Right now we skip
...@@ -11,7 +10,8 @@ import os.path ...@@ -11,7 +10,8 @@ import os.path
import SocketServer import SocketServer
import time import time
from test_support import reap_threads, verbose, transient_internet # Pyston change: changed to absolute import
from test.test_support import reap_threads, verbose, transient_internet
import unittest import unittest
try: try:
......
# expected: fail # expected: fail
# fails because capifunc's don't have __name__ attributes,
# which causes from_cpython/Lib/fractions.py to error
import unittest import unittest
from test import test_support from test import test_support
from itertools import * from itertools import *
...@@ -140,11 +138,6 @@ class TestBasicOps(unittest.TestCase): ...@@ -140,11 +138,6 @@ class TestBasicOps(unittest.TestCase):
self.assertEqual(result, list(combinations2(values, r))) # matches second pure python version self.assertEqual(result, list(combinations2(values, r))) # matches second pure python version
self.assertEqual(result, list(combinations3(values, r))) # matches second pure python version self.assertEqual(result, list(combinations3(values, r))) # matches second pure python version
@test_support.bigaddrspacetest
def test_combinations_overflow(self):
with self.assertRaises((OverflowError, MemoryError)):
combinations("AA", 2**29)
@test_support.impl_detail("tuple reuse is specific to CPython") @test_support.impl_detail("tuple reuse is specific to CPython")
def test_combinations_tuple_reuse(self): def test_combinations_tuple_reuse(self):
self.assertEqual(len(set(map(id, combinations('abcde', 3)))), 1) self.assertEqual(len(set(map(id, combinations('abcde', 3)))), 1)
...@@ -216,11 +209,6 @@ class TestBasicOps(unittest.TestCase): ...@@ -216,11 +209,6 @@ class TestBasicOps(unittest.TestCase):
self.assertEqual(result, list(cwr1(values, r))) # matches first pure python version self.assertEqual(result, list(cwr1(values, r))) # matches first pure python version
self.assertEqual(result, list(cwr2(values, r))) # matches second pure python version self.assertEqual(result, list(cwr2(values, r))) # matches second pure python version
@test_support.bigaddrspacetest
def test_combinations_with_replacement_overflow(self):
with self.assertRaises((OverflowError, MemoryError)):
combinations_with_replacement("AA", 2**30)
@test_support.impl_detail("tuple reuse is specific to CPython") @test_support.impl_detail("tuple reuse is specific to CPython")
def test_combinations_with_replacement_tuple_reuse(self): def test_combinations_with_replacement_tuple_reuse(self):
cwr = combinations_with_replacement cwr = combinations_with_replacement
...@@ -287,11 +275,6 @@ class TestBasicOps(unittest.TestCase): ...@@ -287,11 +275,6 @@ class TestBasicOps(unittest.TestCase):
self.assertEqual(result, list(permutations(values, None))) # test r as None self.assertEqual(result, list(permutations(values, None))) # test r as None
self.assertEqual(result, list(permutations(values))) # test default r self.assertEqual(result, list(permutations(values))) # test default r
@test_support.bigaddrspacetest
def test_permutations_overflow(self):
with self.assertRaises((OverflowError, MemoryError)):
permutations("A", 2**30)
@test_support.impl_detail("tuple reuse is specific to CPython") @test_support.impl_detail("tuple reuse is specific to CPython")
def test_permutations_tuple_reuse(self): def test_permutations_tuple_reuse(self):
self.assertEqual(len(set(map(id, permutations('abcde', 3)))), 1) self.assertEqual(len(set(map(id, permutations('abcde', 3)))), 1)
...@@ -376,8 +359,7 @@ class TestBasicOps(unittest.TestCase): ...@@ -376,8 +359,7 @@ class TestBasicOps(unittest.TestCase):
c = count(value) c = count(value)
self.assertEqual(next(copy.copy(c)), value) self.assertEqual(next(copy.copy(c)), value)
self.assertEqual(next(copy.deepcopy(c)), value) self.assertEqual(next(copy.deepcopy(c)), value)
for proto in range(pickle.HIGHEST_PROTOCOL + 1): self.assertEqual(next(pickle.loads(pickle.dumps(c))), value)
self.assertEqual(next(pickle.loads(pickle.dumps(c, proto))), value)
def test_count_with_stride(self): def test_count_with_stride(self):
self.assertEqual(zip('abc',count(2,3)), [('a', 2), ('b', 5), ('c', 8)]) self.assertEqual(zip('abc',count(2,3)), [('a', 2), ('b', 5), ('c', 8)])
...@@ -710,11 +692,6 @@ class TestBasicOps(unittest.TestCase): ...@@ -710,11 +692,6 @@ class TestBasicOps(unittest.TestCase):
args = map(iter, args) args = map(iter, args)
self.assertEqual(len(list(product(*args))), expected_len) self.assertEqual(len(list(product(*args))), expected_len)
@test_support.bigaddrspacetest
def test_product_overflow(self):
with self.assertRaises((OverflowError, MemoryError)):
product(*(['ab']*2**5), repeat=2**25)
@test_support.impl_detail("tuple reuse is specific to CPython") @test_support.impl_detail("tuple reuse is specific to CPython")
def test_product_tuple_reuse(self): def test_product_tuple_reuse(self):
self.assertEqual(len(set(map(id, product('abc', 'def')))), 1) self.assertEqual(len(set(map(id, product('abc', 'def')))), 1)
...@@ -722,9 +699,6 @@ class TestBasicOps(unittest.TestCase): ...@@ -722,9 +699,6 @@ class TestBasicOps(unittest.TestCase):
def test_repeat(self): def test_repeat(self):
self.assertEqual(list(repeat(object='a', times=3)), ['a', 'a', 'a']) self.assertEqual(list(repeat(object='a', times=3)), ['a', 'a', 'a'])
self.assertEqual(list(repeat(object='a', times=0)), [])
self.assertEqual(list(repeat(object='a', times=-1)), [])
self.assertEqual(list(repeat(object='a', times=-2)), [])
self.assertEqual(zip(xrange(3),repeat('a')), self.assertEqual(zip(xrange(3),repeat('a')),
[(0, 'a'), (1, 'a'), (2, 'a')]) [(0, 'a'), (1, 'a'), (2, 'a')])
self.assertEqual(list(repeat('a', 3)), ['a', 'a', 'a']) self.assertEqual(list(repeat('a', 3)), ['a', 'a', 'a'])
...@@ -741,12 +715,6 @@ class TestBasicOps(unittest.TestCase): ...@@ -741,12 +715,6 @@ class TestBasicOps(unittest.TestCase):
list(r) list(r)
self.assertEqual(repr(r), 'repeat((1+0j), 0)') self.assertEqual(repr(r), 'repeat((1+0j), 0)')
def test_repeat_with_negative_times(self):
self.assertEqual(repr(repeat('a', -1)), "repeat('a', 0)")
self.assertEqual(repr(repeat('a', -2)), "repeat('a', 0)")
self.assertEqual(repr(repeat('a', times=-1)), "repeat('a', 0)")
self.assertEqual(repr(repeat('a', times=-2)), "repeat('a', 0)")
def test_imap(self): def test_imap(self):
self.assertEqual(list(imap(operator.pow, range(3), range(1,7))), self.assertEqual(list(imap(operator.pow, range(3), range(1,7))),
[0**1, 1**2, 2**3]) [0**1, 1**2, 2**3])
...@@ -832,7 +800,6 @@ class TestBasicOps(unittest.TestCase): ...@@ -832,7 +800,6 @@ class TestBasicOps(unittest.TestCase):
it = islice(it, 1) it = islice(it, 1)
self.assertIsNotNone(wr()) self.assertIsNotNone(wr())
list(it) # exhaust the iterator list(it) # exhaust the iterator
test_support.gc_collect()
self.assertIsNone(wr()) self.assertIsNone(wr())
def test_takewhile(self): def test_takewhile(self):
......
# expected: fail # skip-if: not IS_OPTIMIZED
# Skipping test_parser and test_all_fixers # Skipping test_parser and test_all_fixers
# because of running # because of running
from lib2to3.tests import (test_fixers, test_pytree, test_util, test_refactor, from lib2to3.tests import (test_fixers, test_pytree, test_util, test_refactor,
......
...@@ -2,7 +2,8 @@ ...@@ -2,7 +2,8 @@
""" Test suite for the code in msilib """ """ Test suite for the code in msilib """
import unittest import unittest
import os import os
from test_support import run_unittest, import_module # Pyston change: changed to an absolute import due to our changed way of running the tests
from test.test_support import run_unittest, import_module
msilib = import_module('msilib') msilib = import_module('msilib')
class Test_make_id(unittest.TestCase): class Test_make_id(unittest.TestCase):
......
# skip-if: True
# - This test hangs and the tester fails to clean it up. # - This test hangs and the tester fails to clean it up.
# #
# Unit tests for the multiprocessing package # Unit tests for the multiprocessing package
......
# expected: fail
# Tests for rich comparisons # Tests for rich comparisons
import unittest import unittest
......
# expected: fail # skip-if: True
# - this isn't an actual test file
"""Supporting definitions for the Python regression tests.""" """Supporting definitions for the Python regression tests."""
if __name__ != 'test.test_support': if __name__ != 'test.test_support':
......
# expected: fail
# -*- coding: iso-8859-15 -*- # -*- coding: iso-8859-15 -*-
import sys import sys
......
# expected: fail
# tempfile.py unit tests. # tempfile.py unit tests.
import tempfile import tempfile
import errno import errno
......
# expected: fail
import os import os
import unittest import unittest
import random import random
......
# expected: fail
import unittest import unittest
from doctest import DocTestSuite from doctest import DocTestSuite
from test import test_support from test import test_support
......
# expected: fail
doctests = """ doctests = """
Tests for the tokenize module. Tests for the tokenize module.
......
# expected: fail
# test_pickle dumps and loads pickles via pickle.py. # test_pickle dumps and loads pickles via pickle.py.
# test_cpickle does the same, but via the cPickle module. # test_cpickle does the same, but via the cPickle module.
# This test covers the other two cases, making pickles with one module and # This test covers the other two cases, making pickles with one module and
......
# expected: fail
# We can test part of the module without zlib. # We can test part of the module without zlib.
try: try:
import zlib import zlib
......
# skip-if: True
# - not a test file itself
# Helper script for test_tempfile.py. argv[2] is the number of a file # Helper script for test_tempfile.py. argv[2] is the number of a file
# descriptor which should _not_ be open. Check this by attempting to # descriptor which should _not_ be open. Check this by attempting to
# write to it -- if we succeed, something is wrong. # write to it -- if we succeed, something is wrong.
......
...@@ -436,7 +436,7 @@ w_object(PyObject *v, WFILE *p) ...@@ -436,7 +436,7 @@ w_object(PyObject *v, WFILE *p)
} }
else if (PyCode_Check(v)) { else if (PyCode_Check(v)) {
// Pyston change: not implemented // Pyston change: not implemented
assert(0 && "not implemented"); assert(0 && "Marshalling of code objects not implemented");
abort(); abort();
/* /*
PyCodeObject *co = (PyCodeObject *)v; PyCodeObject *co = (PyCodeObject *)v;
......
...@@ -452,6 +452,8 @@ Box* ASTInterpreter::executeInner(ASTInterpreter& interpreter, CFGBlock* start_b ...@@ -452,6 +452,8 @@ Box* ASTInterpreter::executeInner(ASTInterpreter& interpreter, CFGBlock* start_b
Box* ASTInterpreter::execute(ASTInterpreter& interpreter, CFGBlock* start_block, BST_stmt* start_at) { Box* ASTInterpreter::execute(ASTInterpreter& interpreter, CFGBlock* start_block, BST_stmt* start_at) {
UNAVOIDABLE_STAT_TIMER(t0, "us_timer_in_interpreter"); UNAVOIDABLE_STAT_TIMER(t0, "us_timer_in_interpreter");
RECURSIVE_BLOCK(CXX, " in function call");
return executeInnerAndSetupFrame(interpreter, start_block, start_at); return executeInnerAndSetupFrame(interpreter, start_block, start_at);
} }
......
...@@ -182,9 +182,11 @@ static void enableGdbSegfaultWatcher() { ...@@ -182,9 +182,11 @@ static void enableGdbSegfaultWatcher() {
} }
int handleArg(char code) { int handleArg(char code) {
if (code == 'O') if (code == 'O') {
Py_OptimizeFlag++;
} else if (code == 'L') {
FORCE_OPTIMIZE = true; FORCE_OPTIMIZE = true;
else if (code == 'q') } else if (code == 'q')
GLOBAL_VERBOSITY = 0; GLOBAL_VERBOSITY = 0;
else if (code == 'v') { else if (code == 'v') {
if (GLOBAL_VERBOSITY) if (GLOBAL_VERBOSITY)
...@@ -219,6 +221,8 @@ int handleArg(char code) { ...@@ -219,6 +221,8 @@ int handleArg(char code) {
USE_REGALLOC_BASIC = false; USE_REGALLOC_BASIC = false;
} else if (code == 'E') { } else if (code == 'E') {
Py_IgnoreEnvironmentFlag = 1; Py_IgnoreEnvironmentFlag = 1;
} else if (code == 'B') {
Py_DontWriteBytecodeFlag++;
} else if (code == 'P') { } else if (code == 'P') {
PAUSE_AT_ABORT = true; PAUSE_AT_ABORT = true;
} else if (code == 'F') { } else if (code == 'F') {
...@@ -327,7 +331,7 @@ static int main(int argc, char** argv) noexcept { ...@@ -327,7 +331,7 @@ static int main(int argc, char** argv) noexcept {
// Suppress getopt errors so we can throw them ourselves // Suppress getopt errors so we can throw them ourselves
opterr = 0; opterr = 0;
while ((code = getopt(argc, argv, "+:OqdIibpjtrTRSUvnxXEac:FuPTGm:")) != -1) { while ((code = getopt(argc, argv, "+:OLqdIibpjtrTRSUvnxXEBac:FuPTGm:")) != -1) {
if (code == 'c') { if (code == 'c') {
assert(optarg); assert(optarg);
command = optarg; command = optarg;
......
...@@ -19,6 +19,7 @@ ...@@ -19,6 +19,7 @@
#include "pythread.h" #include "pythread.h"
#include "capi/typeobject.h" #include "capi/typeobject.h"
#include "capi/types.h"
#include "core/threading.h" #include "core/threading.h"
#include "core/types.h" #include "core/types.h"
#include "runtime/objmodel.h" #include "runtime/objmodel.h"
...@@ -79,7 +80,8 @@ static void* thread_start(STOLEN(Box*) target, STOLEN(Box*) varargs, STOLEN(Box* ...@@ -79,7 +80,8 @@ static void* thread_start(STOLEN(Box*) target, STOLEN(Box*) varargs, STOLEN(Box*
try { try {
autoDecref(runtimeCall(target, ArgPassSpec(0, 0, true, kwargs != NULL), varargs, kwargs, NULL, NULL, NULL)); autoDecref(runtimeCall(target, ArgPassSpec(0, 0, true, kwargs != NULL), varargs, kwargs, NULL, NULL, NULL));
} catch (ExcInfo e) { } catch (ExcInfo e) {
e.printExcAndTraceback(); if (!e.matches(SystemExit))
e.printExcAndTraceback();
e.clear(); e.clear();
} }
...@@ -198,14 +200,36 @@ Box* getIdent() { ...@@ -198,14 +200,36 @@ Box* getIdent() {
return boxInt(pthread_self()); return boxInt(pthread_self());
} }
Box* stackSize() { Box* stackSize(Box* arg) {
Py_FatalError("unimplemented"); if (arg) {
if (PyInt_Check(arg) && PyInt_AS_LONG(arg) == 0) {
Py_RETURN_NONE;
}
raiseExcHelper(ThreadError, "Changing initial stack size is not supported in Pyston");
}
return boxInt(0);
} }
Box* threadCount() { Box* threadCount() {
return boxInt(nb_threads); return boxInt(nb_threads);
} }
static PyObject* thread_PyThread_exit_thread(PyObject* self) noexcept {
PyErr_SetNone(PyExc_SystemExit);
return NULL;
}
PyDoc_STRVAR(exit_doc, "exit()\n\
(exit_thread() is an obsolete synonym)\n\
\n\
This is synonymous to ``raise SystemExit''. It will cause the current\n\
thread to exit silently unless the exception is caught.");
static PyMethodDef thread_methods[] = {
{ "exit_thread", (PyCFunction)thread_PyThread_exit_thread, METH_NOARGS, exit_doc },
{ "exit", (PyCFunction)thread_PyThread_exit_thread, METH_NOARGS, exit_doc },
};
void setupThread() { void setupThread() {
// Hacky: we want to use some of CPython's implementation of the thread module (the threading local stuff), // Hacky: we want to use some of CPython's implementation of the thread module (the threading local stuff),
// and some of ours (thread handling). Start off by calling a cut-down version of initthread, and then // and some of ours (thread handling). Start off by calling a cut-down version of initthread, and then
...@@ -227,7 +251,7 @@ void setupThread() { ...@@ -227,7 +251,7 @@ void setupThread() {
thread_module->giveAttr( thread_module->giveAttr(
"get_ident", new BoxedBuiltinFunctionOrMethod(BoxedCode::create((void*)getIdent, BOXED_INT, 0, "get_ident"))); "get_ident", new BoxedBuiltinFunctionOrMethod(BoxedCode::create((void*)getIdent, BOXED_INT, 0, "get_ident")));
thread_module->giveAttr("stack_size", new BoxedBuiltinFunctionOrMethod( thread_module->giveAttr("stack_size", new BoxedBuiltinFunctionOrMethod(
BoxedCode::create((void*)stackSize, BOXED_INT, 0, "stack_size"))); BoxedCode::create((void*)stackSize, UNKNOWN, 1, "stack_size"), { NULL }));
thread_module->giveAttr( thread_module->giveAttr(
"_count", new BoxedBuiltinFunctionOrMethod(BoxedCode::create((void*)threadCount, BOXED_INT, 0, "_count"))); "_count", new BoxedBuiltinFunctionOrMethod(BoxedCode::create((void*)threadCount, BOXED_INT, 0, "_count")));
...@@ -254,5 +278,10 @@ void setupThread() { ...@@ -254,5 +278,10 @@ void setupThread() {
ThreadError = (BoxedClass*)PyErr_NewException("thread.error", NULL, NULL); ThreadError = (BoxedClass*)PyErr_NewException("thread.error", NULL, NULL);
thread_module->giveAttrBorrowed("error", ThreadError); thread_module->giveAttrBorrowed("error", ThreadError);
auto thread_str = getStaticString("thread");
for (auto& md : thread_methods) {
thread_module->giveAttr(md.ml_name, new BoxedCApiFunction(&md, NULL, thread_str));
}
} }
} }
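The thread-module changes above are easiest to read from the Python side: stack_size() now reports 0, stack_size(0) is accepted, any other size raises thread.error, and the new exit()/exit_thread() raise SystemExit, which the thread-start wrapper now swallows without printing a traceback. A hedged sketch of that behavior (illustrative only, not part of the commit):

import thread, time

print thread.stack_size()        # 0: querying the stack size is supported

try:
    thread.stack_size(1 << 20)   # requesting a specific size is not supported
except thread.error, e:
    print e

def worker():
    thread.exit()                # raises SystemExit; the thread ends silently

thread.start_new_thread(worker, ())
time.sleep(0.1)                  # give the worker a moment to finish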
...@@ -628,7 +628,15 @@ extern "C" PyObject* PyExceptionInstance_Class(PyObject* o) noexcept { ...@@ -628,7 +628,15 @@ extern "C" PyObject* PyExceptionInstance_Class(PyObject* o) noexcept {
return PyInstance_Check(o) ? (Box*)static_cast<BoxedInstance*>(o)->inst_cls : o->cls; return PyInstance_Check(o) ? (Box*)static_cast<BoxedInstance*>(o)->inst_cls : o->cls;
} }
// With optimizations turned off, we can handle much fewer Python stack frames
// in the same amount of C-stack space.
// Use NDEBUG as a proxy for whether optimizations are turned on.
#ifndef NDEBUG
#define Py_DEFAULT_RECURSION_LIMIT 300
#else
#define Py_DEFAULT_RECURSION_LIMIT 1000 #define Py_DEFAULT_RECURSION_LIMIT 1000
#endif
static int recursion_limit = Py_DEFAULT_RECURSION_LIMIT; static int recursion_limit = Py_DEFAULT_RECURSION_LIMIT;
extern "C" { extern "C" {
int _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT; int _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT;
......
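Since the default recursion limit now differs between builds (300 when NDEBUG is not defined, 1000 otherwise), deep-recursion tests are better off deriving their depth from the runtime limit rather than hard-coding it, which is what the updated recursion test later in this diff does. A minimal sketch:

import sys

# Leave some headroom under whatever limit this build was configured with.
TEST_DEPTH = sys.getrecursionlimit() - 20

def recurse(n):
    if n > 0:
        return recurse(n - 1)
    return n

print recurse(TEST_DEPTH)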
...@@ -4747,7 +4747,7 @@ Box* callFunc(BoxedFunctionBase* func, CallRewriteArgs* rewrite_args, ArgPassSpe ...@@ -4747,7 +4747,7 @@ Box* callFunc(BoxedFunctionBase* func, CallRewriteArgs* rewrite_args, ArgPassSpe
template <ExceptionStyle S> template <ExceptionStyle S>
static Box* callChosenCF(CompiledFunction* chosen_cf, BoxedClosure* closure, BoxedGenerator* generator, Box* globals, static Box* callChosenCF(CompiledFunction* chosen_cf, BoxedClosure* closure, BoxedGenerator* generator, Box* globals,
Box* oarg1, Box* oarg2, Box* oarg3, Box** oargs) noexcept(S == CAPI) { Box* oarg1, Box* oarg2, Box* oarg3, Box** oargs) noexcept(S == CAPI) {
// TODO: this should be done in the rewrite as well // TODO: this should go into the emitted code
RECURSIVE_BLOCK(S, " in function call"); RECURSIVE_BLOCK(S, " in function call");
if (S != chosen_cf->exception_style) { if (S != chosen_cf->exception_style) {
......
...@@ -1631,7 +1631,9 @@ static Box* function_new(BoxedClass* cls, Box* _code, Box* globals, Box** _args) ...@@ -1631,7 +1631,9 @@ static Box* function_new(BoxedClass* cls, Box* _code, Box* globals, Box** _args)
RELEASE_ASSERT(code->source, ""); RELEASE_ASSERT(code->source, "");
if (code->source->scoping.areGlobalsFromModule()) { if (code->source->scoping.areGlobalsFromModule()) {
RELEASE_ASSERT(unwrapAttrWrapper(globals) == code->source->parent_module, ""); RELEASE_ASSERT(globals->cls == attrwrapper_cls && unwrapAttrWrapper(globals) == code->source->parent_module,
"Pyston doesn't support creating functions with overridden globals on code objects that weren't "
"compiled for it");
globals = NULL; globals = NULL;
} else { } else {
RELEASE_ASSERT(PyDict_Check(globals) || globals->cls == attrwrapper_cls, ""); RELEASE_ASSERT(PyDict_Check(globals) || globals->cls == attrwrapper_cls, "");
...@@ -3475,10 +3477,23 @@ static int object_set_class(PyObject* self, PyObject* value, void* closure) noex ...@@ -3475,10 +3477,23 @@ static int object_set_class(PyObject* self, PyObject* value, void* closure) noex
} }
} }
static PyObject* object_sizeof(PyObject* self, PyObject* args) noexcept {
Py_ssize_t res, isize;
res = 0;
isize = self->cls->tp_itemsize;
if (isize > 0)
res = self->cls->ob_size * isize;
res += self->cls->tp_basicsize;
return PyInt_FromSsize_t(res);
}
static PyMethodDef object_methods[] = { static PyMethodDef object_methods[] = {
{ "__reduce_ex__", object_reduce_ex, METH_VARARGS, NULL }, // { "__reduce_ex__", object_reduce_ex, METH_VARARGS, NULL }, //
{ "__reduce__", object_reduce, METH_VARARGS, NULL }, // { "__reduce__", object_reduce, METH_VARARGS, NULL }, //
{ "__format__", object_format, METH_VARARGS, PyDoc_STR("default object formatter") }, { "__format__", object_format, METH_VARARGS, PyDoc_STR("default object formatter") },
{ "__sizeof__", object_sizeof, METH_NOARGS, PyDoc_STR("__sizeof__() -> int\nsize of object in memory, in bytes") },
{ NULL, NULL, 0, NULL }, { NULL, NULL, 0, NULL },
}; };
......
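The new object_sizeof gives every object a default __sizeof__, computed from the type's basic size plus an item-size contribution for variable-sized types, as in the implementation above. A small usage sketch; the reported numbers are build- and implementation-specific:

# __sizeof__ now exists on plain objects; the value is whatever the runtime
# computes from tp_basicsize (plus tp_itemsize for variable-sized types).
print object().__sizeof__()
print "abc".__sizeof__()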
../../from_cpython/Lib/test/185test.db
../../from_cpython/Lib/test/Sine-1000Hz-300ms.aif
../../from_cpython/Lib/test/audiotest.au
../../from_cpython/Lib/test/bad_coding.py
../../from_cpython/Lib/test/bad_coding2.py
../../from_cpython/Lib/test/cfgparser.1
../../from_cpython/Lib/test/check_soundcard.vbs
../../from_cpython/Lib/test/cjkencodings
../../from_cpython/Lib/test/crashers
../../from_cpython/Lib/test/data
../../from_cpython/Lib/test/empty.vbs
../../from_cpython/Lib/test/greyrgb.uue
../../from_cpython/Lib/test/leakers
../../from_cpython/Lib/test/mp_fork_bomb.py
../../from_cpython/Lib/test/pickletester.py
../../from_cpython/Lib/test/randv2_32.pck
../../from_cpython/Lib/test/randv2_64.pck
../../from_cpython/Lib/test/randv3.pck
../../from_cpython/Lib/test/subprocessdata
../../from_cpython/Lib/test/test_difflib_expect.html
../../from_cpython/Lib/test/test_doctest.txt
../../from_cpython/Lib/test/test_doctest2.txt
../../from_cpython/Lib/test/test_doctest3.txt
../../from_cpython/Lib/test/test_doctest4.txt
# skip-if: IS_OPTIMIZED
# Skipping test_parser and test_all_fixers
# because of running
from lib2to3.tests import (test_fixers, test_pytree, test_util, test_refactor,
test_parser, test_main as test_main_)
import unittest
from test.test_support import run_unittest
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_parser,):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
../../from_cpython/Lib/test/testimg.uue
../../from_cpython/Lib/test/testimgr.uue
../../from_cpython/Lib/test/testrgb.uue
../../from_cpython/Lib/test/testtar.tar
../../from_cpython/Lib/test/tf_inherit_check.py
../../from_cpython/Lib/test/tokenize_tests.txt
../../from_cpython/Lib/test/tracedmodules
../../from_cpython/Lib/test/zipdir.zip
# run_args: -n # run_args: -n
# statcheck: ("-O" in EXTRA_JIT_ARGS) or (1 <= stats["num_osr_exits"] <= 2) # statcheck: ("-L" in EXTRA_JIT_ARGS) or (1 <= stats["num_osr_exits"] <= 2)
# statcheck: noninit_count('slowpath_binop') <= 10 # statcheck: noninit_count('slowpath_binop') <= 10
x = 100000 x = 100000
......
# skip-if: '-n' in EXTRA_JIT_ARGS or '-O' in EXTRA_JIT_ARGS # skip-if: '-n' in EXTRA_JIT_ARGS or '-L' in EXTRA_JIT_ARGS
# Tests to see if we add any extra refs to function arguments. # Tests to see if we add any extra refs to function arguments.
import sys import sys
......
# statcheck: '-O' in EXTRA_JIT_ARGS or 1 <= stats['num_osr_exits'] <= 5 # statcheck: '-L' in EXTRA_JIT_ARGS or 1 <= stats['num_osr_exits'] <= 5
def f(x): def f(x):
def inner(): def inner():
......
# skip-if: '-O' in EXTRA_JIT_ARGS or '-n' in EXTRA_JIT_ARGS # skip-if: '-L' in EXTRA_JIT_ARGS or '-n' in EXTRA_JIT_ARGS
# statcheck: 4 <= noninit_count('num_deopt') < 50 # statcheck: 4 <= noninit_count('num_deopt') < 50
# statcheck: 1 <= stats["num_osr_exits"] <= 2 # statcheck: 1 <= stats["num_osr_exits"] <= 2
......
# skip-if: '-O' in EXTRA_JIT_ARGS # skip-if: '-L' in EXTRA_JIT_ARGS
# expected: statfail # expected: statfail
# statcheck: 4 <= noninit_count('num_deopt') < 50 # statcheck: 4 <= noninit_count('num_deopt') < 50
# statcheck: 1 <= stats["num_osr_exits"] <= 2 # statcheck: 1 <= stats["num_osr_exits"] <= 2
......
# skip-if: '-O' in EXTRA_JIT_ARGS or '-n' in EXTRA_JIT_ARGS # skip-if: '-L' in EXTRA_JIT_ARGS or '-n' in EXTRA_JIT_ARGS
# statcheck: 4 <= noninit_count('num_deopt') < 50 # statcheck: 4 <= noninit_count('num_deopt') < 50
# statcheck: 1 <= stats["num_osr_exits"] <= 2 # statcheck: 1 <= stats["num_osr_exits"] <= 2
......
# skip-if: '-O' in EXTRA_JIT_ARGS or '-n' in EXTRA_JIT_ARGS # skip-if: '-L' in EXTRA_JIT_ARGS or '-n' in EXTRA_JIT_ARGS
# statcheck: 4 == noninit_count('num_deopt') # statcheck: 4 == noninit_count('num_deopt')
# this used to hit an abort in our LLVM tier codegen # this used to hit an abort in our LLVM tier codegen
try: try:
......
# skip-if: '-O' in EXTRA_JIT_ARGS or '-n' in EXTRA_JIT_ARGS # skip-if: '-L' in EXTRA_JIT_ARGS or '-n' in EXTRA_JIT_ARGS
# statcheck: noninit_count('num_deopt') == 2 # statcheck: noninit_count('num_deopt') == 2
try: try:
......
# statcheck: ("-O" in EXTRA_JIT_ARGS) or (1 <= stats["num_osr_exits"] <= 2) # statcheck: ("-L" in EXTRA_JIT_ARGS) or (1 <= stats["num_osr_exits"] <= 2)
# While perhaps not required in practice, we should have the ability to # While perhaps not required in practice, we should have the ability to
# OSR from inside a list comprehension. # OSR from inside a list comprehension.
......
# statcheck: '-O' in EXTRA_JIT_ARGS or 1 <= stats['num_osr_exits'] <= 5 # statcheck: '-L' in EXTRA_JIT_ARGS or 1 <= stats['num_osr_exits'] <= 5
# "big osr" in terms of lots of live variables needing to be passed through: # "big osr" in terms of lots of live variables needing to be passed through:
......
# statcheck: '-O' in EXTRA_JIT_ARGS or 1 <= stats['num_osr_exits'] <= 5 # statcheck: '-L' in EXTRA_JIT_ARGS or 1 <= stats['num_osr_exits'] <= 5
# Try to trick the JIT into OSR'ing into an optimized version with a speculation # Try to trick the JIT into OSR'ing into an optimized version with a speculation
# that has already failed. # that has already failed.
......
# statcheck: '-O' in EXTRA_JIT_ARGS or 1 <= stats['num_osr_exits'] <= 5 # statcheck: '-L' in EXTRA_JIT_ARGS or 1 <= stats['num_osr_exits'] <= 5
# Regression test to make sure we can do an OSR if one of the live variables # Regression test to make sure we can do an OSR if one of the live variables
# is potentially-undefined. # is potentially-undefined.
......
# statcheck: '-O' in EXTRA_JIT_ARGS or 1 <= stats['num_osr_exits'] <= 5 # statcheck: '-L' in EXTRA_JIT_ARGS or 1 <= stats['num_osr_exits'] <= 5
# Regression test to make sure we can do an OSR if one of the live variables # Regression test to make sure we can do an OSR if one of the live variables
# is potentially-undefined. # is potentially-undefined.
......
# skip-if: '-n' not in EXTRA_JIT_ARGS and '-O' not in EXTRA_JIT_ARGS # skip-if: '-n' not in EXTRA_JIT_ARGS and '-L' not in EXTRA_JIT_ARGS
# statcheck: noninit_count('slowpath_setattr') < 50 # statcheck: noninit_count('slowpath_setattr') < 50
class MyDescr(object): class MyDescr(object):
......
# skip-if: '-n' not in EXTRA_JIT_ARGS and '-O' not in EXTRA_JIT_ARGS
# disable this test because the interpreter (with disabled bjit) currently uses too much stack
# Make sure we can recurse at least 900 times on the three different types # Make sure we can recurse at least 900 times on the three different types
# of stacks that we have: # of stacks that we have:
import sys
TEST_DEPTH = sys.getrecursionlimit() - 20
def recurse(n): def recurse(n):
print n
if n > 0: if n > 0:
return recurse(n - 1) return recurse(n - 1)
return n return n
print "Recursing on main thread..." print "Recursing on main thread..."
recurse(900) recurse(TEST_DEPTH)
print "Recursing in a generator..." print "Recursing in a generator..."
def gen(): def gen():
yield recurse(900) yield recurse(TEST_DEPTH)
print list(gen()) print list(gen())
print "Recursing in a thread..." print "Recursing in a thread..."
...@@ -26,10 +25,17 @@ done = 0 ...@@ -26,10 +25,17 @@ done = 0
def thread_target(): def thread_target():
global done global done
recurse(900) recurse(TEST_DEPTH)
done = 1 done = 1
start_new_thread(thread_target, ()) start_new_thread(thread_target, ())
while not done: while not done:
time.sleep(0.001) time.sleep(0.001)
s = """
if depth < TEST_DEPTH:
depth += 1
exec s
"""
exec s in {'depth': 0, 's': s, 'TEST_DEPTH': TEST_DEPTH}
# skip-if: '-n' in EXTRA_JIT_ARGS or '-O' in EXTRA_JIT_ARGS # skip-if: '-n' in EXTRA_JIT_ARGS or '-L' in EXTRA_JIT_ARGS
# Make sure that we can fork from a threaded environment # Make sure that we can fork from a threaded environment
# #
# Running this test with -n or -O has horrible performance, since # Running this test with -n or -L has horrible performance, since
# we will freshly JIT all of the post-fork code after every fork. # we will freshly JIT all of the post-fork code after every fork.
import subprocess import subprocess
......
# fail-if: '-n' not in EXTRA_JIT_ARGS and '-O' not in EXTRA_JIT_ARGS # fail-if: '-n' not in EXTRA_JIT_ARGS and '-L' not in EXTRA_JIT_ARGS
# - This test needs type profiling to be enabled to trigger the bug # - This test needs type profiling to be enabled to trigger the bug
# This test throws an irgen assertion. # This test throws an irgen assertion.
......
# run_args: -n # run_args: -n
# #
# statcheck: "-O" in EXTRA_JIT_ARGS or 'slowpath_getclsattr' in stats or 'slowpath_callattr' in stats # statcheck: "-L" in EXTRA_JIT_ARGS or 'slowpath_getclsattr' in stats or 'slowpath_callattr' in stats
# statcheck: noninit_count('slowpath_getclsattr') <= 20 # statcheck: noninit_count('slowpath_getclsattr') <= 20
# statcheck: noninit_count('slowpath_callattr') <= 22 # statcheck: noninit_count('slowpath_callattr') <= 22
......
...@@ -188,6 +188,8 @@ def get_test_options(fn, check_stats, run_memcheck): ...@@ -188,6 +188,8 @@ def get_test_options(fn, check_stats, run_memcheck):
l = l.strip() l = l.strip()
if not l: if not l:
continue continue
if l.startswith("\xef\xbb\xbf"): # BOM
l = l[3:]
if not l.startswith("#"): if not l.startswith("#"):
break break
if l.startswith("# statcheck:"): if l.startswith("# statcheck:"):
...@@ -512,6 +514,7 @@ def main(orig_dir): ...@@ -512,6 +514,7 @@ def main(orig_dir):
global EXTMODULE_DIR_PYSTON global EXTMODULE_DIR_PYSTON
global EXTMODULE_DIR global EXTMODULE_DIR
global DISPLAY_SUCCESSES global DISPLAY_SUCCESSES
global IS_OPTIMIZED
run_memcheck = False run_memcheck = False
...@@ -535,6 +538,8 @@ def main(orig_dir): ...@@ -535,6 +538,8 @@ def main(orig_dir):
EXTMODULE_DIR = os.path.abspath(os.path.dirname(os.path.realpath(IMAGE)) + "/test/test_extension/build/lib.linux-x86_64-2.7/") EXTMODULE_DIR = os.path.abspath(os.path.dirname(os.path.realpath(IMAGE)) + "/test/test_extension/build/lib.linux-x86_64-2.7/")
patterns = opts.pattern patterns = opts.pattern
IS_OPTIMIZED = int(subprocess.check_output([IMAGE, "-c", 'import sysconfig; print int("-O0" not in sysconfig.get_config_var(\"CFLAGS\"))']))
if not patterns and not TESTS_TO_SKIP: if not patterns and not TESTS_TO_SKIP:
TESTS_TO_SKIP = ["t", "t2", "t3"] TESTS_TO_SKIP = ["t", "t2", "t3"]
...@@ -573,7 +578,7 @@ def main(orig_dir): ...@@ -573,7 +578,7 @@ def main(orig_dir):
filtered_tests.append(t) filtered_tests.append(t)
tests = filtered_tests tests = filtered_tests
if not tests: if not tests:
print >>sys.stderr, "No tests matched the given patterns. OK by me!" # print >>sys.stderr, "No tests matched the given patterns. OK by me!"
# this can happen legitimately in e.g. `make check_test_foo` if test_foo.py is a CPython regression test. # this can happen legitimately in e.g. `make check_test_foo` if test_foo.py is a CPython regression test.
sys.exit(0) sys.exit(0)
......
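tester.py now asks the binary whether it was built optimized and exposes the answer to test headers, so directives like "# skip-if: IS_OPTIMIZED" or "# skip-if: not IS_OPTIMIZED" (used by the lib2to3 tests above) work. A hedged sketch of the probe and of how such a directive might be evaluated; IMAGE is a hypothetical path here, and the eval call is illustrative rather than the tester's exact code:

import subprocess

IMAGE = "./pyston_release"   # hypothetical; the tester uses the real binary path

# Same probe the tester now runs: "optimized" means -O0 is absent from CFLAGS.
IS_OPTIMIZED = int(subprocess.check_output(
    [IMAGE, "-c",
     'import sysconfig; print int("-O0" not in sysconfig.get_config_var("CFLAGS"))']))

directive = "not IS_OPTIMIZED"   # e.g. parsed from a "# skip-if:" header line
if eval(directive, {"IS_OPTIMIZED": IS_OPTIMIZED}):
    print "skipping"
else:
    print "running"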