neoppod, commit d46afb3e
authored Feb 06, 2018 by Kirill Smelkov
neotest: Teach it to also run go & py unit tests; hook it into nxd/runTestSuite
parent aa370ca3

Showing 2 changed files with 89 additions and 10 deletions:
go/neo/t/neotest              +34 -1
go/neo/t/nxd/runTestSuite     +55 -9
go/neo/t/neotest

@@ -137,6 +137,28 @@ $@
 \"
 "
 }
+
+# ---- go/py unit tests ----
+
+cmd_test-go() {
+	go test lab.nexedi.com/kirr/neo/go/...
+}
+
+cmd_test-py() {
+	# NOTE testing with only sqlite should be ok to check for client correctness
+	NEO_TESTS_ADAPTER=SQLite python -m neo.scripts.runner -ufz
+}
+
+cmd_test-local() {
+	cmd_test-go
+	cmd_test-py
+}
+
+cmd_test() {
+	url="$1"
+	test -z "$url" && die "Usage neotest test [user@]<host>:<path>"
+	on $url ./neotest test-local
+}
+
 # ---- net/fs setup + processes control/teardown ----
 # init_net - initialize networking
@@ -1263,7 +1285,7 @@ cmd_cpustat() {
 usage() {
 	cat 1>&2 <<EOF
-Neotest is a tool to functionally test and benchmark NEO.
+Neotest is a tool to test and benchmark NEO.
 
 Usage:

@@ -1271,6 +1293,13 @@ Usage:
 The commands are:
 
+    test            run all tests on a remote host
+    test-local      run all tests locally
+    test-go         run NEO/go unit tests   (part of test-local)
+    test-py         run NEO/py unit tests   (part of test-local)
+
     bench-local     run all benchmarks when client and server are both on the same localhost
     bench-cluster   run all benchmarks when server is local and client is on another node

@@ -1282,6 +1311,7 @@ The commands are:
     zbench-cluster  run ZODB benchmarks via network  (part of bench-cluster)
     zbench-client   run ZODB client benchmarks against separate server  (part of zbench-cluster)
     deploy          deploy NEO & needed software for tests to remote host
     deploy-local    deploy NEO & needed software for tests locally

@@ -1296,6 +1326,9 @@ EOF
 case "$1" in
 # commands that require build
+test-local | \
+test-go | \
+test-py | \
 bench-local | \
 bench-cluster | \
 zbench-local | \
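The second half of the commit hooks these commands into nxd/runTestSuite, which (as the diff below shows) simply spawns `neotest <test-name>` as a subprocess and reports the outcome. As a rough, self-contained sketch of that invocation pattern only (the run_neotest helper name is made up here; the real script also tees output to the console and reports to an ERP5 test master):

# Illustrative sketch, not the actual nxd/runTestSuite code.
from subprocess import Popen, PIPE

def run_neotest(testname):
    # spawn `neotest <test-name>` (neotest must be on $PATH) and capture its output
    p = Popen(['neotest', testname], stdout=PIPE, stderr=PIPE)
    stdout, stderr = p.communicate()
    return p.returncode, stdout, stderr

if __name__ == '__main__':
    for t in ('test-go', 'test-py'):
        rc, out, err = run_neotest(t)
        print('%s -> exit %d' % (t, rc))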

go/neo/t/nxd/runTestSuite
@@ -25,7 +25,7 @@ neotest must be on $PATH.
 from erp5.util.taskdistribution import TaskDistributor
 from subprocess import Popen, PIPE
 from time import time, strftime, gmtime
-import os, sys, threading, argparse, logging, traceback
+import os, sys, threading, argparse, logging, traceback, re
 
 def main():
@@ -53,7 +53,7 @@ def main():
     tool = TaskDistributor(portal_url = args.master_url, logger = logger)
     test_result = tool.createTestResult(revision = args.revision,
-            test_name_list  = ['bench-local'],
+            test_name_list  = ['test-go', 'test-py', 'bench-local'],
             node_title      = args.test_node_title,
             test_title      = args.test_suite_title or args.test_suite,
             project_title   = args.project_title)
@@ -76,7 +76,8 @@ def main():
             break
 
         # run `neotest <test-name>`
-        argv = ['neotest', test_result_line.name]
+        testname = test_result_line.name
+        argv = ['neotest', testname]
         tstart = time()
         try:
@@ -102,8 +103,31 @@
         p.wait()
         ok = (p.returncode == 0)
-        tend = time()
+
+        # default status dict just by exit code
+        status = {
+            'test_count':       1,
+            'error_count':      (0 if ok else 1),
+            'failure_count':    0,
+            'skip_count':       0,
+            #html_test_result
+        }
+
+        # postprocess output, if we can
+        summaryf = globals().get(testname.replace('-', '_') + '_summary')
+        if summaryf is not None:
+            try:
+                summary = summaryf(stdout)
+            except:
+                bad = traceback.format_exc()
+                sys.stderr.write(bad)
+                stderr += bad
+                status['error_count'] += 1
+            else:
+                status.update(summary)
+
+        tend = time()
 
         # report result of test run back to master
         test_result_line.stop(
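The summaryf lookup above is resolved purely by naming convention: for a test named `test-py` the script looks for a module-level function named `test_py_summary`. A tiny self-contained illustration of that dispatch (placeholder body here; the real parser is added at the end of this diff):

def test_py_summary(stdout):
    # placeholder; the real parser is defined further down in runTestSuite
    return {'test_count': 0}

def find_summary(testname):
    # 'test-py' -> 'test_py_summary'; None when no parser is defined
    return globals().get(testname.replace('-', '_') + '_summary')

print(find_summary('test-py'))   # <function test_py_summary ...>
print(find_summary('test-go'))   # None -> only the exit code is used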
@@ -114,11 +138,7 @@ def main():
                 stdout = stdout, stderr = stderr,
-                test_count      = 1,
-                error_count     = (0 if ok else 1),
-                failure_count   = 0,
-                skip_count      = 0,
-                #html_test_result
+                **status
                 )
 
 
 # tee, similar to tee(1) utility, copies data from fin to fout appending them to buf.
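The tee helper mentioned in the comment above is not touched by this commit, so its body does not appear in the diff. Purely as a hypothetical sketch of what that comment describes (copy data from fin to fout while collecting it into buf):

def tee(fin, fout, buf):
    # hypothetical sketch only; the real implementation lives outside this diff
    while True:
        data = fin.readline()
        if not data:
            break            # EOF on fin
        fout.write(data)
        fout.flush()
        buf.append(data)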
@@ -140,5 +160,31 @@ def tee(fin, fout, buf):
         buf.append(data)
 
 
+# xint converts number from neo/py test output to integer
+def xint(s):
+    s = s.strip()
+    if s == '.':
+        return 0
+    else:
+        return int(s)
+
+# extract summary from neo/py test run
+def test_py_summary(stdout):
+    # Test Module |   run   | unexpected | expected | skipped |   time
+    # ...
+    # Summary     |   366   |     .      |    9     |    .    | 353.47s
+    m = re.search(r'^\s*summary.*$', stdout, re.M | re.I)
+    assert m is not None, "could not find summary line"
+    summary = m.group(0)
+    _, nrun, nfail, nxfail, nskip, _ = summary.split('|')
+    return {
+        'test_count':       xint(nrun),
+        'error_count':      xint(nfail),
+        'failure_count':    xint(nxfail),
+        'skip_count':       xint(nskip),
+    }
+
 
 if __name__ == '__main__':
     main()
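A quick worked check (not part of the commit) of how test_py_summary and xint handle the sample Summary row quoted in the comment above:

sample = "Summary     |   366   |     .      |    9     |    .    | 353.47s"
print(test_py_summary(sample))
# gives {'test_count': 366, 'error_count': 0, 'failure_count': 9, 'skip_count': 0}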