slapos.toolbox

Commit 1c8269b2
authored Feb 04, 2019 by Bryton Lacquement 🚪
committed by Julien Muchembled, Jun 27, 2019
Add support for Python 3

/reviewed-on nexedi/slapos.toolbox!51

parent eba2c149

Showing 61 changed files with 590 additions and 530 deletions (+590 -530)
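Most of the 61 files below are touched by the same handful of Python 2/3 compatibility idioms: print as a function, "except X as e", and renamed standard-library modules reached through six.moves. As a reading aid, here is a minimal, hypothetical sketch of those idioms; it is not code from this commit, and the file, section, and option names are made up.

# Illustration only: the recurring 2-and-3 compatible idioms applied throughout this commit.
from __future__ import print_function  # print() behaves the same on Python 2 and 3

from six.moves import configparser     # ConfigParser (py2) / configparser (py3)

config = configparser.ConfigParser()
config.read(['example.cfg'])           # hypothetical file name; missing files are ignored

try:
    value = config.get('agent', 'master-url')   # hypothetical section/option
except (configparser.NoSectionError, configparser.NoOptionError) as e:
    # "except ... as e" replaces the Python-2-only "except ..., e" syntax
    print('missing option:', e)
    value = None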
slapos/agent/agent.py  +5 -5
slapos/agent/tester.py  +13 -11
slapos/apachedex.py  +10 -6
slapos/cachechecker.py  +8 -5
slapos/container/__init__.py  +5 -5
slapos/equeue/__init__.py  +10 -10
slapos/generatefeed.py  +1 -1
slapos/lamp/__init__.py  +12 -11
slapos/monitor/collect.py  +37 -40
slapos/monitor/globalstate.py  +16 -11
slapos/monitor/monitor.py  +33 -28
slapos/monitor/monitor_config_write.py  +19 -17
slapos/monitor/runpromise.py  +3 -3
slapos/networkbench/__init__.py  +8 -6
slapos/networkbench/http.py  +4 -3
slapos/networkbench/ping.py  +3 -2
slapos/onetimedownload.py  +3 -3
slapos/onetimeupload/__init__.py  +1 -1
slapos/promise/apache_mpm_watchdog/__init__.py  +4 -2
slapos/promise/check_apachedex_result/__init__.py  +1 -1
slapos/promise/check_computer_memory/__init__.py  +12 -10
slapos/promise/check_slow_queries_digest_result/__init__.py  +4 -2
slapos/promise/check_user_memory/__init__.py  +4 -2
slapos/promise/check_web_page_http_cache_hit/__init__.py  +11 -10
slapos/promise/is_local_tcp_port_opened/__init__.py  +2 -1
slapos/promise/is_process_older_than_dependency_set/__init__.py  +7 -5
slapos/promise/plugin/backupserver_check_backup.py  +17 -17
slapos/promise/plugin/check_error_on_http_log.py  +10 -12
slapos/promise/plugin/check_error_on_zope_longrequest_log.py  +5 -7
slapos/promise/plugin/check_file_state.py  +2 -4
slapos/promise/plugin/check_free_disk_space.py  +6 -7
slapos/promise/plugin/check_icmp_packet_lost.py  +2 -4
slapos/promise/plugin/check_partition_deployment_state.py  +2 -3
slapos/promise/plugin/check_re6st_optimal_status.py  +2 -4
slapos/promise/plugin/check_server_cpu_load.py  +4 -5
slapos/promise/plugin/check_url_available.py  +4 -6
slapos/promise/plugin/monitor_bootstrap_status.py  +3 -4
slapos/pubsub/__init__.py  +1 -1
slapos/qemuqmpclient/__init__.py  +41 -39
slapos/resilient/runner_exporter.py  +4 -2
slapos/resilient/runner_utils.py  +6 -3
slapos/runner/runnertest.py  +12 -9
slapos/runner/sup_process.py  +1 -1
slapos/runner/utils.py  +26 -21
slapos/runner/views.py  +14 -16
slapos/securedelete.py  +5 -2
slapos/systool.py  +4 -2
slapos/test/monitor/test_config_document.py  +14 -8
slapos/test/monitor/testbootstrap.py  +4 -4
slapos/test/monitor/testglobalstate.py  +4 -5
slapos/test/monitor/testrunpromise.py  +42 -28
slapos/test/promise/plugin/test_check_file_state.py  +9 -4
slapos/test/promise/plugin/test_check_url_available.py  +27 -21
slapos/test/promise/test_apache_mpm_watchdog.py  +1 -1
slapos/test/test_agent.py  +55 -53
slapos/test/test_checkfeedaspromise.py  +4 -4
slapos/test/test_generatefeed.py  +3 -2
slapos/test/test_qemuqmpclient.py  +2 -2
slapos/test/test_runner.py  +0 -9
slapos/test/test_runner_exporter.py  +5 -6
slapos/test/test_securedelete.py  +13 -13
slapos/agent/agent.py

@@ -25,7 +25,7 @@
 #
 ##############################################################################
-import ConfigParser
+from six.moves import configparser
 import argparse
 import collections
 import json
@@ -57,7 +57,7 @@ class AutoSTemp(object):
   def __init__(self, value):
     fd, self.__name = tempfile.mkstemp()
-    os.write(fd, value)
+    os.write(fd, value.encode('utf-8'))
     os.close(fd)

   @property
@@ -67,7 +67,7 @@ class AutoSTemp(object):
   def __del__(self):
     self.__unlink(self.__name)

-from tester import SoftwareReleaseTester
+from .tester import SoftwareReleaseTester

 class TestMap(object):
   # tell pytest to skip this class (even if name starts with Test)
@@ -94,7 +94,7 @@ class TestMap(object):
     return set(exclude_list + list(self.ran_test_set))

   def getGroupList(self):
-    return self.test_map_dict.keys()
+    return list(self.test_map_dict)

   def dropGroup(self, group):
     del self.test_map_dict[group]
@@ -217,7 +217,7 @@ def main():
   logger, log_file = getLogger(log, args.verbose)
-  configuration = ConfigParser.SafeConfigParser()
+  configuration = configparser.SafeConfigParser()
   configuration.readfp(args.configuration_file)
   pidfile = args.pidfile
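Two of the changes above are worth spelling out: os.write() only accepts bytes on Python 3, and dict.keys() returns a view object rather than a list. A small sketch, not taken from the repository, assuming nothing beyond the standard library:

# os.write() needs bytes; encode text before writing it to a raw file descriptor.
import os
import tempfile

fd, name = tempfile.mkstemp()
os.write(fd, u'hello\n'.encode('utf-8'))
os.close(fd)
os.unlink(name)

# dict.keys() is a view on Python 3; list(d) gives a real list on both versions.
test_map_dict = {'group-a': [], 'group-b': []}
group_list = list(test_map_dict)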
slapos/agent/tester.py

+from __future__ import print_function
 import datetime
 import json
 import sys
@@ -50,16 +52,16 @@ def retryOnNetworkFailure(func):
     while True:
       try:
         return func(*args, **kwargs)
-      except SAFE_RPC_EXCEPTION_LIST, e:
-        print 'Network failure: %s , %s' % (sys.exc_info(), e)
-      except HTTPError, e:
-        print 'Network failure: %s , %s' % (sys.exc_info(), e)
-      except ConnectionError, e:
-        print 'Network failure: %s , %s' % (sys.exc_info(), e)
-      except slapos.slap.ConnectionError, e:
-        print 'Network failure: %s , %s' % (sys.exc_info(), e)
+      except SAFE_RPC_EXCEPTION_LIST as e:
+        print('Network failure: %s , %s' % (sys.exc_info(), e))
+      except HTTPError as e:
+        print('Network failure: %s , %s' % (sys.exc_info(), e))
+      except ConnectionError as e:
+        print('Network failure: %s , %s' % (sys.exc_info(), e))
+      except slapos.slap.ConnectionError as e:
+        print('Network failure: %s , %s' % (sys.exc_info(), e))

-      print 'Retry method %s in %i seconds' % (func, retry_time)
+      print('Retry method %s in %i seconds' % (func, retry_time))
       time.sleep(retry_time)
       retry_time = min(retry_time*1.5, 640)
@@ -218,7 +220,7 @@ class SlapOSMasterCommunicator(object):
       result = self.hateoas_navigator.GET(url)
       result = json.loads(result)
       if result['_links'].get('action_object_slap', None) is None:
-        print result['links']
+        print(result['links'])
         return None
       object_link = self.hateoas_navigator.hateoasGetLinkFromLinks(
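The retry loop above is mostly a syntax port: the comma form of except is Python-2-only, and print becomes a function. A compact, hypothetical version of the same pattern (the function name and exception types are simplified, not the ones used in tester.py):

from __future__ import print_function
import sys
import time

def retry_on_network_failure(func, retry_time=64):
    # Keep calling func() until it succeeds, backing off between attempts.
    while True:
        try:
            return func()
        except (IOError, OSError) as e:       # "as e" works on Python 2.6+ and 3.x
            print('Network failure: %s , %s' % (sys.exc_info(), e))
        print('Retry in %i seconds' % retry_time)
        time.sleep(retry_time)
        retry_time = min(retry_time * 1.5, 640)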
slapos/apachedex.py

@@ -27,6 +27,8 @@
 #
 ##############################################################################
+from __future__ import print_function
+
 import os, errno
 import subprocess
 import argparse
@@ -52,7 +54,7 @@ def build_command(apachedex_executable, output_file,
     # Automaticaly replace variable 'date'.
     apache_log = logfile.strip() % {'date': today}
     if not os.path.exists(apache_log):
-      print "WARNING: File %s not found..." % apache_log
+      print("WARNING: File %s not found..." % apache_log)
       continue
     log_list.append(apache_log)
   if not log_list:
@@ -81,7 +83,7 @@ def main():
   base_url = args.base_url.strip()
   if not os.path.exists(output_folder) or not os.path.isdir(output_folder):
-    print "ERROR: Output folder is not a directory. Exiting..."
+    print("ERROR: Output folder is not a directory. Exiting...")
     return 1
   today = date.today().strftime("%Y-%m-%d")
@@ -93,21 +95,23 @@ def main():
                                  args.apache_log_list,
                                  config)
   except ValueError as e:
-    print e
+    print(e)
     return 1
   process_handler = subprocess.Popen(argument_list,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE,
+                                     universal_newlines=True,
                                      )
   stdout, stderr = process_handler.communicate()
   if process_handler.returncode != 0:
     if stderr:
-      print stderr
+      print(stderr)
     return 1
-  with open(output_file, 'r') as f:
-    print base_url + '/ApacheDex-%s.html' % today
+  # Check that output_file is a readable file.
+  with open(output_file, 'r'):
+    print(base_url + '/ApacheDex-%s.html' % today)
   return 0

 if __name__ == "__main__":
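The added universal_newlines=True is what keeps the rest of main() unchanged: with it, Popen.communicate() returns text (str) on Python 3 instead of bytes. A minimal sketch, assuming a POSIX echo binary is available:

import subprocess

process = subprocess.Popen(['echo', 'hello'],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE,
                           universal_newlines=True)
stdout, stderr = process.communicate()
print(stdout.strip() == 'hello')   # True: str on both Python 2 and 3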
slapos/cachechecker.py

@@ -27,6 +27,8 @@
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 #
 ##############################################################################
+from __future__ import print_function
+
 import os
 import shutil
 import sys
@@ -35,6 +37,7 @@ import shlex
 from subprocess import Popen, PIPE, STDOUT
 import logging
 from datetime import datetime
+import six

 _MARKER = []
 WGET = 'wget'
@@ -428,7 +431,7 @@ class HTTPCacheCheckerTestSuite(object):
     if self.report_dict:
       report_message_list = ['*Errors*:']
-      for url, message_list in self.report_dict.iteritems():
+      for url, message_list in six.iteritems(self.report_dict):
         unique_message_list = []
         for message in message_list:
           if message not in unique_message_list:
@@ -447,7 +450,7 @@ class HTTPCacheCheckerTestSuite(object):
 from optparse import OptionParser
-import ConfigParser
+from six.moves.configparser import RawConfigParser

 def _formatConfiguration(configuration):
   """ format the configuration"""
@@ -463,11 +466,11 @@ def web_checker_utility():
   (options, args) = parser.parse_args()
   if len(args) != 1:
-    print parser.print_help()
+    print(parser.print_help())
     parser.error('incorrect number of arguments')
   config_path = args[0]
-  config = ConfigParser.RawConfigParser()
+  config = RawConfigParser()
   config.read(config_path)
   working_directory = config.get('web_checker', 'working_directory')
@@ -525,7 +528,7 @@ def web_checker_utility():
     file_object.write(result)
     file_object.close()
   else:
-    print result
+    print(result)

 if __name__ == '__main__':
   sys.exit(web_checker_utility())
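dict.iteritems() no longer exists on Python 3, which is why the reporting loop now goes through six.iteritems(): it resolves to iteritems() on Python 2 and items() on Python 3 without building an intermediate list. A tiny sketch with made-up data:

from __future__ import print_function
import six

report_dict = {'http://example.com/a': ['stale header'], 'http://example.com/b': []}
for url, message_list in six.iteritems(report_dict):
    print(url, len(message_list))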
slapos/container/__init__.py

 # -*- coding: utf-8 -*-
-import ConfigParser
+from six.moves import configparser
 import argparse
-import gdbm
+from six.moves import dbm_gnu as gdbm
 import sys
 import os
@@ -41,7 +41,7 @@ def main():
   run(args)

 def run(args):
-  slapos_conf = ConfigParser.ConfigParser()
+  slapos_conf = configparser.ConfigParser()
   slapos_conf.read(args.configuration_file)
   current_binary = os.path.join(os.getcwd(), sys.argv[0])
@@ -52,7 +52,7 @@ def run(args):
   partition_base_name = slapos_conf.get('slapformat', 'partition_base_name')
   try:
     bridge_name = slapos_conf.get('slapformat', 'interface_name')
-  except ConfigParser.NoOptionError:
+  except configparser.NoOptionError:
     bridge_name = slapos_conf.get('slapformat', 'bridge_name')
   instance_root = slapos_conf.get('slapos', 'instance_root')
   partition_base_path = os.path.join(instance_root, partition_base_name)
@@ -61,7 +61,7 @@ def run(args):
   logging.basicConfig(level=logging.getLevelName(args.log[0]))
-  database = gdbm.open(args.database, 'c', 0600)
+  database = gdbm.open(args.database, 'c', 0o600)
   try:
     process.main(sr_directory, partition_list, database, bridge_name)
   finally:
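Two details here are pure Python 3 requirements: 0600 is a syntax error (octal literals must be written 0o600), and the gdbm module became dbm.gnu, reached portably through six.moves. A sketch assuming the GNU dbm bindings are installed; the database path is made up:

from six.moves import dbm_gnu as gdbm   # gdbm on Python 2, dbm.gnu on Python 3

db = gdbm.open('/tmp/example.db', 'c', 0o600)   # 0o600 is valid on both versions
db[b'key'] = b'value'                           # gdbm stores bytes
db.close()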
slapos/equeue/__init__.py

@@ -28,7 +28,7 @@
 import argparse
 import errno
-import gdbm
+from six.moves import dbm_gnu as gdbm
 import json
 from lockfile import LockFile
 import logging
@@ -38,8 +38,8 @@ import signal
 import socket
 import subprocess
 import sys
-import SocketServer
-import StringIO
+from six.moves import socketserver
+import io
 import threading

 # Copied from erp5.util:erp5/util/testnode/ProcessManager.py
@@ -75,13 +75,13 @@ def subprocess_capture(p, log, log_prefix, get_output=True):
   return (p.stdout and ''.join(stdout),
           p.stderr and ''.join(stderr))

-class EqueueServer(SocketServer.ThreadingUnixStreamServer):
+class EqueueServer(socketserver.ThreadingUnixStreamServer):

   daemon_threads = True

   def __init__(self, *args, **kw):
     self.options = kw.pop('equeue_options')
-    SocketServer.ThreadingUnixStreamServer.__init__(self,
-                                                    RequestHandlerClass=None,
-                                                    *args, **kw)
+    super(EqueueServer, self).__init__(self,
+                                       RequestHandlerClass=None,
+                                       *args, **kw)
     # Equeue Specific elements
@@ -106,7 +106,7 @@ class EqueueServer(SocketServer.ThreadingUnixStreamServer):
     self.logger.addHandler(handler)

   def setDB(self, database):
-    self.db = gdbm.open(database, 'cs', 0700)
+    self.db = gdbm.open(database, 'cs', 0o700)

   def _hasTakeoverBeenTriggered(self):
     if hasattr(self, 'takeover_triggered_file_path') and \
@@ -149,7 +149,7 @@ class EqueueServer(SocketServer.ThreadingUnixStreamServer):
     # Handle request
     self.logger.debug("Connection with file descriptor %d", request.fileno())
     request.settimeout(self.options.timeout)
-    request_string = StringIO.StringIO()
+    request_string = io.StringIO()
     segment = None
     try:
       while segment != '':
@@ -181,7 +181,7 @@ class EqueueServer(SocketServer.ThreadingUnixStreamServer):
 def remove_existing_file(path):
   try:
     os.remove(path)
-  except OSError, e:
+  except OSError as e:
     if e.errno != errno.ENOENT:
       raise
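SocketServer and StringIO are both renamed in Python 3 (socketserver and io.StringIO), which is what the import rewrites above track. A small sketch of the portable spellings; the buffer content is illustrative, and the Unix stream server class is only available on POSIX systems:

from six.moves import socketserver   # SocketServer on Python 2, socketserver on Python 3
import io

base_class = socketserver.ThreadingUnixStreamServer   # base class used by EqueueServer (POSIX only)

request_string = io.StringIO()       # text buffer with the same API on 2 and 3
request_string.write(u'queued command\n')
print(request_string.getvalue())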
slapos/generatefeed.py

@@ -76,7 +76,7 @@ def generateFeed(option):
   # Reduces feed if number of items exceeds max_item
   if len(item_dict) > option.max_item:
-    outdated_key_list = sorted_item_dict.keys()[:-option.max_item]
+    outdated_key_list = list(sorted_item_dict)[:-option.max_item]
     for outdated_key in outdated_key_list:
       del sorted_item_dict[outdated_key]
     deleteFileList(outdated_key_list)
slapos/lamp/__init__.py

@@ -12,6 +12,7 @@
 #
 ##############################################################################
+from __future__ import print_function
 import os
 import time
@@ -76,7 +77,7 @@ def run():
   result = parser.parse_args()
   arguments = dict(result._get_kwargs())
   if arguments['token'] == None and arguments['file_token'] == None:
-    print "lampconfigure: Error: Please specify where condition will be taken, use -d or -f option"
+    print("lampconfigure: Error: Please specify where condition will be taken, use -d or -f option")
     return
   setup(arguments)
@@ -84,7 +85,7 @@ def setup(arguments):
   timeout = 5;
   while True:
     if not checkAction(arguments):
-      print "Waiting for 3s and retrying"
+      print("Waiting for 3s and retrying")
       time.sleep(3)
       continue
     time.sleep(timeout)
@@ -115,9 +116,9 @@ def checkAction(arguments):
                           user = arguments['mysql_user'],
                           passwd = arguments['mysql_password'],
                           db = arguments['token'])
-  except Exception, ex:
+  except Exception as e:
     #Mysql is not ready yet?...
-    print ex.message
+    print(e)
     return False
   if arguments['table'] == "**":
     #only detect if mysql has been started
@@ -145,7 +146,7 @@ def rename(arguments):
   source = os.path.join(arguments['target_directory'], arguments['source'])
   destination = os.path.join(arguments['target_directory'], arguments['destination'])
   if not os.path.exists(source):
-    print "Error when moving: '%s': no such file or directory" % source
+    print("Error when moving: '%s': no such file or directory" % source)
     return
   os.rename(source, destination)
   if arguments['mode'] != None:
@@ -155,7 +156,7 @@ def delete(arguments):
   for path in arguments['delete_target']:
     path = os.path.join(arguments['target_directory'], path)
     if not os.path.exists(path):
-      print "Error when deleting: '%s': no such file or directory" % path
+      print("Error when deleting: '%s': no such file or directory" % path)
      continue
    if os.path.isdir(path):
      shutil.rmtree(path)
@@ -164,7 +165,7 @@ def delete(arguments):

 def run_script(arguments):
   script = os.path.join(arguments['target_directory'], arguments['script'])
-  print 'Running script: %s' % script
+  print('Running script: %s' % script)
   if os.path.exists(script):
     import subprocess
     #run python script with predefined data
@@ -176,12 +177,12 @@ def run_script(arguments):
     result = subprocess.Popen(data, env={'PYTHONPATH': ':'.join(sys.path)})
     result.wait()
   else:
-    print "Error: can not read file '%s'" % script
+    print("Error: can not read file '%s'" % script)

 def run_sql_script(arguments):
   script = os.path.join(arguments['target_directory'], arguments['sql_script'])
-  print 'Running SQL script: %s' % script
+  print('Running SQL script: %s' % script)
   if os.path.exists(script):
     conn = MySQLdb.connect(host = arguments['mysql_host'],
                            port = int(arguments['mysql_port']),
@@ -196,7 +197,7 @@ def run_sql_script(arguments):
     conn.close()
   else:
-    print "Error: can not read file '%s'" % script
+    print("Error: can not read file '%s'" % script)
@@ -204,6 +205,6 @@ def chmod(arguments):
   for path in arguments['chmod_target']:
     path = os.path.join(arguments['target_directory'], path)
     if not os.path.exists(path):
-      print "Error when changing mode: '%s': no such file or directory" % path
+      print("Error when changing mode: '%s': no such file or directory" % path)
       continue
     os.chmod(path, int(arguments['mode'], 8))
slapos/monitor/collect.py

@@ -27,6 +27,8 @@
 #
 ##############################################################################
+from __future__ import division
+
 import sqlite3
 import os
 import pwd
@@ -80,19 +82,17 @@ class ResourceCollect:
                                            table="sqlite_master",
                                            columns='name',
                                            where="type='table' AND name='%s'" % name)
-    table_exists_result = zip(*check_result_cursor)
-    if not len(table_exists_result) or table_exists_result[0][0] is None:
-      return False
-    return True
+    r = check_result_cursor.fetchone()
+    return r and r[0] is not None

   def getPartitionCPULoadAverage(self, partition_id, date_scope):
     return self.consumption_utils.getPartitionCPULoadAverage(partition_id, date_scope)

   def getPartitionUsedMemoryAverage(self, partition_id, date_scope):
-    return self.consumption_utils.getPartitionUsedMemoryAverage(partition_id, date_scope)/(1024*1024.0)
+    return self.consumption_utils.getPartitionUsedMemoryAverage(partition_id, date_scope)/(1024*1024)

   def getPartitionDiskUsedAverage(self, partition_id, date_scope):
-    return self.consumption_utils.getPartitionDiskUsedAverage(partition_id, date_scope)/1024.0
+    return self.consumption_utils.getPartitionDiskUsedAverage(partition_id, date_scope)/1024

   def getPartitionConsumption(self, partition_id, where="", date_scope=None, min_time=None, max_time=None):
     """
@@ -123,10 +123,10 @@ class ResourceCollect:
         resource_dict = {
           'pid': result[6],
           'cpu_percent': round(result[1]/count, 2),
-          'cpu_time': round((result[2] or 0)/(60.0), 2),
+          'cpu_time': round((result[2] or 0)/(60), 2),
           'cpu_num_threads': round(result[3]/count, 2),
           'memory_percent': round(result[4]/count, 2),
-          'memory_rss': round((result[5] or 0)/(1024*1024.0), 2),
+          'memory_rss': round((result[5] or 0)/(1024*1024), 2),
           'io_rw_counter': round(result[7]/count, 2),
           'io_cycles_counter': round(result[8]/count, 2)
         }
@@ -159,23 +159,20 @@ class ResourceCollect:
     query_result = self.db.select('user', date_scope, colums,
                       where="partition='%s' and (time between '%s' and '%s') %s" %
                                 (partition_id, min_time, max_time, where))
-    result_list = zip(*query_result)
-    process_dict = memory_dict = io_dict = {}
-    if len(result_list):
-      result = result_list
-      process_dict = {'total_process': result[0][0],
-        'cpu_percent': round((result[1][0] or 0), 2),
-        'cpu_time': round((result[2][0] or 0)/(60.0), 2),
-        'cpu_num_threads': round((result[3][0] or 0), 2),
-        'date': '%s %s' % (date_scope, min_time)
-      }
-      memory_dict = {'memory_percent': round((result[4][0] or 0), 2),
-        'memory_rss': round((result[5][0] or 0)/(1024*1024.0), 2),
-        'date': '%s %s' % (date_scope, min_time)
-      }
-      io_dict = {'io_rw_counter': round((result[6][0] or 0), 2),
-        'io_cycles_counter': round((result[7][0] or 0), 2),
-        'disk_used': 0,
-        'date': '%s %s' % (date_scope, min_time)
-      }
+    result = query_result.fetchone()
+    process_dict = {'total_process': result[0],
+      'cpu_percent': round((result[1] or 0), 2),
+      'cpu_time': round((result[2] or 0)/(60), 2),
+      'cpu_num_threads': round((result[3] or 0), 2),
+      'date': '%s %s' % (date_scope, min_time)
+    }
+    memory_dict = {'memory_percent': round((result[4] or 0), 2),
+      'memory_rss': round((result[5] or 0)/(1024*1024), 2),
+      'date': '%s %s' % (date_scope, min_time)
+    }
+    io_dict = {'io_rw_counter': round((result[6] or 0), 2),
+      'io_cycles_counter': round((result[7] or 0), 2),
+      'disk_used': 0,
+      'date': '%s %s' % (date_scope, min_time)
+    }
@@ -188,9 +185,9 @@ class ResourceCollect:
       )
     )
-    disk_used_sum = zip(*disk_result_cursor)
-    if len(disk_used_sum) and disk_used_sum[0][0] is not None:
-      io_dict['disk_used'] = round(disk_used_sum[0][0]/1024.0, 2)
+    disk_used_sum, = disk_result_cursor.fetchone()
+    if disk_used_sum is not None:
+      io_dict['disk_used'] = round(disk_used_sum/1024, 2)
     self.db.close()
     return (process_dict, memory_dict, io_dict)
@@ -252,7 +249,7 @@ def main():
   status_file = os.path.join(parser.output_folder, 'monitor_resource.status.json')
   if not os.path.exists(parser.collector_db):
-    print "Collector database not found..."
+    print("Collector database not found...")
     initProcessDataFile(process_file)
     initMemoryDataFile(mem_file)
     initIODataFile(io_file)
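The dropped .0 suffixes rely on the new from __future__ import division at the top of the file: with it, / is true division on Python 2 as well, so dividing by 1024 already yields a float. A two-line illustration:

from __future__ import division

memory_bytes = 3 * 1024 * 1024
print(memory_bytes / (1024 * 1024))    # 3.0 on both Python 2 and 3 (true division)
print(memory_bytes // (1024 * 1024))   # 3, when floor division is really wanted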
slapos/monitor/globalstate.py

 #!/usr/bin/env python
+from __future__ import print_function
 import sys
 import os
 import glob
 import json
-import ConfigParser
+from six.moves import configparser
 import time
 from datetime import datetime
 import base64
 import hashlib
 import PyRSS2Gen
+from slapos.util import bytes2str, str2bytes

 def getKey(item):
   return item.source.name
@@ -30,6 +34,8 @@ class MonitorFeed(object):
     event_date = item_dict['result']['change-date']
     report_date = item_dict['result']['date']
     description = item_dict['result'].get('message', '')
+    guid = base64.b64encode(str2bytes("%s, %s, %s, %s" % (self.hosting_name,
+      item_dict['title'], has_string, event_date)))
     rss_item = PyRSS2Gen.RSSItem(
       categories=[item_dict['status']],
       source=PyRSS2Gen.Source(item_dict['title'], self.public_url),
@@ -37,9 +43,7 @@ class MonitorFeed(object):
       description="\n%s" % (description,),
       link=self.private_url,
       pubDate=event_date,
-      guid=PyRSS2Gen.Guid(base64.b64encode("%s, %s, %s, %s" % (self.hosting_name,
-        item_dict['title'], has_string, event_date)), isPermaLink=False)
+      guid=PyRSS2Gen.Guid(bytes2str(guid), isPermaLink=False)
     )
     self.rss_item_list.append(rss_item)
@@ -69,7 +73,7 @@ def generateStatisticsData(stat_file_path, content):
     fstat.write(json.dumps(data_dict))

   current_state = ''
-  if content.has_key('state'):
+  if 'state' in content:
     current_state = '%s, %s, %s, %s' % (
       content['date'],
       content['state']['success'],
@@ -131,13 +135,14 @@ def generateMonitoringData(config, public_folder, private_folder, public_url,
         promise_status = "OK"
         success += 1
       tmp_json['result']['change-date'] = tmp_json['result']['date']
-      if previous_state_dict.has_key(tmp_json['name']):
+      if tmp_json['name'] in previous_state_dict:
         status, change_date, _ = previous_state_dict[tmp_json['name']]
         if promise_status == status:
           tmp_json['result']['change-date'] = change_date

       tmp_json['status'] = promise_status
-      message_hash = hashlib.md5(tmp_json['result'].get('message', '')).hexdigest()
+      message_hash = hashlib.md5(str2bytes(tmp_json['result'].get('message', ''))).hexdigest()
       new_state_dict[tmp_json['name']] = [
         promise_status,
         tmp_json['result']['change-date'],
@@ -150,9 +155,9 @@ def generateMonitoringData(config, public_folder, private_folder, public_url,
           previous_state_dict.get(tmp_json['name']),
           public_folder
         )
-    except ValueError, e:
+    except ValueError as e:
       # bad json file
-      print "ERROR: Bad json file at: %s\n%s" % (file, str(e))
+      print("ERROR: Bad json file at: %s\n%s" % (file, e))
       continue

   with open(promises_status_file, "w") as f:
@@ -187,7 +192,7 @@ def savePromiseHistory(promise_name, state_dict, previous_state_list,
   else:
     if previous_state_list is not None:
       _, change_date, checksum = previous_state_list
-      current_sum = hashlib.md5(state_dict.get('message', '')).hexdigest()
+      current_sum = hashlib.md5(str2bytes(state_dict.get('message', ''))).hexdigest()
       if state_dict['change-date'] == change_date and \
           current_sum == checksum:
         # Only save the changes and not the same info
@@ -202,7 +207,7 @@ def savePromiseHistory(promise_name, state_dict, previous_state_list,

 def run(monitor_conf_file):
-  config = ConfigParser.ConfigParser()
+  config = configparser.ConfigParser()
   config.read(monitor_conf_file)
   base_folder = config.get('monitor', 'private-folder')
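hashlib.md5() only accepts bytes on Python 3, hence the str2bytes() wrapper (a slapos.util helper) around the promise message before hashing. The same effect with plain standard-library calls, as a sketch:

import hashlib

message = u'promise failed'
message_hash = hashlib.md5(message.encode('utf-8')).hexdigest()
print(message_hash)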
slapos/monitor/monitor.py

 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+from __future__ import print_function
 import sys
 import os
 import stat
 import json
-import ConfigParser
+from six.moves import configparser
 import traceback
 import argparse
-import urllib2
+from six.moves.urllib.request import urlopen
+from six.moves.urllib.error import HTTPError
 import ssl
 import glob
 import socket
 from datetime import datetime
 from xml.sax.saxutils import escape
+from slapos.util import bytes2str

 OPML_START = """<?xml version="1.0" encoding="UTF-8"?>
 <!-- OPML generated by SlapOS -->
 <opml version="1.1">
@@ -47,7 +52,7 @@ def parseArguments():
 def mkdirAll(path):
   try:
     os.makedirs(path)
-  except OSError, e:
+  except OSError as e:
     if e.errno == os.errno.EEXIST and os.path.isdir(path):
       pass
     else: raise
@@ -55,13 +60,13 @@ def mkdirAll(path):
 def softConfigGet(config, *args, **kwargs):
   try:
     return config.get(*args, **kwargs)
-  except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
+  except (configparser.NoOptionError, configparser.NoSectionError):
     return None

 def createSymlink(source, destination):
   try:
     os.symlink(source, destination)
-  except OSError, e:
+  except OSError as e:
     if e.errno != os.errno.EEXIST:
       raise
@@ -99,10 +104,10 @@ class Monitoring(object):
   def loadConfig(self, pathes, config=None):
     if config is None:
-      config = ConfigParser.ConfigParser()
+      config = configparser.ConfigParser()
     try:
       config.read(pathes)
-    except ConfigParser.MissingSectionHeaderError:
+    except configparser.MissingSectionHeaderError:
       traceback.print_exc()
     return config
@@ -131,8 +136,8 @@ class Monitoring(object):
         try:
           with open(config_list[2]) as cfile:
             param_value = cfile.read()
-        except OSError, e:
-          print 'Cannot read file %s, Error is: %s' % (config_list[2], str(e))
+        except OSError as e:
+          print('Cannot read file %s, Error is: %s' % (config_list[2], e))
           pass
       else:
         param_value = ""
@@ -147,7 +152,7 @@ class Monitoring(object):
         )
         if config_list[0] == 'htpasswd':
           if len(config_list) != 5 or not os.path.exists(config_list[4]):
-            print 'htpasswd file is not specified: %s' % str(config_list)
+            print('htpasswd file is not specified: %s' % config_list)
             continue
           parameter['description']['user'] = config_list[3]
           parameter['description']['htpasswd'] = config_list[4]
@@ -178,8 +183,8 @@ class Monitoring(object):
             }
           )
           configuration_list.append(parameter)
-      except OSError, e:
-        print 'Cannot read file at %s, Error is: %s' % (old_cors_file, str(e))
+      except OSError as e:
+        print('Cannot read file at %s, Error is: %s' % (old_cors_file, e))
         pass
     return configuration_list
@@ -192,7 +197,7 @@ class Monitoring(object):
     try:
       mkdirAll(dirname)  # could also raise OSError
       os.symlink(path, os.path.join(dirname, os.path.basename(path)))
-    except OSError, e:
+    except OSError as e:
       if e.errno != os.errno.EEXIST:
         raise
@@ -212,20 +217,20 @@ class Monitoring(object):
       # XXX - working here with public url
       if hasattr(ssl, '_create_unverified_context'):
         context = ssl._create_unverified_context()
-        response = urllib2.urlopen(url, context=context, timeout=timeout)
+        response = urlopen(url, context=context, timeout=timeout)
       else:
-        response = urllib2.urlopen(url, timeout=timeout)
-    except urllib2.HTTPError:
-      print "ERROR: Failed to get Monitor configuration file at %s " % url
+        response = urlopen(url, timeout=timeout)
+    except HTTPError:
+      print("ERROR: Failed to get Monitor configuration file at %s " % url)
     except (socket.timeout, ssl.SSLError) as e:
-      print "ERROR: Timeout with %r while downloading monitor config at %s " % (e, url)
+      print("ERROR: Timeout with %r while downloading monitor config at %s " % (e, url))
     else:
       try:
-        monitor_dict = json.loads(response.read())
+        monitor_dict = json.loads(bytes2str(response.read()))
         monitor_title = monitor_dict.get('title', 'Unknown Instance')
         success = True
-      except ValueError, e:
-        print "ERROR: Json file at %s is not valid" % url
+      except ValueError as e:
+        print("ERROR: Json file at %s is not valid" % url)

     self.bootstrap_is_ok = success
     return monitor_title
@@ -267,8 +272,8 @@ class Monitoring(object):
         for parameter in parameter_list:
           if parameter['key']:
             pfile.write('%s = %s\n' % (parameter['key'], parameter['value']))
-    except OSError, e:
-      print "Error failed to create file %s" % self.parameter_cfg_file
+    except OSError as e:
+      print("Error failed to create file %s" % self.parameter_cfg_file)
      pass
@@ -317,8 +322,8 @@ class Monitoring(object):
       try:
         if os.path.exists(file):
           os.unlink(file)
-      except OSError, e:
-        print "failed to remove file %s." % file, str(e)
+      except OSError as e:
+        print("failed to remove file %s." % file, e)

     # cleanup result of promises that was removed
     promise_list = os.listdir(self.legacy_promise_folder)
@@ -336,8 +341,8 @@ class Monitoring(object):
         if os.path.exists(status_path):
           try:
             os.unlink(status_path)
-          except OSError, e:
-            print "Error: Failed to delete %s" % status_path, str(e)
+          except OSError as e:
+            print("Error: Failed to delete %s" % status_path, e)
         else:
           promise_list.pop(position)
@@ -366,7 +371,7 @@ class Monitoring(object):
     if self.bootstrap_is_ok:
       with open(self.promise_output_file, 'w') as promise_file:
         promise_file.write("")
-      print "SUCCESS: bootstrap is OK"
+      print("SUCCESS: bootstrap is OK")
     return 0
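urllib2 was split into urllib.request and urllib.error in Python 3; six.moves exposes one import path for both interpreters, and response.read() returns bytes on Python 3, hence the bytes2str() call before json.loads(). A hypothetical helper showing the same shape (the function name is illustrative, not the one in monitor.py):

from six.moves.urllib.request import urlopen
from six.moves.urllib.error import HTTPError

def fetch_json_config(url, timeout=10):
    try:
        return urlopen(url, timeout=timeout).read()   # bytes on Python 3
    except HTTPError as e:
        print('ERROR: failed to fetch %s: %s' % (url, e))
        return None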
slapos/monitor/monitor_config_write.py

 #!/usr/bin/env python
+from __future__ import print_function
 import sys
 import os
 import re
@@ -38,11 +40,11 @@ class MonitorConfigWrite(object):
   def _fileWrite(self, file_path, content):
     try:
       with open(file_path, 'w') as wf:
-        print file_path, content
+        print(file_path, content)
         wf.write(content.strip())
       return True
-    except OSError, e:
-      print "ERROR while writing changes to %s.\n%s" % (file_path, str(e))
+    except OSError as e:
+      print("ERROR while writing changes to %s.\n%s" % (file_path, e))
       return False

   def _htpasswdWrite(self, htpasswd_bin, parameter_dict, value):
@@ -55,7 +57,7 @@ class MonitorConfigWrite(object):
     )
     result = process.communicate()[0]
     if process.returncode != 0:
-      print result
+      print(result)
       return False
     with open(parameter_dict['file'], 'w') as pfile:
       pfile.write(value)
@@ -76,31 +78,31 @@ class MonitorConfigWrite(object):
           or (cors_domain == "" and os.stat(httpd_cors_file).st_size == 0)):
         # Skip if cors file is not empty
         return True
-    except OSError, e:
-      print "Failed to open file at %s.\n%s" % (old_httpd_cors_file, str(e))
+    except OSError as e:
+      print("Failed to open file at %s.\n%s" % (old_httpd_cors_file, e))

     try:
       with open(self.monitor_https_cors, 'r') as cors_template:
         template = jinja2.Template(cors_template.read())
         rendered_string = template.render(domain=cors_domain)
         with open(httpd_cors_file, 'w') as file:
           file.write(rendered_string)
-    except OSError, e:
-      print "ERROR while writing CORS changes to %s.\n%s" % (httpd_cors_file, str(e))
+    except OSError as e:
+      print("ERROR while writing CORS changes to %s.\n%s" % (httpd_cors_file, e))
       return False

     # Save current cors domain list
     try:
       with open(old_httpd_cors_file, 'w') as cors_file:
         cors_file.write(cors_domain)
-    except OSError, e:
-      print "Failed to open file at %s.\n%s" % (old_httpd_cors_file, str(e))
+    except OSError as e:
+      print("Failed to open file at %s.\n%s" % (old_httpd_cors_file, e))
       return False

     # Restart httpd process
     try:
       subprocess.call(httpd_gracefull_bin)
-    except OSError, e:
-      print "Failed to execute command %s.\n%s" % (httpd_gracefull_bin, str(e))
+    except OSError as e:
+      print("Failed to execute command %s.\n%s" % (httpd_gracefull_bin, e))
       return False
     return True
@@ -122,7 +124,7 @@ class MonitorConfigWrite(object):
         with open(self.config_json_file) as tmpfile:
           new_parameter_list = json.loads(tmpfile.read())
       except ValueError:
-        print "Error: Couldn't parse json file %s" % self.config_json_file
+        print("Error: Couldn't parse json file %s" % self.config_json_file)

       with open(parameter_config_file) as tmpfile:
         description_dict = json.loads(tmpfile.read())
@@ -156,8 +158,8 @@ class MonitorConfigWrite(object):
         for parameter in new_parameter_list:
           if parameter['key']:
             pfile.write('%s = %s\n' % (parameter['key'], parameter['value']))
-    except OSError, e:
-      print "Error failed to create file %s" % self.output_cfg_file
+    except OSError as e:
+      print("Error failed to create file %s" % self.output_cfg_file)
       pass

     return result_dict
@@ -190,8 +192,8 @@ def main():
     if status and os.path.exists(parameter_tmp_file):
       try:
         os.unlink(config_file)
-      except OSError, e:
-        print "ERROR cannot remove file: %s" % parameter_tmp_file
+      except OSError as e:
+        print("ERROR cannot remove file: %s" % parameter_tmp_file)
       else:
         os.rename(parameter_tmp_file, config_file)

     if run_counter == max_runn:
slapos/monitor/runpromise.py

@@ -13,7 +13,7 @@ import glob
 import argparse
 import traceback
 import logging
-import ConfigParser
+from six.moves import configparser
 from slapos.grid.promise import PromiseLauncher, PromiseQueueResult, PromiseError
 from slapos.grid.promise.generic import PROMISE_LOG_FOLDER_NAME
 from slapos.util import mkdir_p
@@ -92,7 +92,7 @@ class MonitorPromiseLauncher(object):
   def _loadConfigFromFile(self, config_file):
-    config = ConfigParser.ConfigParser()
+    config = configparser.ConfigParser()
     config.read([config_file])
     known_key_list = ['partition-cert', 'partition-key', 'partition-id',
                       'pid-path', 'computer-id', 'check-anomaly',
@@ -159,7 +159,7 @@ class MonitorPromiseLauncher(object):
     exit_code = 0
     try:
       promise_launcher.run()
-    except PromiseError, e:
+    except PromiseError as e:
       # error was already logged
       exit_code = 1
     os.remove(self.config.pid_path)
slapos/networkbench/__init__.py

+from __future__ import print_function
 import socket
 import logging
 import time
@@ -11,10 +13,10 @@ import random
 import pycurl
 import argparse
 import json
-from StringIO import StringIO
-from ping import ping, ping6
-from dnsbench import resolve
-from http import get_curl, request
+from io import StringIO
+from .ping import ping, ping6
+from .dnsbench import resolve
+from .http import get_curl, request
 import textwrap

 class HelpFormatter(argparse.ArgumentDefaultsHelpFormatter):
@@ -62,11 +64,11 @@ def download_external_configuration(url):
   try:
     return json.loads(buffer.getvalue())
   except ValueError:
-    print "Unable to parse external configuration, error:"
+    print("Unable to parse external configuration, error:")
     import traceback
     traceback.print_exc(file=sys.stderr)
     sys.stderr.flush()
-    print "Ignoring external configuration"
+    print("Ignoring external configuration")
   finally:
     curl.close()
slapos/networkbench/http.py  View file @ 1c8269b2

 import sys
 import pycurl
-from StringIO import StringIO
+from io import BytesIO
+from slapos.util import bytes2str
 def get_curl(buffer, url):
   curl = pycurl.Curl()
...
@@ -22,7 +23,7 @@ def get_curl(buffer, url):
 def request(url, expected_dict):
-  buffer = StringIO()
+  buffer = BytesIO()
   curl, result = get_curl(buffer, url)
   body = buffer.getvalue()
...
@@ -43,7 +44,7 @@ def request(url, expected_dict):
   expected_text = expected_dict.get("expected_text", None)
   if expected_text is not None and \
-    str(expected_text) not in str(body):
+    str(expected_text) not in bytes2str(body):
     result = "UNEXPECTED (%s not in page content)" % (expected_text)
   curl.close()
...
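pycurl hands raw bytes to the WRITEFUNCTION target, so on Python 3 the buffer must be a BytesIO and the body has to be converted to text before substring comparison (the diff uses slapos.util.bytes2str for that; plain decode() stands in for it in this hedged sketch):

    import pycurl
    from io import BytesIO

    buffer = BytesIO()  # pycurl writes bytes, not str
    curl = pycurl.Curl()
    curl.setopt(pycurl.URL, 'http://example.com/')  # illustrative URL
    curl.setopt(pycurl.WRITEFUNCTION, buffer.write)
    curl.perform()
    curl.close()

    body = buffer.getvalue().decode('utf-8', 'replace')  # stand-in for bytes2str(body)
    print('expected text' in body)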
slapos/networkbench/ping.py  View file @ 1c8269b2
...
@@ -22,14 +22,15 @@ def ping(host, timeout=10, protocol="4", count=10):
     test_title = 'PING6'
   proc = subprocess.Popen((ping_bin, '-c', str(count), '-w', str(timeout), host),
-                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+                          universal_newlines=True,
+                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
   out, err = proc.communicate()
   if 'Network is unreachable' in err:
     return (test_title, host, 600, 'failed', 100, "Network is unreachable")
   try:
     packet_loss_line, summary_line = (out.splitlines() or [''])[-2:]
-  except:
+  except Exception:
     return (test_title, host, 600, 'failed', -1, "Fail to parser ping output")
   m = ping_re.match(summary_line)
   match = re.search('(\d*)% packet loss', packet_loss_line)
...
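Passing universal_newlines=True makes Popen.communicate() return text (str) on both Python 2 and Python 3, so checks such as 'Network is unreachable' in err keep working without explicit decoding. A sketch with an illustrative command:

    import subprocess

    proc = subprocess.Popen(('ping', '-c', '1', '-w', '1', 'localhost'),
                            universal_newlines=True,  # decode stdout/stderr to str
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    print('Network is unreachable' in err)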
slapos/onetimedownload.py  View file @ 1c8269b2
...
@@ -27,7 +27,7 @@
 import os
 import sys
 from optparse import OptionParser, Option
-import urllib
+from six.moves.urllib.request import urlopen
 class Parser(OptionParser):
...
@@ -55,7 +55,7 @@ class Config:
     self.file_path = file_path
 def onetimedownload(url, file_path):
-  url_file = urllib.urlopen(url)
+  url_file = urlopen(url)
   data = url_file.read()
   file_object = open(file_path, 'w')
   file_object.write(data)
...
@@ -72,7 +72,7 @@ def main():
     onetimedownload(config.url, config.file_path)
     return_code = 0
-  except SystemExit, err:
+  except SystemExit as err:
     # Catch exception raise by optparse
     return_code = err
...
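Python 3 moved urlopen from urllib to urllib.request; six.moves.urllib.request resolves to whichever is available. A hedged sketch of the download step (URL and output path are illustrative; urlopen().read() returns bytes on Python 3, hence the binary mode used here):

    from six.moves.urllib.request import urlopen

    url_file = urlopen('http://example.com/')  # illustrative URL
    data = url_file.read()                     # bytes on Python 3
    with open('/tmp/downloaded', 'wb') as file_object:  # illustrative path, binary mode for bytes
        file_object.write(data)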
slapos/onetimeupload/__init__.py  View file @ 1c8269b2
...
@@ -124,7 +124,7 @@ def main():
     run(config)
     return_code = 0
-  except SystemExit, err:
+  except SystemExit as err:
     # Catch exception raise by optparse
     return_code = err
...
slapos/promise/apache_mpm_watchdog/__init__.py  View file @ 1c8269b2

+from __future__ import print_function
 import requests
 import re
 import signal
...
@@ -56,11 +58,11 @@ def watchServerStatus(pid_dict, server_status, timeout):
     if process.cmdline()[0].endswith("/httpd"):
       _pid_dict.setdefault(i, time.time() + timeout)
       if _pid_dict[i] < time.time():
-        print "Sending signal -%s to %s" % (signal.SIGKILL, i)
+        print("Sending signal -%s to %s" % (signal.SIGKILL, i))
         try:
           process.kill()
         except psutil.NoSuchProcess:
-          print "Process is not there anymore"
+          print("Process is not there anymore")
           continue
   return _pid_dict
...
slapos/promise/check_apachedex_result/__init__.py  View file @ 1c8269b2
...
@@ -47,7 +47,7 @@ def checkApachedexResult(apachedex_path, apachedex_report_status_file, desired_t
   with open(apachedex_report_status_file) as f:
     try:
       json_content = json.load(f)
-    except ValueError, e:
+    except ValueError as e:
       json_content = ''
   if json_content:
     message += "\n" + json_content["message"]
...
slapos/promise/check_computer_memory/__init__.py  View file @ 1c8269b2
...
@@ -7,6 +7,8 @@ Uses:
 - /proc/meminfo
 """
+from __future__ import print_function
 import sys
 import sqlite3
 import argparse
...
@@ -21,21 +23,21 @@ def getMemoryInfo(database, time, date):
   try:
     database.connect()
     query_result = database.select("computer", date, "memory_size", limit=1)
-    result = zip(*query_result)
-    if not result or not result[0][0]:
+    r = query_result.fetchone()
+    if not r or not r[0]:
       return (None, "couldn't fetch total memory, collectordb is empty?")
-    memory_info['total'] = int(result[0][0])  # in byte
+    memory_info['total'] = int(r[0])  # in byte
     # fetch free and used memory
     where_query = "time between '%s:00' and '%s:30' " % (time, time)
     query_result = database.select("system", date, "memory_free, memory_used", where=where_query)
-    result = zip(*query_result)
-    if not result or not result[0][0]:
+    r = query_result.fetchone()
+    if not r or not r[0]:
      return (None, "couldn't fetch free memory")
-    memory_info['free'] = int(result[0][0])  # in byte
-    if not result or not result[1][0]:
+    memory_info['free'] = int(r[0])  # in byte
+    if not r or not r[1]:
      return (None, "couldn't fetch used memory")
-    memory_info['used'] = int(result[1][0])  # in byte
+    memory_info['used'] = int(r[1])  # in byte
   finally:
     database.close()
...
@@ -95,9 +97,9 @@ def main():
     unit=args.unit,
   )
   if error:
-    print error
+    print(error)
     return 0
-  print message
+  print(message)
   return 0 if result else 1
 if __name__ == "__main__":
...
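The reason for dropping result = zip(*query_result) above: on Python 3, zip() returns a lazy iterator, so the old emptiness test `if not result` is always false and indexing the object raises; fetching a single row from the cursor behaves the same on both versions. A sketch against a plain sqlite3 connection (the collector's Database wrapper is replaced here by sqlite3 as an assumption):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE computer (memory_size INTEGER)')
    conn.execute('INSERT INTO computer VALUES (17179869184)')  # illustrative value, in bytes

    query_result = conn.execute('SELECT memory_size FROM computer LIMIT 1')
    r = query_result.fetchone()  # a tuple, or None when the table is empty
    if not r or not r[0]:
        print("couldn't fetch total memory, collectordb is empty?")
    else:
        print(int(r[0]))  # in byte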
slapos/promise/check_slow_queries_digest_result/__init__.py  View file @ 1c8269b2
...
@@ -4,6 +4,8 @@
 Check if a mariadb result matches the desired threshold or raises an error.
 """
+from __future__ import print_function
 import json
 import os
 import re
...
@@ -58,7 +60,7 @@ def checkMariadbDigestResult(mariadbdex_path, mariadbdex_report_status_file,
   with open(mariadbdex_report_status_file) as f:
     try:
       json_content = json.load(f)
-    except ValueError, e:
+    except ValueError as e:
       json_content = ''
   if json_content:
     message += "\n" + json_content["message"]
...
@@ -76,5 +78,5 @@ def main():
     args.max_queries_threshold, args.slowest_query_threshold
   )
-  print message
+  print(message)
   sys.exit(status)
slapos/promise/check_user_memory/__init__.py  View file @ 1c8269b2
...
@@ -4,6 +4,8 @@
 Check user memory usage according to a given threshold.
 """
+from __future__ import print_function
 import sys
 import os
 import argparse
...
@@ -87,9 +89,9 @@ def main():
     unit=args.unit,
   )
   if error:
-    print error
+    print(error)
     return 0
-  print message
+  print(message)
   return 0 if result else 1
 if __name__ == "__main__":
...
slapos/promise/check_web_page_http_cache_hit/__init__.py  View file @ 1c8269b2
...
@@ -7,14 +7,14 @@ import sys
 import tempfile
 import os
 import argparse
-import ConfigParser
+from six.moves import configparser
 import re
 import pycurl
-from mimetools import Message
+from email.message import Message
-from cStringIO import StringIO
+from io import BytesIO
-from HTMLParser import HTMLParser
+from six.moves.html_parser import HTMLParser
 begins_by_known_protocol_re = re.compile("^https?://")
 get_protocol_re = re.compile("^([a-z]+)://")
...
@@ -63,8 +63,8 @@ def checkWebpageHttpCacheHit(url_list, resolve_list=[], cookie_jar_path=None):
     parsed_url_dict.add(url)
     print("Checking cache hit for " + url)
     c = pycurl.Curl()
-    response_headers = StringIO()
-    output = StringIO()
+    response_headers = BytesIO()
+    output = BytesIO()
     c.setopt(c.URL, url)
     c.setopt(c.RESOLVE, resolve_list)
     c.setopt(c.WRITEFUNCTION, output.write)
...
@@ -76,8 +76,9 @@ def checkWebpageHttpCacheHit(url_list, resolve_list=[], cookie_jar_path=None):
     response_headers.truncate(0)
     output.truncate(0)
     c.perform()
-    if str(c.getinfo(pycurl.HTTP_CODE))[0:1] != "2":
-      if c.getinfo(pycurl.HTTP_CODE) >= 400:
+    code = c.getinfo(pycurl.HTTP_CODE)
+    if not (200 <= code < 300):
+      if code >= 400:
         report_line_list.append("Status code %s received for %s" % (c.getinfo(pycurl.HTTP_CODE), url))
       else:
         print("Status code %s not handled" % c.getinfo(pycurl.HTTP_CODE))
...
@@ -119,7 +120,7 @@ def checkWebpageHttpCacheHit(url_list, resolve_list=[], cookie_jar_path=None):
 def getConfig(config_parser, section, option, default=None, raw=False, vars=None):
   try:
     return config_parser.get(section, option, raw=raw, vars=vars)
-  except ConfigParser.NoOptionError:
+  except configparser.NoOptionError:
     return default
 def main():
...
@@ -132,7 +133,7 @@ def main():
   args.url_list = getattr(args, "url-list")
   if args.config is not None:
-    parser = ConfigParser.ConfigParser()
+    parser = configparser.ConfigParser()
     parser.read(args.config)
   if args.url_list == []:
     args.url_list = getConfig(parser, "public", "url-list", "").split()
...
slapos/promise/is_local_tcp_port_opened/__init__.py  View file @ 1c8269b2
...
@@ -28,7 +28,8 @@ def isLocalTcpPortOpened(ip_address, port):
   ip_addr_hex = ('%08X' * int_count) % struct.unpack('I' * int_count, socket.inet_pton(family, ip_address))
   full_addr_hex = ip_addr_hex + ":%04X" % port
-  return any(full_addr_hex == line.split()[1] for line in open(tcp_path).readlines())
+  with open(tcp_path) as f:
+    return any(full_addr_hex == line.split()[1] for line in f.readlines())
 def main():
   if isLocalTcpPortOpened(sys.argv[1], int(sys.argv[2])):
...
slapos/promise/is_process_older_than_dependency_set/__init__.py  View file @ 1c8269b2
...
@@ -8,6 +8,8 @@ a file modification date is greater than the start date of the
 process.
 """
+from __future__ import print_function
 import sys
 import os
 import errno
...
@@ -29,10 +31,10 @@ def moduleIsModifiedSince(top, since, followlinks=False):
       if ext in ignored_extension_set:
         continue
       if since < os.stat(os.path.join(root, name)).st_mtime:
-        print "%s was modified since the process started." % \
-          os.path.join(root, name)
-        print "Process Time %s < Last modidified file %s" % (time.ctime(since),
-          time.ctime(os.stat(os.path.join(root, name)).st_mtime))
+        print("%s was modified since the process started." %
+          os.path.join(root, name))
+        print("Process Time %s < Last modidified file %s" % (time.ctime(since),
+          time.ctime(os.stat(os.path.join(root, name)).st_mtime)))
         return True
   return False
...
@@ -41,7 +43,7 @@ def isProcessOlderThanDependencySet(pid, python_path_list, kill=False):
   start_time = process.create_time()
   if any(moduleIsModifiedSince(product_path, start_time) for product_path in python_path_list):
     if kill:
-      print "Terminating process %s with pid %s" % (process.name(), pid)
+      print("Terminating process %s with pid %s" % (process.name(), pid))
       process.terminate()
     return True
   return False
...
slapos/promise/plugin/backupserver_check_backup.py  View file @ 1c8269b2

-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise
 from slapos.grid.promise.generic import TestResult
...
@@ -12,10 +12,9 @@ from croniter import croniter
 from dateutil.parser import parse
 from tzlocal import get_localzone
+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):
-  zope_interface.implements(interface.IPromise)
   def __init__(self, config):
     GenericPromise.__init__(self, config)
     # check backup ran OK every 5 minutes
...
@@ -52,7 +51,8 @@ class RunPromise(GenericPromise):
     # First, parse the log file
     backup_started = False
     backup_ended = False
-    for line in open(status, 'r'):
+    with open(status, 'r') as f:
+      for line in f:
       m = re.match(r"(.*), (.*), (.*), backup (.*)$", line)
       if m:
         if m.group(4) == "running":
...
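zope.interface's in-class `implements()` call relies on Python 2-only class-advice magic; the `@implementer` class decorator is the portable replacement, and it is what every promise plugin switches to in this commit. A minimal sketch (IPromise is stubbed here; the real one lives in slapos.grid.promise.interface):

    from zope.interface import Interface, implementer

    class IPromise(Interface):  # stand-in for slapos.grid.promise.interface.IPromise
        """Marker interface for promise plugins."""

    @implementer(IPromise)
    class RunPromise(object):
        def __init__(self, config):
            self.config = config

    print(IPromise.implementedBy(RunPromise))  # True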
slapos/promise/plugin/check_error_on_http_log.py  View file @ 1c8269b2

-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise, TestResult
 import re
 import time
 import os
+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):
-  zope_interface.implements(interface.IPromise)
   def __init__(self, config):
     GenericPromise.__init__(self, config)
     # set periodicity to run the promise twice per day
...
@@ -26,7 +24,7 @@ class RunPromise(GenericPromise):
     if not log_file:
       raise ValueError("log file was not set in promise parameters.")
-    regex = re.compile("^(\[[^\]]+\]) (\[[^\]]+\]) (.*)$")
+    regex = re.compile(br"^(\[[^\]]+\]) (\[[^\]]+\]) (.*)$")
     error_amount = 0
     no_route_error = 0
     network_is_unreachable = 0
...
@@ -38,7 +36,7 @@ class RunPromise(GenericPromise):
       self.logger.info("OK")
       return
-    with open(log_file) as f:
+    with open(log_file, "rb") as f:
       f.seek(0, 2)
       block_end_byte = f.tell()
       f.seek(-min(block_end_byte, 4096), 1)
...
@@ -50,10 +48,10 @@ class RunPromise(GenericPromise):
       dt, level, msg = m.groups()
       try:
         try:
-          t = time.strptime(dt[1:-1], "%a %b %d %H:%M:%S %Y")
+          t = time.strptime(dt[1:-1].decode('utf-8'), "%a %b %d %H:%M:%S %Y")
         except ValueError:
           # Fail to parser for the first time, try a different output.
-          t = time.strptime(dt[1:-1], "%a %b %d %H:%M:%S.%f %Y")
+          t = time.strptime(dt[1:-1].decode('utf-8'), "%a %b %d %H:%M:%S.%f %Y")
       except ValueError:
         # Probably it fail to parse
         if parsing_failure < 3:
...
@@ -65,14 +63,14 @@ class RunPromise(GenericPromise):
       if maximum_delay and (time.time() - time.mktime(t)) > maximum_delay:
         # no result in the latest hour
         break
-      if level != "[error]":
+      if level != b"[error]":
         continue
       # Classify the types of errors
-      if "(113)No route to host" in msg:
+      if b"(113)No route to host" in msg:
         no_route_error += 1
-      elif "(101)Network is unreachable" in msg:
+      elif b"(101)Network is unreachable" in msg:
         network_is_unreachable += 1
-      elif "(110)Connection timed out" in msg:
+      elif b"(110)Connection timed out" in msg:
         timeout += 1
       error_amount += 1
     if error_amount:
...
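Opening the log in binary mode avoids decoding errors on arbitrary log content, but it means the regex, the level comparison and the substring checks must all use bytes literals, and only the timestamp is decoded before time.strptime. A hedged sketch of that flow on a single fabricated log line:

    import re
    import time

    line = b'[Thu Jun 27 12:00:00 2019] [error] (113)No route to host: proxy error'  # fabricated
    regex = re.compile(br"^(\[[^\]]+\]) (\[[^\]]+\]) (.*)$")  # bytes pattern for bytes input

    m = regex.match(line)
    if m:
        dt, level, msg = m.groups()  # all bytes
        t = time.strptime(dt[1:-1].decode('utf-8'), "%a %b %d %H:%M:%S %Y")
        if level == b"[error]" and b"(113)No route to host" in msg:
            print("no route error logged at %s" % time.mktime(t))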
slapos/promise/plugin/check_error_on_zope_longrequest_log.py  View file @ 1c8269b2

-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise
 import time
...
@@ -7,12 +7,10 @@ import os
 import sys
 import re
-r = re.compile("^([0-9]+\-[0-9]+\-[0-9]+ [0-9]+\:[0-9]+\:[0-9]+)(\,[0-9]+) - ([A-z]+) (.*)$")
+r = re.compile(br"^([0-9]+\-[0-9]+\-[0-9]+ [0-9]+\:[0-9]+\:[0-9]+)(\,[0-9]+) - ([A-z]+) (.*)$")
+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):
-  zope_interface.implements(interface.IPromise)
   def __init__(self, config):
     GenericPromise.__init__(self, config)
     self.setPeriodicity(minute=10)
...
@@ -27,7 +25,7 @@ class RunPromise(GenericPromise):
       self.logger.info("log file does not exist: log check skipped")
       return 0
-    with open(log_file) as f:
+    with open(log_file, "rb") as f:
       f.seek(0, 2)
       block_end_byte = f.tell()
       f.seek(-min(block_end_byte, 4096*10), 1)
...
@@ -38,7 +36,7 @@ class RunPromise(GenericPromise):
         continue
       dt, _, level, msg = m.groups()
       try:
-        t = time.strptime(dt, "%Y-%m-%d %H:%M:%S")
+        t = time.strptime(dt.decode('utf-8'), "%Y-%m-%d %H:%M:%S")
       except ValueError:
         continue
       if maximum_delay and (time.time() - time.mktime(t)) > maximum_delay:
...
slapos/promise/plugin/check_file_state.py  View file @ 1c8269b2

-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise
+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):
-  zope_interface.implements(interface.IPromise)
   def __init__(self, config):
     GenericPromise.__init__(self, config)
     # SR can set custom periodicity
...
slapos/promise/plugin/check_free_disk_space.py  View file @ 1c8269b2

-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise
...
@@ -12,10 +12,9 @@ import psutil
 from slapos.collect.db import Database
+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):
-  zope_interface.implements(interface.IPromise)
   def __init__(self, config):
     GenericPromise.__init__(self, config)
     # check disk space at least every 3 minutes
...
@@ -29,12 +28,12 @@ class RunPromise(GenericPromise):
       database.connect()
       where_query = "time between '%s:00' and '%s:30' and partition='%s'" % (time, time, disk_partition)
       query_result = database.select("disk", date, "free", where=where_query)
-      result = zip(*query_result)
-      if not result or not result[0][0]:
+      result = query_result.fetchone()
+      if not result or not result[0]:
        self.logger.info("No result from collector database: disk check skipped")
        return 0
-      disk_free = result[0][0]
+      disk_free = result[0]
-    except sqlite3.OperationalError, e:
+    except sqlite3.OperationalError as e:
      # if database is still locked after timeout expiration (another process is using it)
      # we print warning message and try the promise at next run until max warn count
      locked_message = "database is locked"
...
slapos/promise/plugin/check_icmp_packet_lost.py  View file @ 1c8269b2

-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise, TestResult
 import re
 import time
 from slapos.networkbench.ping import ping, ping6
+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):
-  zope_interface.implements(interface.IPromise)
   def __init__(self, config):
     GenericPromise.__init__(self, config)
     # set periodicity to run the promise twice per day
...
slapos/promise/plugin/check_partition_deployment_state.py  View file @ 1c8269b2

-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise
 import os
 from datetime import datetime
+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):
-  zope_interface.implements(interface.IPromise)
   def __init__(self, config):
     GenericPromise.__init__(self, config)
     self.setPeriodicity(minute=1)
...
slapos/promise/plugin/check_re6st_optimal_status.py  View file @ 1c8269b2

-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise, TestResult
 import re
 import time
 from slapos.networkbench.ping import ping, ping6
+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):
-  zope_interface.implements(interface.IPromise)
   def __init__(self, config):
     GenericPromise.__init__(self, config)
     # set periodicity to run the promise twice per day
...
slapos/promise/plugin/check_server_cpu_load.py  View file @ 1c8269b2

-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise
 import subprocess
 import os
+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):
-  zope_interface.implements(interface.IPromise)
   def __init__(self, config):
     GenericPromise.__init__(self, config)
     # test load every 3 minutes
...
@@ -17,7 +16,7 @@ class RunPromise(GenericPromise):
   def checkCPULoad(self, tolerance=2.2):
     # tolerance=1.5 => accept CPU load up to 1.5 =150%
-    uptime_result = subprocess.check_output(['uptime'])
+    uptime_result = subprocess.check_output(['uptime'], universal_newlines=True)
     line = uptime_result.strip().split(' ')
     load, load5, long_load = line[-3:]
     long_load = float(long_load.replace(',', '.'))
...
@@ -44,7 +43,7 @@ class RunPromise(GenericPromise):
     if load_threshold is not None:
       try:
         threshold = float(load_threshold)
-      except ValueError, e:
+      except ValueError as e:
         self.logger.error("CPU load threshold %r is not valid: %s" % (load_threshold, e))
         return
...
slapos/promise/plugin/check_url_available.py  View file @ 1c8269b2

-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise
 import requests
+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):
-  zope_interface.implements(interface.IPromise)
   def __init__(self, config):
     GenericPromise.__init__(self, config)
     # SR can set custom periodicity
...
@@ -47,7 +45,7 @@ class RunPromise(GenericPromise):
       result = requests.get(
         url, verify=verify, allow_redirects=True, timeout=timeout, cert=cert)
     except requests.exceptions.SSLError as e:
-      if 'certificate verify failed' in str(e.message):
+      if 'certificate verify failed' in str(e):
         self.logger.error(
           "ERROR SSL verify failed while accessing %r" % (url,))
       else:
...
@@ -58,7 +56,7 @@ class RunPromise(GenericPromise):
       self.logger.error(
         "ERROR connection not possible while accessing %r" % (url, ))
       return
-    except Exception, e:
+    except Exception as e:
       self.logger.error("ERROR: %s" % (e,))
       return
...
slapos/promise/plugin/monitor_bootstrap_status.py  View file @ 1c8269b2

-from zope import interface as zope_interface
+from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise.generic import GenericPromise
 import os
...
@@ -6,10 +6,9 @@ import time
 import psutil
 from .util import tail_file
+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):
-  zope_interface.implements(interface.IPromise)
   def __init__(self, config):
     GenericPromise.__init__(self, config)
     self.setPeriodicity(minute=2)
...
@@ -23,7 +22,7 @@ class RunPromise(GenericPromise):
     with open(process_pid_file) as f:
       try:
         pid = int(f.read())
-      except ValueError, e:
+      except ValueError as e:
         raise ValueError("%r is empty or doesn't contain a valid pid number: %s" % (
           process_pid_file, str(e)))
...
slapos/pubsub/__init__.py  View file @ 1c8269b2

 import argparse
 import csv
 import feedparser
-import httplib  # To avoid magic numbers
+from six.moves import http_client as httplib  # To avoid magic numbers
 import io
 import json
 import logging
...
slapos/qemuqmpclient/__init__.py  View file @ 1c8269b2
...
@@ -25,6 +25,8 @@
 #
 ##############################################################################
+from __future__ import print_function
 import argparse
 import json
 import os
...
@@ -71,7 +73,7 @@ def getInitialQemuResourceDict(pid_file):
   with open(pid_file) as f:
     try:
       pid = int(f.read())
-    except ValueError, e:
+    except ValueError as e:
       raise ValueError("%r is empty or doesn't contain a valid pid number: %s" % (
         pid_file, str(e)))
...
@@ -81,7 +83,7 @@ def getInitialQemuResourceDict(pid_file):
       process = psutil.Process(pid)
       break
     except psutil.NoSuchProcess:
-      print "Qemu process is not started yet..."
+      print("Qemu process is not started yet...")
      wait_count -= 1
      time.sleep(0.5)
  else:
...
@@ -130,7 +132,7 @@ class QemuQMPWrapper(object):
     if not os.path.exists(unix_socket_location):
       raise Exception('unix socket %s does not exist.' % unix_socket_location)
-    print 'Connecting to qemu...'
+    print('Connecting to qemu...')
     so = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
     connected = False
     while not connected:
...
@@ -138,7 +140,7 @@ class QemuQMPWrapper(object):
         so.connect(unix_socket_location)
       except socket.error:
         time.sleep(1)
-        print 'Could not connect, retrying...'
+        print('Could not connect, retrying...')
       else:
         connected = True
     so.recv(1024)
...
@@ -159,7 +161,7 @@ class QemuQMPWrapper(object):
         raise QmpCommandError(response["error"]["desc"])
       if 'event' in response:
         self._event_list.append(response)
-        print response
+        print(response)
         if not only_event:
           continue
...
@@ -171,7 +173,7 @@ class QemuQMPWrapper(object):
     for i in range(0, retry):
       if response is not None:
         break
-      print "Retrying send command after %s second(s)..." % sleep
+      print("Retrying send command after %s second(s)..." % sleep)
       time.sleep(sleep)
       self.socket.sendall(json.dumps(message))
       response = self._readResponse()
...
@@ -191,14 +193,14 @@ class QemuQMPWrapper(object):
         if actual_status == wanted_status:
           return
         else:
-          print 'VM in %s status, wanting it to be %s, retrying...' % (
-            actual_status, wanted_status)
+          print('VM in %s status, wanting it to be %s, retrying...' % (
+            actual_status, wanted_status))
           time.sleep(1)
       except IOError:
-        print 'VM not ready, retrying...'
+        print('VM not ready, retrying...')
   def capabilities(self):
-    print 'Asking for capabilities...'
+    print('Asking for capabilities...')
     self._send({'execute': 'qmp_capabilities'})
   def getEventList(self, timeout=0, cleanup=False):
...
@@ -223,7 +225,7 @@ class QemuQMPWrapper(object):
     self.socket.setblocking(0)
     try:
       self._readResponse(only_event=True)
-    except socket.error, err:
+    except socket.error as err:
       if err[0] == errno.EAGAIN:
         # No data available
         pass
...
@@ -240,7 +242,7 @@ class QemuQMPWrapper(object):
   def setVNCPassword(self, password):
     # Set VNC password
-    print 'Setting VNC password...'
+    print('Setting VNC password...')
     result = self._send({
       "execute": "change",
       "arguments": {
...
@@ -251,19 +253,19 @@ class QemuQMPWrapper(object):
     })
     if result and result.get('return', None) != {}:
       raise ValueError(result)
-    print 'Done.'
+    print('Done.')
   def powerdown(self):
-    print 'Stopping the VM...'
+    print('Stopping the VM...')
     self._send({'execute': 'system_powerdown'})
   def suspend(self):
-    print 'Suspending VM...'
+    print('Suspending VM...')
     self._send({'execute': 'stop'})
     self._waitForVMStatus('paused')
   def resume(self):
-    print 'Resuming VM...'
+    print('Resuming VM...')
     self._send({'execute': 'cont'})
     self._waitForVMStatus('running')
...
@@ -285,7 +287,7 @@ class QemuQMPWrapper(object):
     return
   def driveBackup(self, backup_target, source_device='virtio0', sync_type='full'):
-    print 'Asking Qemu to perform backup to %s' % backup_target
+    print('Asking Qemu to perform backup to %s' % backup_target)
     # XXX: check for error
     self._send({
       'execute': 'drive-backup',
...
@@ -296,17 +298,17 @@ class QemuQMPWrapper(object):
       }
     })
     while self._getRunningJobList(backup_target):
-      print 'Job is not finished yet.'
+      print('Job is not finished yet.')
       time.sleep(20)
   def createSnapshot(self, snapshot_file, device='virtio0'):
-    print self._send({
+    print(self._send({
       'execute': 'blockdev-snapshot-sync',
       'arguments': {
         'device': device,
         'snapshot-file': snapshot_file,
       }
-    })
+    }))
   def createInternalSnapshot(self, name=None, device='virtio0'):
     if name is None:
...
@@ -372,9 +374,9 @@ class QemuQMPWrapper(object):
       try:
         if resend:
           result = self._send(command_dict)
-      except QmpCommandError, e:
-        print "ERROR: ", str(e)
-        print "%s\nRetry remove %r in few seconds..." % (result, dev_id)
+      except QmpCommandError as e:
+        print("ERROR: ", e)
+        print("%s\nRetry remove %r in few seconds..." % (result, dev_id))
         resend = True
       else:
         for event in self.getEventList(timeout=2, cleanup=True):
...
@@ -388,13 +390,13 @@ class QemuQMPWrapper(object):
       if stop_retry:
         break
       elif result is None and max_retry > 0:
-        print "Retry remove %r in few seconds..." % dev_id
+        print("Retry remove %r in few seconds..." % dev_id)
        time.sleep(2)
     if result is not None:
       if result.get('return', None) == {} or ('error' in result and \
         result['error'].get('class', '') == 'DeviceNotFound'):
-        print 'Device %s was removed.' % dev_id
+        print('Device %s was removed.' % dev_id)
         return
     # device was not remove after retries
...
@@ -417,7 +419,7 @@ class QemuQMPWrapper(object):
     if not system_exited:
       # hard reset the VM
-      print "Trying hard shutdown of the VM..."
+      print("Trying hard shutdown of the VM...")
       self._send({"execute": "quit"})
     raise QmpDeviceRemoveError("Stopped Qemu in order to remove the device %r" % dev_id)
...
@@ -459,14 +461,14 @@ class QemuQMPWrapper(object):
     if cpu_amount == hotplug_amount:
       # no chanches
-      print "Hotplug CPU is up to date."
+      print("Hotplug CPU is up to date.")
       return
     if cpu_amount > hotplug_amount:
       # we will remove CPU
       cpu_diff = -1 * cpu_diff
     if cpu_diff >= 1:
-      print "Request remove %s CPUs..." % cpu_diff
+      print("Request remove %s CPUs..." % cpu_diff)
       used_socket_id_list.reverse()
       for i in range(0, cpu_diff):
         self._removeDevice(used_socket_id_list[i], {
...
@@ -478,7 +480,7 @@ class QemuQMPWrapper(object):
         # no hotplugable cpu socket found for Add
         raise ValueError("Cannot Configure %s CPUs, the maximum amount of " \
           "hotplugable CPU is %s!" % (hotplug_amount, max_hotplug_cpu))
-      print "Adding %s CPUs..." % cpu_diff
+      print("Adding %s CPUs..." % cpu_diff)
       for i in range(0, cpu_diff):
         self._send({
           'execute': 'device_add',
...
@@ -491,10 +493,10 @@ class QemuQMPWrapper(object):
     if hotplug_amount != final_cpu_count:
       raise ValueError("Consistency error: Expected %s hotplugged CPU(s) but" \
         " current CPU amount is %s" % (hotplug_amount, final_cpu_count))
-    print "Done."
+    print("Done.")
   def _removeMemory(self, id_dict, auto_reboot=False):
-    print "Trying to remove devices %s, %s..." % (id_dict['id'], id_dict['memdev'])
+    print("Trying to remove devices %s, %s..." % (id_dict['id'], id_dict['memdev']))
     self._removeDevice(id_dict['id'], {
       'execute': 'device_del',
       'arguments': {'id': id_dict['id']}
...
@@ -544,7 +546,7 @@ class QemuQMPWrapper(object):
     # cleanup memdev that was not removed because of failure
     for memdev in cleanup_memdev_id_dict.keys():
-      print "Cleaning up memdev %s..." % memdev
+      print("Cleaning up memdev %s..." % memdev)
       self._removeDevice(memdev, {
         'execute': 'object-del',
         'arguments': {
...
@@ -559,9 +561,9 @@ class QemuQMPWrapper(object):
     if (mem_size / slot_size) > slot_amount:
       raise ValueError("No enough slots available to add %sMB of RAM" % mem_size)
-    current_size = current_size / (1024 * 1024)
+    current_size //= (1024 * 1024)
     if current_size == mem_size:
-      print "Hotplug Memory size is up to date."
+      print("Hotplug Memory size is up to date.")
       return
     if mem_size < 0:
...
@@ -569,7 +571,7 @@ class QemuQMPWrapper(object):
     elif current_size > mem_size:
       # Request to remove memory
       to_remove_size = current_size - mem_size
-      print "Removing %s MB of memory..." % to_remove_size
+      print("Removing %s MB of memory..." % to_remove_size)
       for i in range(num_slot_used, 0, -1):
         # remove all slots that won't be used
...
@@ -587,9 +589,9 @@ class QemuQMPWrapper(object):
       )
     elif current_size < mem_size:
       # ask for increase memory
-      slot_add = (mem_size - current_size) / slot_size
+      slot_add = (mem_size - current_size) // slot_size
-      print "Adding %s memory slot(s) of %s MB..." % (slot_add, slot_size)
+      print("Adding %s memory slot(s) of %s MB..." % (slot_add, slot_size))
       for i in range(0, slot_add):
         index = num_slot_used + i + 1
         self._send({
...
@@ -618,11 +620,11 @@ class QemuQMPWrapper(object):
     if mem_size != final_mem_size:
       raise ValueError("Consistency error: Expected %s MB of hotplugged RAM " \
         "but current RAM size is %s MB" % (mem_size, final_mem_size))
-    print "Done."
+    print("Done.")
   def updateDevice(self, option_dict):
     argument_dict = {}
-    if option_dict.has_key('device'):
+    if 'device' in option_dict:
       if option_dict['device'] == 'cpu':
         return self._updateCPU(
           amount=int(option_dict['amount']),
...
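The two division changes above are needed because `/` between integers is true division on Python 3 and would produce a float where a slot count or a size in MB is expected (for example as a range() bound); `//` keeps integer semantics on both versions. A tiny sketch with made-up sizes:

    mem_size = 4096                      # MB requested (illustrative)
    current_size = 2048 * 1024 * 1024    # bytes reported by QMP (illustrative)
    slot_size = 512                      # MB per hotplug slot (illustrative)

    current_size //= (1024 * 1024)       # floor division keeps an int on Python 3
    slot_add = (mem_size - current_size) // slot_size
    for i in range(0, slot_add):         # range() needs an integer bound
        print("adding slot %d of %d MB" % (i + 1, slot_size))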
slapos/resilient/runner_exporter.py  View file @ 1c8269b2
...
@@ -68,7 +68,8 @@ def synchroniseRunnerConfigurationDirectory(config, backup_path):
     os.makedirs(backup_path)
   file_list = ['config.json']
-  for hidden_file in os.listdir('.'):
+  # `sorted` is used for Python 2-3 compatibility
+  for hidden_file in sorted(os.listdir('.')):
     if hidden_file[0] == '.':
       file_list.append(hidden_file)
   rsync(config.rsync_binary, file_list, backup_path, dry=config.dry)
...
@@ -80,7 +81,8 @@ def synchroniseRunnerWorkingDirectory(config, backup_path):
   if os.path.isdir('instance'):
     file_list.append('instance')
-  exclude_list = getExcludePathList(os.getcwd())
+  # `sorted` is used for Python 2-3 compatibility
+  exclude_list = sorted(getExcludePathList(os.getcwd()))
   # XXX: proxy.db should be properly dumped to leverage its
   # atomic properties
...
slapos/resilient/runner_utils.py  View file @ 1c8269b2
...
@@ -7,6 +7,9 @@ import sys
 from contextlib import contextmanager
 from hashlib import sha256
 from zc.buildout.configparser import parse
+from slapos.util import bytes2str, str2bytes
+import six
 @contextmanager
...
@@ -63,7 +66,7 @@ def getExcludePathList(path):
     if e.errno != errno.ENOENT:
       raise
   else:
-    for section in installed.itervalues():
+    for section in six.itervalues(installed):
       append_relative(section.get(
         '__buildout_installed__', '').splitlines())
...
@@ -129,7 +132,7 @@ def writeSignatureFile(slappart_signature_method_dict, runner_working_path, sign
   if signature_process:
     (output, error_output) = signature_process.communicate(
-      '\0'.join(filepath_list)
+      str2bytes('\0'.join(filepath_list))
     )
     if signature_process.returncode != 0:
...
@@ -143,7 +146,7 @@ def writeSignatureFile(slappart_signature_method_dict, runner_working_path, sign
       # We have to rstrip as most programs return an empty line
       # at the end of their output
-      signature_list.extend(output.strip('\n').split('\n'))
+      signature_list.extend(bytes2str(output).strip('\n').split('\n'))
     else:
       signature_list.extend(
         getSha256Sum(filepath_list)
...
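Subprocess pipes carry bytes on Python 3, so the joined file list is encoded before communicate() and the captured output decoded before splitting; the diff uses the str2bytes/bytes2str helpers from slapos.util, and plain encode()/decode() plays their role in this sketch (the xargs command is only an illustrative stand-in for a signature script):

    import subprocess

    filepath_list = ['a.txt', 'b.txt']  # illustrative names
    proc = subprocess.Popen(['xargs', '-0', '-n1', 'echo'],
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    output, _ = proc.communicate('\0'.join(filepath_list).encode())  # bytes in
    signature_list = output.decode().strip('\n').split('\n')         # str out
    print(signature_list)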
slapos/runner/runnertest.py

...
@@ -10,20 +10,23 @@
 # or it will NOT work
 #############################################

+from __future__ import print_function
+
 import argparse
 import base64
-import ConfigParser
+from six.moves.configparser import SafeConfigParser
 import datetime
 import hashlib
 import json
 import os
 import shutil
-import sup_process
+from . import sup_process
-from StringIO import StringIO
+from io import StringIO
 import ssl
 import time
 import unittest
-import urllib2
+from six.moves.urllib.request import Request, urlopen
+import six

 from slapos.runner.utils import (getProfilePath,
                                  getSession, isInstanceRunning,
...
@@ -125,7 +128,7 @@ class SlaprunnerTestCase(unittest.TestCase):
       partition_id=cls.partition_id
     )
     cls.parameter_dict = cls.partition.getConnectionParameterDict()
-    for attribute, value in cls.parameter_dict.iteritems():
+    for attribute, value in six.iteritems(cls.parameter_dict):
       setattr(cls, attribute.replace('-', '_'), value)

     #create slaprunner configuration
...
@@ -188,7 +191,7 @@ class SlaprunnerTestCase(unittest.TestCase):
     shutil.rmtree(self.app.config['software_link'])

   def updateConfigParameter(self, parameter, value):
-    config_parser = ConfigParser.SafeConfigParser()
+    config_parser = SafeConfigParser()
     config_parser.read(os.getenv('RUNNER_CONFIG'))
     for section in config_parser.sections():
       if config_parser.has_option(section, parameter):
...
@@ -256,11 +259,11 @@ setuptools = 33.1.1
     open(template, "w").write(content)

   def assertCanLoginWith(self, username, password):
-    request = urllib2.Request(self.backend_url)
+    request = Request(self.backend_url)
     base64string = base64.encodestring('%s:%s' % (username, password))[:-1]
     request.add_header("Authorization", "Basic %s" % base64string)
     ssl_context = ssl._create_unverified_context()
-    result = urllib2.urlopen(request, context=ssl_context)
+    result = urlopen(request, context=ssl_context)
     self.assertEqual(result.getcode(), 200)

   def test_updateAccount(self):
...
@@ -606,7 +609,7 @@ setuptools = 33.1.1
 class PrintStringIO(StringIO):
   def write(self, data):
     StringIO.write(self, data)
-    print data
+    print(data)

 def main():
   """
...
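Note: the import block above is the core of the Python 2/3 compatibility work in this test module: renamed stdlib modules are pulled in through six.moves. A small illustrative sketch of the same imports in use (the config file name is an assumption, not taken from the commit):

  from six.moves.configparser import SafeConfigParser
  from six.moves.urllib.request import Request, urlopen

  config_parser = SafeConfigParser()
  config_parser.read('slapos-runner.cfg')  # hypothetical path
  request = Request('https://example.com/')
  request.add_header("Authorization", "Basic dXNlcjpwYXNz")
  # urlopen(request) would then behave the same on Python 2 and 3.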
slapos/runner/sup_process.py

...
@@ -3,7 +3,7 @@
 import os
 import signal
 import time
-import xmlrpclib
+import six.moves.xmlrpc_client as xmlrpclib

 # This mini-library is used to communicate with supervisord process
 # It aims to replace the file "process.py"
...
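Note: six.moves also covers the XML-RPC client rename (xmlrpclib on Python 2, xmlrpc.client on Python 3), so the alias keeps the rest of the module untouched. Illustrative sketch only; the supervisord endpoint below is an assumption, not taken from the commit:

  import six.moves.xmlrpc_client as xmlrpclib

  server = xmlrpclib.ServerProxy('http://127.0.0.1:9001/RPC2')
  # server.supervisor.getState() then works identically on both versions.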
slapos/runner/utils.py

...
@@ -2,20 +2,20 @@
 # vim: set et sts=2:
 # pylint: disable-msg=W0311,C0301,C0103,C0111,W0141,W0142

-import ConfigParser
+from six.moves import configparser
 import datetime
 import json
 import logging
-import md5
+import hashlib
 import os
-import sup_process
+from . import sup_process
 import re
 import shutil
 import stat
-import thread
+from six.moves import _thread, range
 import time
-import urllib
+from six.moves.urllib.request import urlopen
-import xmlrpclib
+import six.moves.xmlrpc_client as xmlrpclib
 from xml.dom import minidom
 import xml_marshaller
...
@@ -43,7 +43,8 @@ html_escape_table = {
 def getBuildAndRunParams(config):
   json_file = os.path.join(config['etc_dir'], 'config.json')
-  json_params = json.load(open(json_file))
+  with open(json_file) as f:
+    json_params = json.load(f)
   return json_params
...
@@ -52,7 +53,8 @@ def saveBuildAndRunParams(config, params):
   Works like that because this function do not care
   about how you got the parameters"""
   json_file = os.path.join(config['etc_dir'], 'config.json')
-  open(json_file, "w").write(json.dumps(params))
+  with open(json_file, "w") as f:
+    f.write(json.dumps(params))

 def html_escape(text):
...
@@ -92,11 +94,11 @@ def updateUserCredential(config, username, password):
 def getRcode(config):
-  parser = ConfigParser.ConfigParser()
+  parser = configparser.ConfigParser()
   try:
     parser.read(config['knowledge0_cfg'])
     return parser.get('public', 'recovery-code')
-  except (ConfigParser.NoSectionError, IOError) as e:
+  except (configparser.NoSectionError, IOError) as e:
     return None

 def getUsernameList(config):
...
@@ -188,12 +190,12 @@ def updateProxy(config):
     'software_root': config['software_root']
   }
-  for i in xrange(0, int(config['partition_amount'])):
+  for i in range(int(config['partition_amount'])):
     partition_reference = '%s%s' % (prefix, i)
     partition_path = os.path.join(config['instance_root'], partition_reference)
     if not os.path.exists(partition_path):
       os.mkdir(partition_path)
-      os.chmod(partition_path, 0750)
+      os.chmod(partition_path, 0o750)
     slap_config['partition_list'].append({
       'address_list': [
         {
...
@@ -423,7 +425,7 @@ def getSlapStatus(config):
   except Exception:
     pass
   if partition_list:
-    for i in xrange(0, int(config['partition_amount'])):
+    for i in range(int(config['partition_amount'])):
       slappart_id = '%s%s' % ("slappart", i)
       if not [x[0] for x in partition_list if slappart_id == x[0]]:
         partition_list.append((slappart_id, []))
...
@@ -460,7 +462,7 @@ def removeInstanceRootDirectory(config):
         fullPath = os.path.join(root, fname)
         if not os.access(fullPath, os.W_OK):
           # Some directories may be read-only, preventing to remove files in it
-          os.chmod(fullPath, 0744)
+          os.chmod(fullPath, 0o744)
     shutil.rmtree(instance_directory)

 def removeCurrentInstance(config):
...
@@ -589,7 +591,7 @@ def newSoftware(folder, config, session):
     software = "https://lab.nexedi.com/nexedi/slapos/raw/master/software/lamp-template/software.cfg"
     softwareContent = ""
     try:
-      softwareContent = urllib.urlopen(software).read()
+      softwareContent = urlopen(software).read()
     except:
       #Software.cfg and instance.cfg content will be empty
       pass
...
@@ -777,7 +779,7 @@ def md5sum(file):
     return False
   try:
     fh = open(file, 'rb')
-    m = md5.md5()
+    m = hashlib.md5()
     while True:
       data = fh.read(8192)
       if not data:
...
@@ -830,7 +832,7 @@ def readParameters(path):
         sub_obj[str(subnode.getAttribute('id'))] = subnode.childNodes[0].data # .decode('utf-8').decode('utf-8')
       obj[str(elt.tagName)] = sub_obj
       return obj
-    except Exception, e:
+    except Exception as e:
       return str(e)
   else:
     return "No such file or directory: %s" % path
...
@@ -901,7 +903,8 @@ def runSlapgridUntilSuccess(config, step):
   else:
     return -1
   counter_file = os.path.join(config['runner_workdir'], '.turn-left')
-  open(counter_file, 'w+').write(str(max_tries))
+  with open(counter_file, 'w+') as f:
+    f.write(str(max_tries))
   counter = max_tries
   slapgrid = True
   # XXX-Nico runSoftwareWithLock can return 0 or False (0==False)
...
@@ -911,9 +914,11 @@ def runSlapgridUntilSuccess(config, step):
     # slapgrid == 0 because EXIT_SUCCESS == 0
     if slapgrid == 0:
       break
-    times_left = int(open(counter_file).read()) - 1
+    with open(counter_file) as f:
+      times_left = int(f.read()) - 1
     if times_left > 0 :
-      open(counter_file, 'w+').write(str(times_left))
+      with open(counter_file, 'w+') as f:
+        f.write(str(times_left))
       counter = times_left
     else :
       counter = 0
...
@@ -934,7 +939,7 @@ def setupDefaultSR(config):
   if not os.path.exists(project) and config['default_sr'] != '':
     configNewSR(config, config['default_sr'])
   if config['auto_deploy']:
-    thread.start_new_thread(buildAndRun, (config,))
+    _thread.start_new_thread(buildAndRun, (config,))

 def setMiniShellHistory(config, command):
...
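Note: two mechanical conversions dominate this file: octal literals gain the 0o prefix, and open(...).read()/write() chains become context managers. A minimal sketch combining both (the function name and paths are illustrative, not taken from the commit):

  import json
  import os

  def save_params(partition_path, json_file, params):
    # 0o750 is the octal syntax accepted by both Python 2.7 and 3;
    # the bare 0750 spelling is a SyntaxError on Python 3.
    if not os.path.exists(partition_path):
      os.mkdir(partition_path)
      os.chmod(partition_path, 0o750)
    # The context manager closes the file deterministically instead of
    # relying on garbage collection, which also avoids ResourceWarning.
    with open(json_file, 'w') as f:
      f.write(json.dumps(params))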
slapos/runner/views.py

...
@@ -7,9 +7,9 @@ import json
 import os
 import shutil
 import subprocess
-import sup_process
+from . import sup_process
-import thread
+from six.moves import _thread
-import urllib
+from six.moves.urllib.parse import unquote

 from flask import (Flask, request, redirect, url_for, render_template,
                    g, flash, jsonify, session, abort, send_file)
...
@@ -160,7 +160,7 @@ def removeSoftware():
 def runSoftwareProfile():
-  thread.start_new_thread(runSlapgridUntilSuccess, (app.config, "software"))
+  _thread.start_new_thread(runSlapgridUntilSuccess, (app.config, "software"))
   return jsonify(result=True)
...
@@ -233,7 +233,7 @@ def removeInstance():
 def runInstanceProfile():
   if not os.path.exists(app.config['instance_root']):
     os.mkdir(app.config['instance_root'])
-  thread.start_new_thread(runSlapgridUntilSuccess, (app.config, "instance"))
+  _thread.start_new_thread(runSlapgridUntilSuccess, (app.config, "instance"))
   return jsonify(result=True)
...
@@ -293,7 +293,7 @@ def cloneRepository():
   try:
     cloneRepo(request.form['repo'], path, request.form['user'], request.form['email'])
     return jsonify(code=1, result="")
-  except GitCommandError, e:
+  except GitCommandError as e:
     return jsonify(code=0, result=safeResult(str(e)))
...
@@ -324,7 +324,7 @@ def getProjectStatus():
     try:
       result, branch, isdirty = gitStatus(path)
       return jsonify(code=1, result=result, branch=branch, dirty=isdirty)
-    except GitCommandError, e:
+    except GitCommandError as e:
       return jsonify(code=0, result=safeResult(str(e)))
   else:
     return jsonify(code=0, result="Can not read folder: Permission Denied")
...
@@ -414,7 +414,7 @@ def changeBranch():
       else:
         json = "This is already your active branch for this project"
       return jsonify(code=1, result=json)
-    except GitCommandError, e:
+    except GitCommandError as e:
       return jsonify(code=0, result=safeResult(str(e)))
   else:
     return jsonify(code=0, result="Can not read folder: Permission Denied")
...
@@ -432,7 +432,7 @@ def newBranch():
         return jsonify(code=1, result="")
       else:
         return jsonify(code=0, result="Failed to checkout to branch %s.")
-    except GitCommandError, e:
+    except GitCommandError as e:
       return jsonify(code=0, result=safeResult(str(e)))
   else:
     return jsonify(code=0, result="Can not read folder: Permission Denied")
...
@@ -638,7 +638,7 @@ def updateAccount():
   try:
     updateGitConfig(app.config['default_repository_path'], name, email)
-  except GitCommandError, e:
+  except GitCommandError as e:
     return jsonify(code=0, result=str(e))
   git_user_file = os.path.join(app.config['etc_dir'], '.git_user')
   with codecs.open(git_user_file, 'w', encoding='utf-8') as gfile:
...
@@ -684,10 +684,8 @@ def fileBrowser():
     dir = request.form['dir'].encode('utf-8')
     newfilename = request.form.get('newfilename', '').encode('utf-8')
     files = request.form.get('files', '').encode('utf-8')
-    if not request.form.has_key('opt') or not request.form['opt']:
-      opt = 1
-    else:
-      opt = int(request.form['opt'])
+    opt = request.form.get('opt')
+    opt = int(opt) if opt else 1
   else:
     opt = int(request.args.get('opt'))
...
@@ -751,9 +749,9 @@ def fileBrowser():
 def editFile():
   return render_template('editFile.html', workDir='workspace',
-    profile=urllib.unquote(request.args.get('profile', '')),
+    profile=unquote(request.args.get('profile', '')),
     projectList=listFolder(app.config, 'workspace'),
-    filename=urllib.unquote(request.args.get('filename', '')))
+    filename=unquote(request.args.get('filename', '')))

 def shell():
   return render_template('shell.html')
...
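Note: the views keep using the low-level threading entry point, now reached through six.moves, and the Python-2-only "except X, e" syntax becomes "except X as e". A short illustrative sketch (the task function and helper name are assumptions):

  from six.moves import _thread  # 'thread' on Python 2, '_thread' on Python 3

  def run_in_background(task, config):
    try:
      _thread.start_new_thread(task, (config,))
    except _thread.error as e:  # 'except _thread.error, e' would not parse on Python 3
      return str(e)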
slapos/securedelete.py

...
@@ -24,6 +24,8 @@
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 #
 ##############################################################################

+from __future__ import print_function
+
 import os
 import argparse
 import subprocess
...
@@ -96,7 +98,8 @@ def shred(options):
   arg_list.extend(getFileList(options.file_list, options.check_exist))

   pshred = subprocess.Popen(arg_list, stdout=subprocess.PIPE,
-                            stderr=subprocess.STDOUT)
+                            stderr=subprocess.STDOUT,
+                            universal_newlines=True)
   result, stderr = pshred.communicate()
   if pshred.returncode is None:
     pshred.kill()
...
@@ -108,4 +111,4 @@ def shred(options):
 def main():
   arg_parser = getAgumentParser()
   output = shred(arg_parser.parse_args())
-  print output
+  print(output)
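Note: on Python 3, Popen.communicate() returns bytes unless text mode is requested; universal_newlines=True keeps the rest of the function working on str. Illustrative sketch, assuming GNU shred is installed:

  import subprocess

  proc = subprocess.Popen(['shred', '--help'], stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT, universal_newlines=True)
  output, _ = proc.communicate()
  print(output.splitlines()[0])  # already str, no .decode() needed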
slapos/systool.py

+from __future__ import print_function
+
 import argparse
 import sys
 import os
...
@@ -12,7 +14,7 @@ def killpidfromfile():
   if sig is None:
     raise ValueError('Unknown signal name %s' % sys.argv[2])
   pid = int(open(file).read())
-  print 'Killing pid %s with signal %s' % (pid, sys.argv[2])
+  print('Killing pid %s with signal %s' % (pid, sys.argv[2]))
   os.kill(pid, sig)

 def sublist(a, b):
...
@@ -63,7 +65,7 @@ def kill():
       cmdline = p.cmdline()
       if cmdline == args.arg if args.full else sublist(cmdline, args.arg):
         p.send_signal(s)
-        print 'killed pid %s with signal %s' % (p.pid, args.signal)
+        print('killed pid %s with signal %s' % (p.pid, args.signal))
         r = 0
     except psutil.Error:
       pass
...
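Note: the __future__ import makes the print() function syntax valid on Python 2 as well, so the same source runs on both interpreters. Minimal sketch (the helper name and pid file path are illustrative, not taken from the commit):

  from __future__ import print_function
  import os
  import signal

  def kill_from_pidfile(pidfile, sig=signal.SIGTERM):
    with open(pidfile) as f:
      pid = int(f.read())
    print('Killing pid %s with signal %s' % (pid, sig))
    os.kill(pid, sig)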
slapos/test/monitor/test_config_document.py

...
@@ -78,7 +78,7 @@ echo "htpasswd $@" > %s/monitor-htpasswd
     self.writeContent(self.monitor_https_cors, '{% set allow_domain = "|".join(domain.replace(".", "\.").split()) -%}\n'
       'SetEnvIf Origin "^http(s)?://(.+\.)?({{ allow_domain }})$" ORIGIN_DOMAIN=$0\n'
       'Header always set Access-Control-Allow-Origin "%{ORIGIN_DOMAIN}e" env=ORIGIN_DOMAIN')
-    os.chmod(self.httpd_passwd_bin, 0755)
+    os.chmod(self.httpd_passwd_bin, 0o755)

   def tearDown(self):
     if os.path.exists(self.base_dir):
...
@@ -101,31 +101,37 @@ echo "htpasswd $@" > %s/monitor-htpasswd
   def check_config(self):
     config_parameter = os.path.join(self.config_dir, 'config.parameters.json')
-    config_parameter_json = json.load(open(config_parameter))
-    config_json = json.load(open(self.config_path))
+    with open(config_parameter) as f:
+      config_parameter_json = json.load(f)
+    with open(self.config_path) as f:
+      config_json = json.load(f)

     for config in config_json:
       if config["key"]:
-        self.assertTrue(config_parameter_json.has_key(config["key"]))
+        self.assertIn(config["key"], config_parameter_json)
         parameter = config_parameter_json[config["key"]]
       else:
         continue
       if config["key"] == 'from-file':
         self.assertTrue(os.path.exists(parameter['file']))
-        self.assertEqual(config["value"], open(parameter['file']).read())
+        with open(parameter['file']) as f:
+          self.assertEqual(config["value"], f.read())
       elif config["key"] == 'httpd-password':
         http_passwd = "%s/monitor-htpasswd" % self.base_dir
         #XXX where \n bellow come from ?
         command = 'htpasswd -cb %s admin %s%s' % (http_passwd, config["value"], '\n')
         self.assertTrue(os.path.exists(parameter['file']))
         self.assertTrue(os.path.exists(http_passwd))
-        self.assertEqual(config["value"], open(parameter['file']).read())
-        self.assertEqual(open(http_passwd).read(), command)
+        with open(parameter['file']) as f:
+          self.assertEqual(config["value"], f.read())
+        with open(http_passwd) as f:
+          self.assertEqual(f.read(), command)
       elif config["key"] == 'cors-domain':
         cors_file = "%s/test-httpd-cors.cfg" % self.base_dir
         self.assertTrue(os.path.exists(cors_file))
         cors_string = self.generate_cors_string(config["value"].split())
-        self.assertEqual(cors_string, open(cors_file).read())
+        with open(cors_file) as f:
+          self.assertEqual(cors_string, f.read())

   def check_cfg_config(self, config_list):
     cfg_output = os.path.join(self.config_dir, 'config.cfg')
...
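Note: dict.has_key() was removed in Python 3; assertIn is the portable replacement and gives a clearer failure message than assertTrue(key in mapping). Illustrative sketch with sample data:

  import unittest

  class DictAssertions(unittest.TestCase):
    def test_key_present(self):
      parameters = {'cors-domain': 'example.com'}  # sample data, not from the commit
      self.assertIn('cors-domain', parameters)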
slapos/test/monitor/testbootstrap.py

...
@@ -73,7 +73,6 @@ monitor-url-list = %(url_list)s
 collector-db =
 base-url = %(base_url)s
 title = %(title)s
-service-pid-folder = %(base_dir)s/run
 promise-output-file = %(base_dir)s/monitor-bootstrap-status
 promise-runner = %(promise_run_script)s
 randomsleep = /bin/echo sleep
...
@@ -102,11 +101,11 @@ partition-folder = %(base_dir)s
     for index in range(1, amount+1):
       promise_file = os.path.join(promise_dir, 'monitor_promise-%s' % index)
       self.writeContent(promise_file, promse_content)
-      os.chmod(promise_file, 0755)
+      os.chmod(promise_file, 0o755)

     for index in range(1, amount+1):
       promise_file = os.path.join(plugin_dir, 'monitor_promise-%s.py' % index)
       self.writeContent(promise_file, promse_content)
-      os.chmod(promise_file, 0644)
+      os.chmod(promise_file, 0o644)

   def checkOPML(self, url_list):
     opml_title = "<title>%(root_title)s</title>" % self.monitor_config_dict
...
@@ -250,7 +249,8 @@ partition-folder = %(base_dir)s
     instance_config = os.path.join(instance.config_folder, '.jio_documents', 'config.json')
     self.assertTrue(os.path.exists(instance_config))
-    config_content = json.loads(open(instance_config).read())
+    with open(instance_config) as f:
+      config_content = json.load(f)
     self.assertEqual(len(config_content), 4)
     key_list = ['', 'sample', 'monitor-password', 'cors-domain']
     for parameter in config_content:
...
slapos/test/monitor/testglobalstate.py

...
@@ -44,7 +44,7 @@ class MonitorGlobalTest(unittest.TestCase):
     pkg_resources.resource_string(
       'slapos.monitor',
       'doc/monitor_instance.schema.json')
-    self.monitor_instance_schema = json.loads(monitor_schema_string)
+    self.monitor_instance_schema = json.loads(monitor_schema_string.decode('utf-8'))

     self.monitor_config_dict = dict(
...
@@ -91,7 +91,6 @@ monitor-url-list = %(url_list)s
 collector-db =
 base-url = %(base_url)s
 title = %(title)s
-service-pid-folder = %(base_dir)s/run
 promise-output-file = %(base_dir)s/monitor-bootstrap-status
 promise-runner = %(promise_run_script)s
 randomsleep = /bin/echo sleep
...
@@ -132,7 +131,7 @@ exit %(code)s
 """ % result_dict
     promise_path = os.path.join(self.etc_dir, 'promise', name)
     self.writeContent(promise_path, content)
-    os.chmod(promise_path, 0755)
+    os.chmod(promise_path, 0o755)
     return promise_path

   def getPromiseParser(self):
...
@@ -230,7 +229,7 @@ exit %(code)s
 }"""
     with open(os.path.join(self.private_dir, 'monitor.global.json')) as r:
-      result = json.loads(r.read().decode("utf-8"))
+      result = json.load(r)
     result.pop("date")
     self.assertEqual(result,
       json.loads(expected_result))
...
@@ -248,7 +247,7 @@ exit %(code)s
     expected_result_dict["state"] = {'error': 0, 'success': 4}
     instance_result_dict = None
     with open(os.path.join(self.private_dir, 'monitor.global.json')) as r:
-      instance_result_dict = json.loads(r.read().decode("utf-8"))
+      instance_result_dict = json.load(r)
     result = instance_result_dict.copy()
     result.pop("date")
     self.assertEqual(result,
...
slapos/test/monitor/testrunpromise.py

...
@@ -69,7 +69,7 @@ exit 0
 """
     promise_path = os.path.join(self.old_promise_dir, name)
     self.writeContent(promise_path, content)
-    os.chmod(promise_path, 0755)
+    os.chmod(promise_path, 0o755)
     return promise_path

   def writePromiseNOK(self, name):
...
@@ -80,19 +80,18 @@ exit 2
 """
     promise_path = os.path.join(self.old_promise_dir, name)
     self.writeContent(promise_path, content)
-    os.chmod(promise_path, 0755)
+    os.chmod(promise_path, 0o755)
     return promise_path

   def generatePromiseScript(self, name, success=True, failure_count=1, content="",
       periodicity=0.03):
-    promise_content = """from zope import interface as zope_interface
+    promise_content = """from zope.interface import implementer
 from slapos.grid.promise import interface
 from slapos.grid.promise import GenericPromise

+@implementer(interface.IPromise)
 class RunPromise(GenericPromise):

-  zope_interface.implements(interface.IPromise)
-
   def __init__(self, config):
     GenericPromise.__init__(self, config)
     self.setPeriodicity(minute=%(periodicity)s)
...
@@ -151,21 +150,23 @@ class RunPromise(GenericPromise):
     result_file = os.path.join(self.output_dir, 'my_promise.status.json')
     os.system('cat %s' % result_file)
     self.assertTrue(os.path.exists(result_file))
-    my_result = json.loads(open(result_file).read().decode("utf-8"))
+    with open(result_file) as f:
+      my_result = json.load(f)
     my_result['result'].pop('date')
     expected_result = {
       u'title': u'my_promise', u'name': u'my_promise.py',
       u'result': {
         u'failed': False, u'message': u'success', u'type': u'Test Result'
       },
-      u'execution-time': 0.05,
       u'path': u'%s/my_promise.py' % self.promise_dir,
     }
+    self.assertTrue(my_result.pop('execution-time'))
     self.assertEqual(expected_result, my_result)

     result_file = os.path.join(self.output_dir, 'my_second_promise.status.json')
     self.assertTrue(os.path.exists(result_file))
-    second_result = json.loads(open(result_file).read().decode("utf-8"))
+    with open(result_file) as f:
+      second_result = json.load(f)
     second_result['result'].pop('date')

     expected_result = {
...
@@ -173,9 +174,9 @@ class RunPromise(GenericPromise):
       u'result': {
         u'failed': False, u'message': u'success', u'type': u'Test Result'
       },
-      u'execution-time': 0.05,
       u'path': u'%s/my_second_promise.py' % self.promise_dir,
     }
+    self.assertTrue(second_result.pop('execution-time'))
     self.assertEqual(expected_result, second_result)

   def test_promise_generic_failed(self):
...
@@ -186,7 +187,8 @@ class RunPromise(GenericPromise):
     result_file = os.path.join(self.output_dir, 'my_promise.status.json')
     self.assertTrue(os.path.exists(result_file))
-    my_result = json.loads(open(result_file).read().decode("utf-8"))
+    with open(result_file) as f:
+      my_result = json.load(f)
     my_result['result'].pop('date')
     expected_result = {
...
@@ -194,9 +196,9 @@ class RunPromise(GenericPromise):
       u'result': {
         u'failed': True, u'message': u'failed', u'type': u'Test Result'
       },
-      u'execution-time': 0.05,
       u'path': u'%s/my_promise.py' % self.promise_dir,
     }
+    self.assertTrue(my_result.pop('execution-time'))
     self.assertEqual(expected_result, my_result)

   def test_promise_generic_status_change(self):
...
@@ -207,7 +209,8 @@ class RunPromise(GenericPromise):
     result_file = os.path.join(self.output_dir, 'my_promise.status.json')
     self.assertTrue(os.path.exists(result_file))
-    my_result = json.loads(open(result_file).read().decode("utf-8"))
+    with open(result_file) as f:
+      my_result = json.load(f)
     my_result['result'].pop('date')
     expected_result = {
...
@@ -215,9 +218,9 @@ class RunPromise(GenericPromise):
       u'result': {
         u'failed': True, u'message': u'failed', u'type': u'Test Result'
       },
-      u'execution-time': 0.05,
       u'path': u'%s/my_promise.py' % self.promise_dir,
     }
+    self.assertTrue(my_result.pop('execution-time'))
     self.assertEqual(expected_result, my_result)

     os.system('rm %s/*.pyc' % self.promise_dir)
...
@@ -226,7 +229,8 @@ class RunPromise(GenericPromise):
     promise_runner2 = MonitorPromiseLauncher(parser)
     promise_runner2.start()
-    my_result = json.loads(open(result_file).read().decode("utf-8"))
+    with open(result_file) as f:
+      my_result = json.load(f)
     my_result['result'].pop('date')
     expected_result = {
...
@@ -234,9 +238,9 @@ class RunPromise(GenericPromise):
       u'result': {
         u'failed': False, u'message': u'success', u'type': u'Test Result'
       },
-      u'execution-time': 0.05,
       u'path': u'%s/my_promise.py' % self.promise_dir,
     }
+    self.assertTrue(my_result.pop('execution-time'))
     self.assertEqual(expected_result, my_result)

   def test_promise_generic_periodicity(self):
...
@@ -287,7 +291,8 @@ class RunPromise(GenericPromise):
     result_file = os.path.join(self.output_dir, 'promise_1.status.json')
     self.assertTrue(os.path.exists(result_file))
-    result1 = json.loads(open(result_file).read().decode("utf-8"))
+    with open(result_file) as f:
+      result1 = json.load(f)
     start_date = datetime.strptime(result1['result'].pop('date'), '%Y-%m-%dT%H:%M:%S+0000')
     expected_result = {
...
@@ -295,7 +300,6 @@ class RunPromise(GenericPromise):
       u'result': {
         u'failed': False, u'message': u'success', u'type': u'Test Result'
      },
-      u'execution-time': 0.05,
       u'path': u'%s' % promise,
     }
...
@@ -303,8 +307,10 @@ class RunPromise(GenericPromise):
     parser = self.getPromiseParser(force=True)
     promise_runner = MonitorPromiseLauncher(parser)
     promise_runner.start()
-    result2 = json.loads(open(result_file).read().decode("utf-8"))
+    with open(result_file) as f:
+      result2 = json.load(f)
     start_date2 = datetime.strptime(result2['result'].pop('date'), '%Y-%m-%dT%H:%M:%S+0000')
+    self.assertTrue(result2.pop('execution-time'))
     self.assertEqual(expected_result, result2)

   def test_promise_two_folder(self):
...
@@ -319,7 +325,8 @@ class RunPromise(GenericPromise):
     result2_file = os.path.join(self.output_dir, 'promise_2.status.json')
     self.assertTrue(os.path.exists(result_file))
     self.assertTrue(os.path.exists(result2_file))
-    result1 = json.loads(open(result_file).read().decode("utf-8"))
+    with open(result_file) as f:
+      result1 = json.load(f)
     start_date = datetime.strptime(result1['result'].pop('date'), '%Y-%m-%dT%H:%M:%S+0000')
     expected_result = {
...
@@ -327,12 +334,13 @@ class RunPromise(GenericPromise):
       u'result': {
         u'failed': False, u'message': u'success', u'type': u'Test Result'
       },
-      u'execution-time': 0.05,
       u'path': u'%s' % promise,
     }
+    self.assertTrue(result1.pop('execution-time'))
     self.assertEqual(expected_result, result1)
-    result2 = json.loads(open(result2_file).read())
+    with open(result2_file) as f:
+      result2 = json.load(f)
     start_date2 = datetime.strptime(result2['result'].pop('date'), '%Y-%m-%dT%H:%M:%S+0000')
     expected_result = {
...
@@ -340,9 +348,9 @@ class RunPromise(GenericPromise):
       u'result': {
         u'failed': False, u'message': u'success', u'type': u'Test Result'
       },
-      u'execution-time': 0.05,
       u'path': u'%s' % promise2,
     }
+    self.assertTrue(result2.pop('execution-time'))
     self.assertEqual(expected_result, result2)

   def test_promise_NOK(self):
...
@@ -353,23 +361,26 @@ class RunPromise(GenericPromise):
     result_file = os.path.join(self.output_dir, 'promise_1.status.json')
     self.assertTrue(os.path.exists(result_file))
-    result1 = json.loads(open(result_file).read().decode("utf-8"))
+    with open(result_file) as f:
+      result1 = json.load(f)
     result1['result'].pop('date')
     expected_result = {
       u'title': u'promise_1', u'name': u'promise_1',
       u'result': {
         u'failed': True, u'message': u'failed', u'type': u'Test Result'
       },
-      u'execution-time': 0.05,
       u'path': u'%s' % promise,
     }
+    self.assertTrue(result1.pop('execution-time'))
     self.assertEqual(expected_result, result1)

     # second run
     promise_runner = MonitorPromiseLauncher(parser)
     promise_runner.start()
-    result2 = json.loads(open(result_file).read().decode("utf-8"))
+    with open(result_file) as f:
+      result2 = json.load(f)
     result2['result'].pop('date')
+    self.assertTrue(result2.pop('execution-time'))
     self.assertEqual(expected_result, result2)

   def test_promise_mixed(self):
...
@@ -380,16 +391,17 @@ class RunPromise(GenericPromise):
     result_file = os.path.join(self.output_dir, 'promise_1.status.json')
     self.assertTrue(os.path.exists(result_file))
-    result1 = json.loads(open(result_file).read().decode("utf-8"))
+    with open(result_file) as f:
+      result1 = json.load(f)
     result1['result'].pop('date')
     expected_result = {
       u'title': u'promise_1', u'name': u'promise_1',
       u'result': {
         u'failed': False, u'message': u'success', u'type': u'Test Result'
       },
-      u'execution-time': 0.05,
       u'path': u'%s' % promise,
     }
+    self.assertTrue(result1.pop('execution-time'))
     self.assertEqual(expected_result, result1)

     # second run with failure
...
@@ -401,7 +413,9 @@ class RunPromise(GenericPromise):
     promise_runner = MonitorPromiseLauncher(parser)
     promise_runner.start()
-    result2 = json.loads(open(result_file).read().decode("utf-8"))
+    with open(result_file) as f:
+      result2 = json.load(f)
     result2['result'].pop('date')
+    self.assertTrue(result2.pop('execution-time'))
     self.assertEqual(expected_result, result2)
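Note: the promise template above switches from the zope.interface class-advice form, which relies on Python-2-only frame magic, to the @implementer class decorator, which works on both versions. Sketch of the resulting promise skeleton, with an illustrative periodicity value:

  from zope.interface import implementer
  from slapos.grid.promise import interface
  from slapos.grid.promise import GenericPromise

  @implementer(interface.IPromise)
  class RunPromise(GenericPromise):
    def __init__(self, config):
      GenericPromise.__init__(self, config)
      self.setPeriodicity(minute=2)  # value chosen for illustration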
slapos/test/promise/plugin/test_check_file_state.py

...
@@ -32,6 +32,7 @@ import tempfile
 import os
 import unittest
 import shutil
+import six

 class TestCheckFileState(TestPromisePluginMixin):
...
@@ -69,8 +70,10 @@ extra_config_dict = {
     self.assertEqual(result['result']['failed'], True)
     self.assertEqual(
       result['result']['message'],
-      "ERROR IOError(21, 'Is a directory') "
-      "during opening and reading file %r" % (filename,)
+      "ERROR %s(21, 'Is a directory') "
+      "during opening and reading file %r" % (
+        "IsADirectoryError" if six.PY3 else "IOError", filename)
     )

   def test_check_file_not_exists(self):
...
@@ -88,8 +91,10 @@ extra_config_dict = {
     self.assertEqual(result['result']['failed'], True)
     self.assertEqual(
       result['result']['message'],
-      "ERROR IOError(2, 'No such file or directory') "
-      "during opening and reading file %r" % (filename,)
+      "ERROR %s(2, 'No such file or directory') "
+      "during opening and reading file %r" % (
+        "FileNotFoundError" if six.PY3 else "IOError", filename)
     )

   def test_check_file_empty(self):
...
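Note: Python 3 reports the old numbered IOError cases under dedicated names (errno 2 becomes FileNotFoundError, errno 21 IsADirectoryError), so the expected message is built per interpreter. Small sketch of the same check (the path is illustrative):

  import six

  try:
    open('/nonexistent/path')
  except IOError as e:
    assert type(e).__name__ == ("FileNotFoundError" if six.PY3 else "IOError")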
slapos/test/promise/plugin/test_check_url_available.py

...
@@ -27,6 +27,7 @@
 from slapos.grid.promise import PromiseError
 from slapos.test.promise.plugin import TestPromisePluginMixin
+from slapos.util import str2bytes

 from cryptography import x509
 from cryptography.hazmat.backends import default_backend
...
@@ -34,12 +35,13 @@ from cryptography.hazmat.primitives import hashes
 from cryptography.hazmat.primitives import serialization
 from cryptography.hazmat.primitives.asymmetric import rsa
 from cryptography.x509.oid import NameOID
-import BaseHTTPServer
+from six.moves import BaseHTTPServer
 import datetime
 import ipaddress
 import json
 import multiprocessing
 import os
+import six
 import ssl
 import tempfile
 import time
...
@@ -66,10 +68,10 @@ def createCSR(common_name, ip=None):
   subject_alternative_name_list = []
   if ip is not None:
     subject_alternative_name_list.append(
-      x509.IPAddress(ipaddress.ip_address(unicode(ip)))
+      x509.IPAddress(ipaddress.ip_address(ip))
     )
   csr = x509.CertificateSigningRequestBuilder().subject_name(x509.Name([
-    x509.NameAttribute(NameOID.COMMON_NAME, unicode(common_name)),
+    x509.NameAttribute(NameOID.COMMON_NAME, common_name),
   ]))

   if len(subject_alternative_name_list):
...
@@ -89,10 +91,10 @@ class CertificateAuthority(object):
     public_key = self.key.public_key()
     builder = x509.CertificateBuilder()
     builder = builder.subject_name(x509.Name([
-      x509.NameAttribute(NameOID.COMMON_NAME, unicode(common_name)),
+      x509.NameAttribute(NameOID.COMMON_NAME, common_name),
     ]))
     builder = builder.issuer_name(x509.Name([
-      x509.NameAttribute(NameOID.COMMON_NAME, unicode(common_name)),
+      x509.NameAttribute(NameOID.COMMON_NAME, common_name),
     ]))
     builder = builder.not_valid_before(
       datetime.datetime.utcnow() - datetime.timedelta(days=2))
...
@@ -147,16 +149,19 @@ class TestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
     response = {
       'Path': self.path,
     }
-    self.wfile.write(json.dumps(response, indent=2))
+    self.wfile.write(str2bytes(json.dumps(response, indent=2)))

 class CheckUrlAvailableMixin(TestPromisePluginMixin):
   @classmethod
   def setUpClass(cls):
-    cls.another_server_ca = CertificateAuthority("Another Server Root CA")
-    cls.test_server_ca = CertificateAuthority("Test Server Root CA")
+    cls.another_server_ca = CertificateAuthority(u"Another Server Root CA")
+    cls.test_server_ca = CertificateAuthority(u"Test Server Root CA")
+    ip = SLAPOS_TEST_IPV4.decode('utf-8') \
+      if isinstance(SLAPOS_TEST_IPV4, bytes) \
+      else SLAPOS_TEST_IPV4
     key, key_pem, csr, csr_pem = createCSR(
-      "testserver.example.com", SLAPOS_TEST_IPV4)
+      u"testserver.example.com", ip)
     _, cls.test_server_certificate_pem = cls.test_server_ca.signCSR(csr)

     cls.test_server_certificate_file = tempfile.NamedTemporaryFile(
...
@@ -175,17 +180,17 @@ class CheckUrlAvailableMixin(TestPromisePluginMixin):
       cls.test_server_ca.certificate_pem)
     cls.test_server_ca_certificate_file.close()

-    def server():
-      server = BaseHTTPServer.HTTPServer(
-        (SLAPOS_TEST_IPV4, SLAPOS_TEST_IPV4_PORT),
-        TestHandler)
-      server.socket = ssl.wrap_socket(
-        server.socket,
-        certfile=cls.test_server_certificate_file.name,
-        server_side=True)
-      server.serve_forever()
+    server = BaseHTTPServer.HTTPServer(
+      (SLAPOS_TEST_IPV4, SLAPOS_TEST_IPV4_PORT),
+      TestHandler)
+    server.socket = ssl.wrap_socket(
+      server.socket,
+      certfile=cls.test_server_certificate_file.name,
+      server_side=True)

     cls.server_process = multiprocessing.Process(
-      target=server)
+      target=server.serve_forever)
     cls.server_process.start()

   @classmethod
...
@@ -269,7 +274,8 @@ class TestCheckUrlAvailable(CheckUrlAvailableMixin):
     self.assertEqual(result['result']['failed'], True)
     self.assertEqual(
       result['result']['message'],
-      "ERROR: Invalid URL u'https://': No host supplied"
+      "ERROR: Invalid URL %s'https://': No host supplied" % (
+        '' if six.PY3 else 'u')
     )

   def test_check_url_malformed(self):
...
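Note: BaseHTTPRequestHandler.wfile is a binary stream on Python 3, so serialized JSON has to be encoded before being written; str2bytes from slapos.util is effectively a no-op on Python 2. Illustrative helper, not taken from the commit:

  import json
  from slapos.util import str2bytes

  def send_json(wfile, payload):
    # json.dumps() returns str; the handler's output stream wants bytes on Python 3.
    wfile.write(str2bytes(json.dumps(payload, indent=2)))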
slapos/test/promise/test_apache_mpm_watchdog.py

...
@@ -87,7 +87,7 @@ class TestApacheMPMWatchdog(unittest.TestCase):
     self.assertEqual(None,
       getServerStatus("http://localhost/",
         "user", "password"))
-    self.assertNotEquals(None,
+    self.assertNotEqual(None,
       getServerStatus("https://www.erp5.com/",
         None, None))
...
slapos/test/test_agent.py
View file @ 1c8269b2
...
@@ -29,58 +29,59 @@ import unittest
 import os.path

 from slapos.agent.agent import AutoSTemp, TestMap
+from collections import OrderedDict

-TESTMAP_DICT = {
-  "test-wendelin-software-release": {
-      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/wendelin/software.cfg",
-      "supply_computer": "COMP-2",
-      "group": "COMP-2",
-      "title": "test-wendelin-software-release"},
-  "test-agent-software-release": {
-      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/agent/software.cfg",
-      "supply_computer": "COMP-2",
-      "group": "COMP-2",
-      "title": "test-agent-software-release"},
-  "test-powerdns-software-release": {
-      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/powerdns/software.cfg",
-      "supply_computer": "COMP-2",
-      "group": "COMP-2",
-      "title": "test-powerdns-software-release"},
-  "test-monitor-software-release": {
-      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/monitor/software.cfg",
-      "supply_computer": "COMP-2",
-      "group": "COMP-2",
-      "title": "test-monitor-software-release"},
-  "test-slapos-master-software-release": {
-      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/slapos-master/software.cfg",
-      "supply_computer": "COMP-1",
-      "group": "COMP-1",
-      "title": "test-slapos-master-software-release"},
-  "test-webrunner-software-release": {
-      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/slaprunner/software.cfg",
-      "supply_computer": "COMP-1",
-      "group": "COMP-1",
-      "title": "test-webrunner-software-release"},
-  "test-re6stnetmaster-software-release": {
-      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/re6stnet/software.cfg",
-      "supply_computer": "COMP-2",
-      "group": "COMP-2",
-      "title": "test-re6stnetmaster-software-release"},
-  "test-erp5testnode-software-release": {
-      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/erp5testnode/software.cfg",
-      "supply_computer": "COMP-1",
-      "group": "COMP-1",
-      "title": "test-erp5testnode-software-release"},
-  "test-apache-frontend-software-release": {
-      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/apache-frontend/software.cfg",
-      "supply_computer": "COMP-1",
-      "group": "COMP-1",
-      "title": "test-apache-frontend-software-release"},
-  "test-nayuos-software-release": {
-      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/nayuos/software.cfg",
-      "supply_computer": "COMP-1",
-      "group": "COMP-1",
-      "title": "test-nayuos-software-release"}
-  }
+TESTMAP_DICT = OrderedDict([
+  ("test-apache-frontend-software-release", {
+      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/apache-frontend/software.cfg",
+      "supply_computer": "COMP-1",
+      "group": "COMP-1",
+      "title": "test-apache-frontend-software-release"}),
+  ("test-slapos-master-software-release", {
+      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/slapos-master/software.cfg",
+      "supply_computer": "COMP-1",
+      "group": "COMP-1",
+      "title": "test-slapos-master-software-release"}),
+  ("test-erp5testnode-software-release", {
+      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/erp5testnode/software.cfg",
+      "supply_computer": "COMP-1",
+      "group": "COMP-1",
+      "title": "test-erp5testnode-software-release"}),
+  ("test-webrunner-software-release", {
+      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/slaprunner/software.cfg",
+      "supply_computer": "COMP-1",
+      "group": "COMP-1",
+      "title": "test-webrunner-software-release"}),
+  ("test-agent-software-release", {
+      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/agent/software.cfg",
+      "supply_computer": "COMP-2",
+      "group": "COMP-2",
+      "title": "test-agent-software-release"}),
+  ("test-powerdns-software-release", {
+      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/powerdns/software.cfg",
+      "supply_computer": "COMP-2",
+      "group": "COMP-2",
+      "title": "test-powerdns-software-release"}),
+  ("test-nayuos-software-release", {
+      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/nayuos/software.cfg",
+      "supply_computer": "COMP-1",
+      "group": "COMP-1",
+      "title": "test-nayuos-software-release"}),
+  ("test-wendelin-software-release", {
+      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/wendelin/software.cfg",
+      "supply_computer": "COMP-2",
+      "group": "COMP-2",
+      "title": "test-wendelin-software-release"}),
+  ("test-monitor-software-release", {
+      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/monitor/software.cfg",
+      "supply_computer": "COMP-2",
+      "group": "COMP-2",
+      "title": "test-monitor-software-release"}),
+  ("test-re6stnetmaster-software-release", {
+      "url": "https://lab.nexedi.com/nexedi/slapos/raw/1.0/software/re6stnet/software.cfg",
+      "supply_computer": "COMP-2",
+      "group": "COMP-2",
+      "title": "test-re6stnetmaster-software-release"}),
+])
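Why the move to OrderedDict: on the interpreters targeted here (Python 2.7 and CPython before 3.7) plain dict iteration order is not guaranteed, so any test that walks TESTMAP_DICT in sequence needs an explicitly ordered mapping. A minimal sketch with made-up keys:

    from collections import OrderedDict

    # Iteration follows insertion order on every supported interpreter.
    releases = OrderedDict([
        ("first-release", {"group": "COMP-1"}),
        ("second-release", {"group": "COMP-2"}),
    ])
    assert list(releases) == ["first-release", "second-release"]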
...
@@ -158,7 +159,8 @@ class TestAutoSTemp(unittest.TestCase):
     removes it when deleted.
     """
     f = AutoSTemp("foo")
-    self.assertEqual(open(f.name, "r").read(), "foo")
+    with open(f.name, "r") as f_:
+      self.assertEqual(f_.read(), "foo")
     fname = f.name
     self.assertTrue(os.path.isfile(fname))
     del f
...
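The open(...).read() rewrite matters on Python 3, where file objects that are garbage-collected while still open trigger ResourceWarning; a context manager closes the handle deterministically. A small sketch with a throwaway file:

    with open("example.txt", "w") as f:
        f.write("foo")
    # The handle is closed as soon as the block exits, warning-free on Python 3.
    with open("example.txt", "r") as f:
        assert f.read() == "foo"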
slapos/test/test_checkfeedaspromise.py
View file @ 1c8269b2
...
@@ -87,10 +87,10 @@ class TestCheckFeedAsPromise(unittest.TestCase):
     feed = self.generateKOFeed()
     option.ko_pattern_list = ['Error']
-    self.assertNotEquals(checkFeedAsPromise(feed, option), "")
+    self.assertNotEqual(checkFeedAsPromise(feed, option), "")
     option.title, option.description = False, True
     option.ko_pattern_list = ['FAILURE', 'Error']
-    self.assertNotEquals(checkFeedAsPromise(feed, option), "")
+    self.assertNotEqual(checkFeedAsPromise(feed, option), "")

   def test_ifNoOKPatternFoundErrorIsRaised(self):
...
@@ -100,7 +100,7 @@ class TestCheckFeedAsPromise(unittest.TestCase):
     # If no time buffer, then not OK is always wrong
     option.ok_pattern_list = ['OK']
-    self.assertNotEquals(len(checkFeedAsPromise(feed, option)), 0)
+    self.assertNotEqual(len(checkFeedAsPromise(feed, option)), 0)

     # if time buffer, then not OK is wrong only after buffer expires
     extra_item = {
...
@@ -115,7 +115,7 @@ class TestCheckFeedAsPromise(unittest.TestCase):
     # shorter buffer, we want to raise an error
     option.time_buffer = 1800
-    self.assertNotEquals(len(checkFeedAsPromise(feed, option)), 0)
+    self.assertNotEqual(len(checkFeedAsPromise(feed, option)), 0)

   def test_noItemInTheFeedIsNotAnError(self):
...
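The assertNotEquals spelling used before is only a deprecated alias of assertNotEqual (dropped in recent Python 3 releases), hence the rename throughout. Illustrative only:

    import unittest

    class ExampleTest(unittest.TestCase):
        def test_not_equal(self):
            # Canonical name; assertNotEquals is a deprecated alias.
            self.assertNotEqual("KO", "OK")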
slapos/test/test_generatefeed.py
View file @ 1c8269b2
...
@@ -7,6 +7,7 @@ import shutil
 import tempfile
 import time
 import unittest
+import six

 from slapos.generatefeed import generateFeed
...
@@ -120,7 +121,7 @@ class TestGenerateFeed(unittest.TestCase):
     for i in range(5-3, 5): # older items (from 1 to 2) have been deleted
       expected_remaining_item_list.append('%s.item' % i)
-    self.assertItemsEqual(remaining_status_item_list,
-                          expected_remaining_item_list)
+    six.assertCountEqual(self, remaining_status_item_list,
+                         expected_remaining_item_list)

 if __name__ == '__main__':
...
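assertItemsEqual only exists in Python 2's unittest; Python 3 renamed it assertCountEqual, and six.assertCountEqual(test_case, first, second) picks whichever the running interpreter provides. A minimal sketch:

    import unittest
    import six

    class ExampleTest(unittest.TestCase):
        def test_same_items_any_order(self):
            # Compares element counts, ignoring order.
            six.assertCountEqual(self, ["b.item", "a.item"], ["a.item", "b.item"])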
slapos/test/test_qemuqmpclient.py
View file @ 1c8269b2
...
@@ -140,7 +140,7 @@ class TestQemuQMPWrapper(unittest.TestCase):
     elif message['execute'] == 'query-memory-devices':
       memory_list = []
       added_mem = self.readChange('dimm') + self.hotplugged_memory_amount
-      slot_amount = added_mem / self.memory_slot_size
+      slot_amount = added_mem // self.memory_slot_size
       for i in range(slot_amount, 0, -1):
         memory_list.append({
           u'data': {
...
@@ -159,7 +159,7 @@ class TestQemuQMPWrapper(unittest.TestCase):
     elif message['execute'] == 'query-memdev':
       memory_list = []
       added_mem = self.readChange('dimm') + self.hotplugged_memory_amount
-      slot_amount = added_mem / self.memory_slot_size
+      slot_amount = added_mem // self.memory_slot_size
       for i in range(slot_amount, 0, -1):
         memory_list.append({
           u'dump': True,
...
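The / to // change is needed because Python 3's true division always returns a float, while range() requires integers; floor division keeps the slot count an int on both interpreters. A quick sketch with made-up numbers:

    added_mem = 4096
    memory_slot_size = 512

    slot_amount = added_mem // memory_slot_size  # 8, an int on Python 2 and 3
    assert list(range(slot_amount, 0, -1)) == [8, 7, 6, 5, 4, 3, 2, 1]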
slapos/test/test_runner.py
View file @ 1c8269b2
...
@@ -3,7 +3,6 @@ import os
 import string
 import random
 import supervisor
-import thread
 import unittest
...
@@ -29,14 +28,6 @@ class TestRunnerBackEnd(unittest.TestCase):
     if os.path.exists(garbage_file):
       os.remove(garbage_file)

-  def _startSupervisord(self):
-    cwd = os.getcwd()
-    supervisord_config_file = os.path.join(cwd, 'supervisord.conf')
-    open(supervisord_config_file, 'w').write("""
-""")
-    supervisord = supervisor.supervisord.Supervisord('-c', supervisord_config_file)
-    thread.start_new_thread()
-
   def test_UserCanLoginAndUpdateCredentials(self):
     """
     * Create a user with createNewUser
...
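The deleted helper was dead code and depended on the Python 2-only thread module (renamed _thread in Python 3). If a background runner were still wanted, the portable spelling would go through threading, e.g. (hypothetical target function):

    import threading

    def run_supervisord():
        pass  # placeholder for the real entry point

    worker = threading.Thread(target=run_supervisord)
    worker.daemon = True
    worker.start()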
slapos/test/test_runner_exporter.py
View file @ 1c8269b2
...
@@ -5,7 +5,6 @@ import time
 import unittest

 from slapos.resilient import runner_exporter
-from StringIO import StringIO

 tested_instance_cfg = """[buildout]
 installed_develop_eggs =
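The StringIO module only exists on Python 2 and the import was unused, so it is simply dropped. Where an in-memory file is still needed, io.StringIO / io.BytesIO (or six.StringIO) work on both versions, e.g.:

    from io import BytesIO, StringIO

    text_buf = StringIO(u"text data")   # unicode text on both major versions
    byte_buf = BytesIO(b"binary data")  # raw bytes
    assert text_buf.read() == u"text data"
    assert byte_buf.read() == b"binary data"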
...
@@ -75,7 +74,7 @@ class TestRunnerExporter(unittest.TestCase):
   def _createExecutableFile(self, path, content=''):
     self._createFile(path, content)
-    os.chmod(path, 0700)
+    os.chmod(path, 0o700)

   def _setUpFakeInstanceFolder(self):
     self._createFile('proxy.db')
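Octal literals written as 0700 are a syntax error on Python 3; the 0o700 form is valid on Python 2.6+ and Python 3 alike and means the same mode bits. A short sketch:

    import os
    import stat
    import tempfile

    fd, path = tempfile.mkstemp()
    os.close(fd)
    os.chmod(path, 0o700)  # rwx for the owner only
    assert stat.S_IMODE(os.stat(path).st_mode) == 0o700
    os.remove(path)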
...
@@ -232,9 +231,9 @@ class TestRunnerExporter(unittest.TestCase):
     self.assertEqual(
       runner_exporter.getBackupFilesModifiedDuringExportList(config, time.time() - 5),
-      ['instance/slappart0/srv/backup/data.dat',
-       'instance/slappart0/srv/backup/important_logs/this_is_a.log',
-       'instance/slappart1/srv/backup/data.dat']
+      [b'instance/slappart0/srv/backup/data.dat',
+       b'instance/slappart0/srv/backup/important_logs/this_is_a.log',
+       b'instance/slappart1/srv/backup/data.dat']
     )
     time.sleep(2)
     self.assertFalse(
...
@@ -243,5 +242,5 @@ class TestRunnerExporter(unittest.TestCase):
     self._createFile('instance/slappart1/srv/backup/bakckup.data', 'my backup')
     self.assertEqual(
       runner_exporter.getBackupFilesModifiedDuringExportList(config, time.time() - 1),
-      ['instance/slappart1/srv/backup/bakckup.data']
+      [b'instance/slappart1/srv/backup/bakckup.data']
     )
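The expected paths become bytes literals because, presumably, the export helper now walks the tree with a bytes argument, and on Python 3 os.walk returns the same type it was given (bytes in, bytes out). A minimal sketch of that behaviour:

    import os

    # With a bytes argument, dirpath and the yielded names are bytes on Python 3;
    # on Python 2 both spellings are plain str anyway.
    for dirpath, dirnames, filenames in os.walk(b'.'):
        assert isinstance(dirpath, bytes)
        break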
slapos/test/test_securedelete.py
View file @ 1c8269b2
...
@@ -58,32 +58,32 @@ class TestSecureDelete(unittest.TestCase):
     passes = 2 + 1 # Option -z is used, plus one more pass
     result = shred(options)
     self.assertFalse(os.path.exists(self.remove_file))
-    self.assertTrue("pass %s/%s" % (passes, passes) in result)
+    self.assertIn("pass %d/%d" % (passes, passes), result)
-    self.assertTrue("%s: removed" % os.path.basename(self.remove_file) in result)
+    self.assertIn("%s: removed" % os.path.basename(self.remove_file), result)

   def test_secure_remove_file_keep_file(self):
     options = getAgumentParser().parse_args(['-n', '2', '-z', '--file', self.remove_file])
     passes = 2 + 1 # Option -z is used, plus one more pass
     result = shred(options)
     self.assertTrue(os.path.exists(self.remove_file))
-    self.assertTrue("pass %s/%s" % (passes, passes) in result)
+    self.assertIn("pass %d/%d" % (passes, passes), result)
-    self.assertFalse("%s: removed" % os.path.basename(self.remove_file) in result)
+    self.assertNotIn("%s: removed" % os.path.basename(self.remove_file), result)

   def test_secure_remove_file_non_zero(self):
     options = getAgumentParser().parse_args(['-n', '2', '-u', '--file', self.remove_file])
     passes = 2
     result = shred(options)
     self.assertFalse(os.path.exists(self.remove_file))
-    self.assertTrue("pass %s/%s" % (passes, passes) in result)
+    self.assertIn("pass %d/%d" % (passes, passes), result)
-    self.assertTrue("%s: removed" % os.path.basename(self.remove_file) in result)
+    self.assertIn("%s: removed" % os.path.basename(self.remove_file), result)

   def test_secure_remove_file_check_exist(self):
     options = getAgumentParser().parse_args(['-n', '2', '-u', '-s', '--file', 'random.txt', self.remove_file])
     passes = 2
     result = shred(options)
     self.assertFalse(os.path.exists(self.remove_file))
-    self.assertTrue("pass %s/%s" % (passes, passes) in result)
+    self.assertIn("pass %d/%d" % (passes, passes), result)
-    self.assertTrue("%s: removed" % os.path.basename(self.remove_file) in result)
+    self.assertIn("%s: removed" % os.path.basename(self.remove_file), result)

   def test_secure_remove_file_check_exist_false(self):
     options = getAgumentParser().parse_args(['-n', '2', '-u', '--file', 'random.txt'])
...
@@ -99,19 +99,19 @@ class TestSecureDelete(unittest.TestCase):
     # shred removed link and target file
     self.assertFalse(os.path.exists(self.remove_file))
     self.assertFalse(os.path.exists(self.link_name))
-    self.assertTrue("pass %s/%s" % (passes, passes) in result)
+    self.assertIn("pass %d/%d" % (passes, passes), result)
-    self.assertTrue("%s: removed" % os.path.basename(self.remove_file) in result)
+    self.assertIn("%s: removed" % os.path.basename(self.remove_file), result)

   def test_secure_remove_file_multiple_files(self):
     options = getAgumentParser().parse_args(['-n', '2', '-u', '-z', '--file', self.remove_file, self.remove_file2])
     passes = 2 + 1 # Option -z is used, plus one more pass
     result = shred(options)
     self.assertFalse(os.path.exists(self.remove_file))
-    self.assertTrue("pass %s/%s" % (passes, passes) in result)
+    self.assertIn("pass %d/%d" % (passes, passes), result)
-    self.assertTrue("%s: removed" % os.path.basename(self.remove_file) in result)
+    self.assertIn("%s: removed" % os.path.basename(self.remove_file), result)
     self.assertFalse(os.path.exists(self.remove_file2))
-    self.assertTrue("%s: removed" % os.path.basename(self.remove_file2) in result)
+    self.assertIn("%s: removed" % os.path.basename(self.remove_file2), result)

 if __name__ == '__main__':
   unittest.main()
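A closing note on the assertion style above: assertIn(a, b) replaces assertTrue(a in b) so that a failure reports both operands instead of a bare "False is not true", and %d makes the integer pass count explicit. Illustrative only (the shred output is made up):

    import unittest

    class ExampleTest(unittest.TestCase):
        def test_membership(self):
            passes = 3
            output = "shred: pass 3/3 (000000)\nexample.txt: removed"
            self.assertIn("pass %d/%d" % (passes, passes), output)
            self.assertIn("example.txt: removed", output)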