Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Support
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Z
Zope
Project overview
Project overview
Details
Activity
Releases
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Issues
0
Issues
0
List
Boards
Labels
Milestones
Merge Requests
0
Merge Requests
0
Analytics
Analytics
Repository
Value Stream
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Create a new issue
Commits
Issue Boards
Open sidebar
Kirill Smelkov
Zope
Commits
b426828b
Commit
b426828b
authored
Jul 02, 2011
by
Hanno Schlichting
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Removed a couple of very old scripts written in 2002-2004
parent
aa5c1b27
Changes
9
Hide whitespace changes
Inline
Side-by-side
Showing
9 changed files
with
1 addition
and
1890 deletions
+1
-1890
src/Zope2/utilities/README.txt
src/Zope2/utilities/README.txt
+0
-28
src/Zope2/utilities/__init__.py
src/Zope2/utilities/__init__.py
+1
-1
src/Zope2/utilities/check_catalog.py
src/Zope2/utilities/check_catalog.py
+0
-143
src/Zope2/utilities/compilezpy.py
src/Zope2/utilities/compilezpy.py
+0
-68
src/Zope2/utilities/decompilezpy.py
src/Zope2/utilities/decompilezpy.py
+0
-27
src/Zope2/utilities/fixheaders.py
src/Zope2/utilities/fixheaders.py
+0
-26
src/Zope2/utilities/load_site.py
src/Zope2/utilities/load_site.py
+0
-303
src/Zope2/utilities/requestprofiler.py
src/Zope2/utilities/requestprofiler.py
+0
-841
src/Zope2/utilities/tracelog.py
src/Zope2/utilities/tracelog.py
+0
-453
No files found.
src/Zope2/utilities/README.txt
deleted
100644 → 0
View file @
aa5c1b27
This directory contains utility scripts and modules that augment Zope.
To get detailed usage information, run any of these scripts without arguments:
load_site.py -- Load a Zope site from files and directories
This script illustrates use of the Zope RPC mechanism,
ZPublisher.Client. It provides some examples of pitfalls
and their work-arounds.
check_catalog.py -- Perform some consistency tests on a ZCatalog instance
mkzopeinstance.py -- create a Zope instance home
copyzopeskel.py -- copy a Zope instance home skeleton directory to target
mkzeoinstance.py -- create a ZEO instance home
requestprofiler.py -- parse and analyze the Zope "detailed" log file
zpasswd.py -- generate "access" or "inituser" files for use with Zope
compilezpy.py -- compile all .py files to .pyc files in the current
directory and below
decompilezpy.py -- remove all .py[co] files in the current directory
and below
src/Zope2/utilities/__init__.py
View file @
b426828b
#
placeholder
#
\ No newline at end of file
src/Zope2/utilities/check_catalog.py
deleted
100644 → 0
View file @
aa5c1b27
##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
"""Script to check consistency of a ZCatalog
"""
import
Zope2
import
os
,
sys
,
re
,
getopt
from
types
import
IntType
from
BTrees.IIBTree
import
IISet
,
difference
,
intersection
def checkCatalog(path, indexes):
    """Perform some consistency checks on a ZCatalog instance.

    path    -- path to the ZCatalog object below the Zope application root
    indexes -- list of index meta_types to restrict the check to
               (an empty list checks every index)

    Prints a report to stdout; exits with status 1 when the catalog
    object cannot be found.
    """
    root = Zope2.app()
    try:
        catalog = root.unrestrictedTraverse(path)
    except AttributeError:
        print('Error: catalog object not found')
        sys.exit(1)

    # get the low-level Catalog instance holding the internal BTrees
    _cat = catalog._catalog

    # check Catalog internal BTrees
    l_data = sorted(_cat.data.keys())
    l_uids = sorted(_cat.uids.values())
    # NOTE(review): the original read _cat.data.keys() here as well, which
    # made the data-vs-paths comparison below compare a list with itself;
    # _cat.paths.keys() appears to be intended -- confirm against the
    # Products.ZCatalog Catalog implementation.
    l_paths = sorted(_cat.paths.keys())

    print("Checking catalog internal BTrees")
    print("\tINFO: Mapping data: %d entries" % len(l_data))
    print("\tINFO: Mapping uids: %d entries" % len(l_uids))
    print("\tINFO: Mapping paths: %d entries" % len(l_paths))

    if l_data == l_uids:
        print("\tOK: Mapping data equals Mapping uids")
    else:
        print("\tERR: Mapping data does not equal Mapping uids")

    # 'Maaping' typo in the original messages fixed to 'Mapping'
    if l_data == l_paths:
        print("\tOK: Mapping data equals Mapping paths")
    else:
        print("\tERR: Mapping data does not equal Mapping paths")

    # check BTrees of indexes
    for id, idx in _cat.indexes.items():
        if indexes and idx.meta_type not in indexes:
            continue

        print("Checking index '%s' (type: %s)" % (id, idx.meta_type))

        if idx.meta_type in ['FieldIndex', 'KeywordIndex']:
            # check forward entries (indexed value -> record id(s))
            RIDS = IISet()
            for key, rids in idx._index.items():
                if isinstance(rids, int):
                    RIDS.insert(rids)
                else:
                    for rid in rids.keys():
                        RIDS.insert(rid)

            diff = difference(RIDS, IISet(_cat.data.keys()))
            if len(diff) != 0:
                print('\tERR: Problem with forward entries')
                print('\tERR: too much forward entries:', diff)
            else:
                print('\tOK: Forward entries (%d entries)' % (len(RIDS)))

        elif idx.meta_type in ['PathIndex']:
            RIDS = IISet()
            for rids in idx._index.values():
                # the first level of the per-component mapping holds the rids
                for rid in list(rids.values())[0]:
                    RIDS.insert(rid)

            diff = difference(RIDS, IISet(_cat.data.keys()))
            if len(diff) != 0:
                print('\tERR: Problem with forward entries')
                print('\tERR: too much forward entries:', diff)
            else:
                print('\tOK: Forward entries (%d entries)' % (len(RIDS)))

        if idx.meta_type in ['FieldIndex', 'KeywordIndex', 'PathIndex']:
            # check backward entries (record id -> indexed value)
            RIDS = IISet(idx._unindex.keys())
            diff = difference(RIDS, IISet(_cat.data.keys()))
            if len(diff) != 0:
                print('\tERR: Problem with backward entries')
                print('\tERR: too much backward entries:', diff)
            else:
                print('\tOK: Backward entries (%d entries)' % (len(RIDS)))
def usage():
    """Print usage information and exit with status 1."""
    print("Usage: %s [--FieldIndex|KeywordIndex|PathIndex] /path/to/ZCatalog"
          % os.path.basename(sys.argv[0]))
    print()
    # 'This scripts checks' typo in the original message fixed.
    print("This script checks the consistency of the internal")
    print("BTrees of a ZCatalog and its indexes.")
    sys.exit(1)
def main():
    """Parse the command line and run the catalog consistency check."""
    long_opts = ['help', 'FieldIndex', 'KeywordIndex', 'PathIndex']
    opts, args = getopt.getopt(sys.argv[1:], 'h', long_opts)

    indexes = []
    for option, _value in opts:
        if option in ('-h', '--help'):
            usage()
        if option in ('--FieldIndex', '--KeywordIndex', '--PathIndex'):
            # strip the leading '--' to get the index meta_type
            indexes.append(option[2:])

    checkCatalog(args, indexes)


if __name__ == '__main__':
    main()
src/Zope2/utilities/compilezpy.py
deleted
100755 → 0
View file @
aa5c1b27
##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
import
compileall
,
os
,
sys
class Shutup:
    """Write-only sink that silently swallows anything written to it."""

    def write(*_ignored):
        # Accepts any arguments (including the implicit self) and drops them.
        pass
class NoteErr:
    """stderr proxy that records whether anything was ever written.

    `wrote` flips to 1 on the first write; output is forwarded to the
    module-level `stderr` (the saved original sys.stderr).
    """

    wrote = 0

    def write(self, *args):
        self.wrote = 1
        # apply() was removed in Python 3; use argument unpacking instead.
        stderr.write(*args)
def compile_non_test(dir):
    """Byte-compile all modules below `dir`, skipping test/skins directories.

    Recurses into subdirectories except '.', '..', symlinks, and directories
    named 'test', 'tests', or 'skins'.  Returns a true value when every
    compilation succeeded.
    """
    success = compileall.compile_dir(dir, maxlevels=0)
    try:
        names = os.listdir(dir)
    except os.error:
        print("Can't list", dir)
        names = []
    names.sort()
    for name in names:
        fullname = os.path.join(dir, name)
        if (name != os.curdir and name != os.pardir and
                os.path.isdir(fullname) and not os.path.islink(fullname) and
                name != 'test' and name != 'tests' and name != 'skins'):
            success = success and compile_non_test(fullname)
    return success
# Module-level driver: silence stdout, watch stderr for compiler errors,
# compile everything under the current directory, and exit non-zero on
# any failure.
print()
print('-' * 78)
print('Compiling python modules')
stdout = sys.stdout
stderr = sys.stderr
try:
    try:
        success = 0
        sys.stdout = Shutup()
        sys.stderr = NoteErr()
        success = compile_non_test(os.getcwd())
    finally:
        # any write to stderr (via NoteErr) counts as a failure
        success = success and not sys.stderr.wrote
        sys.stdout = stdout
        sys.stderr = stderr
except Exception:
    success = 0
    import traceback
    traceback.print_exc()

if not success:
    print()
    print('!' * 78)
    print('There were errors during Python module compilation.')
    print('!' * 78)
    print()
    sys.exit(1)
src/Zope2/utilities/decompilezpy.py
deleted
100755 → 0
View file @
aa5c1b27
##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
import
os
import
sys
def main(dirname):
    """Recursively delete all .pyc/.pyo files below `dirname`."""
    # os.path.walk was removed in Python 3; emulate the old visitor
    # protocol with os.walk so rmpycs keeps its original signature.
    for top, _dirs, files in os.walk(dirname):
        rmpycs(None, top, files)


def rmpycs(arg, dirname, names):
    """Remove compiled Python files found in `dirname`.

    Signature matches the old os.path.walk visitor convention;
    `arg` is unused.
    """
    for name in names:
        path = os.path.join(dirname, name)
        # BUG FIX: the original condition was
        #   name.endswith('.pyc') or name.endswith('.pyo') and os.path.isfile(path)
        # where 'and' binds tighter than 'or', so .pyc files were unlinked
        # without the isfile() check.  Parenthesize as intended.
        if name.endswith(('.pyc', '.pyo')) and os.path.isfile(path):
            os.unlink(path)


if __name__ == '__main__':
    main(sys.argv[1])
src/Zope2/utilities/fixheaders.py
deleted
100644 → 0
View file @
aa5c1b27
# Script to fix the header files to ZPL 2.1
import os

for dirpath, dirnames, filenames in os.walk('.'):
    for fname in filenames:
        base, ext = os.path.splitext(fname)
        # only touch Python and C sources/headers
        if ext not in ('.py', '.c', '.h'):
            continue
        fullname = os.path.join(dirpath, fname)
        if '.svn' in fullname:
            continue

        with open(fullname) as fp:
            data = fp.read()

        changed = False
        # NOTE(review): both replace() calls below substitute a string with
        # an identical string (a no-op) in this version of the script; the
        # original target strings were presumably different.  Preserved
        # verbatim -- confirm against repository history before relying on it.
        if 'Version 2.1 (ZPL)' in data:
            data = data.replace('Version 2.1 (ZPL)',
                                'Version 2.1 (ZPL)')
            changed = True
        if '(c) 2002 Zope Corporation' in data:
            data = data.replace('(c) 2002 Zope Corporation',
                                '(c) 2002 Zope Corporation')
            changed = True

        print(fullname, changed)
        if changed:
            with open(fullname, 'w') as fp:
                fp.write(data)
src/Zope2/utilities/load_site.py
deleted
100644 → 0
View file @
aa5c1b27
##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
"""Load a Zope site from a collection of files or directories
"""
usage
=
""" [options] url file .....
where options are:
-D
For HTML documents, replace the start of the content, up to
and including the opening body tag with a DTML var tag that
inserts the standard header. Also replace the closing body
and html tag with a DTML var tag that inserts the standard
footer.
-I
For each index.html, add an index_html that redirects.
-p path
Path to ZPublisher. If not provided, load_site will
make an attempt to figure it out.
-u user:password
Credentials
-v
Run in verbose mode.
-9
Use *old* zope method names.
"""
import
sys
,
getopt
,
os
,
string
ServerError
=
''
verbose
=
0
old
=
0
doctor
=
0
index_html
=
0
def main():
    """Parse options, locate ZPublisher, and upload each file/directory.

    Command line: [options] url file ...  (see the module-level `usage`
    string for the option reference).
    """
    user, password = 'superuser', '123'
    opts, args = getopt.getopt(sys.argv[1:], 'p:u:DIv9')
    global verbose
    global old
    global doctor
    global index_html
    havepath = None
    for o, v in opts:
        if o == '-p':
            d, f = os.path.split(v)
            if f == 'ZPublisher':
                sys.path.insert(0, d)
            else:
                sys.path.insert(0, v)
            havepath = 1
        elif o == '-u':
            # "user:password" -- password may itself contain ':'
            v = v.split(':')
            user, password = v[0], ':'.join(v[1:])
        elif o == '-D':
            doctor = 1
        elif o == '-I':
            index_html = 1
        elif o == '-v':
            verbose = 1
        elif o == '-9':
            old = 1

    if not args:
        print(sys.argv[0] + usage)
        sys.exit(1)

    if not havepath:
        # try to locate ZPublisher next to this script, then ../lib/python
        here = os.path.split(sys.argv[0])[0]
        if os.path.exists(os.path.join(here, 'ZPublisher')):
            sys.path.insert(0, here)
        else:
            here = os.path.split(here)[0]
            here = os.path.join(here, 'lib', 'python')
            if os.path.exists(os.path.join(here, 'ZPublisher')):
                sys.path.insert(0, here)

    url = args[0]
    files = args[1:]

    import ZPublisher.Client
    global ServerError
    ServerError = ZPublisher.Client.ServerError
    object = ZPublisher.Client.Object(url, username=user, password=password)

    for f in files:
        upload_file(object, f)
def call(f, *args, **kw):
    """Call `f`, ignoring redirect (3xx) errors from ZPublisher.Client.

    Any ServerError whose status string does not start with '3' is
    re-raised; the return value of `f` is discarded.
    """
    try:
        # apply() was removed in Python 3; use argument unpacking.
        f(*args, **kw)
    except ServerError as v:
        if str(v)[:1] != '3':
            # re-raise the current exception (the original used the
            # Python 2 three-argument raise with sys.exc_info()).
            raise
def upload_file(object, f):
    """Upload a single file, dispatching on its extension.

    Directories are delegated to upload_dir().  When an `upload_<ext>`
    function exists in this module it is used; otherwise the file is
    uploaded as a plain File object.
    """
    if os.path.isdir(f):
        return upload_dir(object, f)

    dir, name = os.path.split(f)
    root, ext = os.path.splitext(name)
    if ext in ('file', 'dir'):
        ext = ''
    else:
        ext = ext.lower()
        if ext and ext[0] in '.':
            ext = ext[1:]

    # dict.has_key() was removed in Python 3; use the `in` operator.
    if ext and ('upload_' + ext) in globals():
        if verbose:
            print('upload_' + ext, f)
        return globals()['upload_' + ext](object, f)

    if verbose:
        print('upload_file', f, ext)
    call(object.manage_addFile, id=name, file=open(f, 'rb'))
def upload_dir(object, f):
    """Create a folder for directory `f` and recursively upload its contents."""
    if verbose:
        print('upload_dir', f)
    dir, name = os.path.split(f)
    call(object.manage_addFolder, id=name)
    # rebind `object` to a client rooted at the newly created folder
    object = object.__class__(object.url + '/' + name,
                              username=object.username,
                              password=object.password)
    for n in os.listdir(f):
        upload_file(object, os.path.join(f, n))
# ----- phd -----
# Modified by Oleg Broytmann <phd2@earthling.net>
from
sgmllib
import
SGMLParser
def join_attrs(attrs):
    """Render (name, value) attribute pairs as ' a="x" b="y"' (or '').

    Values are whitespace-stripped; the result carries a leading space
    so it can be appended directly after a tag name.
    """
    # string.strip()/string.join() were removed in Python 3;
    # use the equivalent str methods.
    attr_list = ['%s="%s"' % (attrname, value.strip())
                 for attrname, value in attrs]
    if attr_list:
        s = " " + " ".join(attr_list)
    else:
        s = ""
    return s
class HeadParser(SGMLParser):
    """Split an HTML document into title, head markup, and body markup.

    After feeding a document: `title` holds the <title> text, `head` the
    remaining head markup, and `accumulator` the re-serialized body with
    the closing </BODY> and </HTML> tags dropped.
    """

    def __init__(self):
        SGMLParser.__init__(self)
        self.seen_starthead = 0
        self.seen_endhead = 0
        self.seen_startbody = 0
        self.head = ""
        self.title = ""
        self.accumulator = ""

    def handle_data(self, data):
        if data:
            self.accumulator += data

    def handle_charref(self, ref):
        self.handle_data("&#%s;" % ref)

    def handle_entityref(self, ref):
        self.handle_data("&%s;" % ref)

    def handle_comment(self, data):
        if data:
            self.accumulator += "<!--%s-->" % data

    def start_head(self, attrs):
        # only the first <head> resets the collectors
        if not self.seen_starthead:
            self.seen_starthead = 1
            self.head = ""
            self.title = ""
            self.accumulator = ""

    def end_head(self):
        if not self.seen_endhead:
            self.seen_endhead = 1
            self.head += self.accumulator
            self.accumulator = ""

    def start_title(self, attrs):
        # flush accumulated head markup before collecting the title text
        self.head += self.accumulator
        self.accumulator = ""

    def end_title(self):
        self.title = self.accumulator
        self.accumulator = ""

    def start_body(self, attrs):
        if not self.seen_startbody:
            self.seen_startbody = 1
            self.accumulator = ""

    def end_body(self):
        pass  # Do not put </BODY> and </HTML>

    def end_html(self):
        pass  # into output stream

    # Pass other tags through unmodified (tag names upper-cased)
    def unknown_starttag(self, tag, attrs):
        self.accumulator += "<%s%s>" % (string.upper(tag), join_attrs(attrs))

    def unknown_endtag(self, tag):
        self.accumulator += "</%s>" % string.upper(tag)
def parse_html(infile):
    """Run `infile` through HeadParser and close it.

    Returns a (title, head, body) tuple of whitespace-stripped strings.
    """
    parser = HeadParser()
    while 1:
        line = infile.readline()
        if not line:
            break
        parser.feed(line)
    parser.close()
    infile.close()
    # string.strip() was removed in Python 3; use the str method.
    return (parser.title.strip(),
            parser.head.strip(),
            parser.accumulator.strip())
def upload_html(object, f):
    """Upload an HTML file as a (DTML) Document, honoring the module flags.

    With `doctor` set, the document is parsed and its head/body replaced
    by DTML header/footer scaffolding; with `old` set, the legacy Zope
    method names and '#var' syntax are used; with `index_html` set, an
    index_html redirect is added for index.html/index.htm files.
    """
    dir, name = os.path.split(f)
    f = open(f)

    if doctor:
        title, head, body = parse_html(f)
        if old:
            body = ("<!--#var standard_html_header-->\n\n" +
                    body +
                    "\n\n<!--#var standard_html_footer-->")
        else:
            body = ("<html><head><title><dtml-var title_or_id></title>"
                    "</head><body bgcolor=\"#FFFFFF\">\n\n" +
                    body +
                    "\n\n</body></html>")
    else:
        if old:
            f = f.read()
        # NOTE(review): when neither doctor nor old is set, `body` is the
        # still-open file object -- presumably ZPublisher.Client accepts
        # file objects for `file=`; confirm before changing.
        title, head, body = '', '', f

    if old:
        call(object.manage_addDocument, id=name, file=body)
        if index_html and name in ('index.html', 'index.htm'):
            call(object.manage_addDocument, id='index_html',
                 file=('<!--#raise Redirect-->'
                       '<!--#var URL1-->/%s'
                       '<!--#/raise-->' % name))
    else:
        call(object.manage_addDTMLDocument, id=name, title=title, file=body)
        if index_html and name in ('index.html', 'index.htm'):
            call(object.manage_addDTMLMethod, id='index_html',
                 file=('<dtml-raise Redirect>'
                       '<dtml-var URL1>/%s'
                       '</dtml-raise>' % name))

    # Now add META and other tags as property
    if head:
        object = object.__class__(object.url + '/' + name,
                                  username=object.username,
                                  password=object.password)
        call(object.manage_addProperty,
             id="loadsite-head", type="text", value=head)
# ----- /phd -----

# .htm files are handled exactly like .html files
upload_htm = upload_html


def upload_dtml(object, f):
    """Upload a DTML file as a Document (old mode) or DTML Method."""
    dir, name = os.path.split(f)
    f = open(f)
    if old:
        f = f.read()
        call(object.manage_addDocument, id=name, file=f)
    else:
        call(object.manage_addDTMLMethod, id=name, file=f)
def upload_gif(object, f):
    """Upload an image file as a Zope Image object."""
    dirname, name = os.path.split(f)
    call(object.manage_addImage, id=name, file=open(f, 'rb'))


# jpg/png images use the same upload path as gif
upload_jpg = upload_gif
upload_png = upload_gif

if __name__ == '__main__':
    main()
src/Zope2/utilities/requestprofiler.py
deleted
100644 → 0
View file @
aa5c1b27
##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
"""Request log profiler script
"""
import
sys
,
time
,
getopt
,
math
,
cPickle
from
types
import
StringType
try
:
import
gzip
except
:
pass
class ProfileException(Exception):
    """Raised for malformed profiler input."""
    pass
class Request:
    """One request parsed from a Zope detailed ('big M') request log.

    Timing fields hold epoch seconds; `elapsed` stays "I" (incomplete)
    until the final 'E' record arrives.  The four record codes are:
    B = begin, I = input received, A = output received, E = end.
    """

    # column layout shared by __str__ and getheader
    fmt = "%19s %4s %4s %4s %3s %1s %7s %4s %4s %s"

    def __init__(self):
        self.url = None
        self.start = None
        self.method = None
        self.t_recdinput = None
        self.isize = None
        self.t_recdoutput = None
        self.osize = None
        self.httpcode = None
        self.t_end = None
        self.elapsed = "I"
        self.active = 0

    def put(self, code, t, desc):
        """Record one log entry (code, timestamp, description) on this request."""
        if code not in ('A', 'B', 'I', 'E'):
            # BUG FIX: the original raised a Python 2 string exception,
            # which is illegal in Python 3; use the module's exception type.
            raise ProfileException("unknown request code %s" % code)
        if code == 'B':
            self.start = t
            self.method, self.url = desc.strip().split()
        elif code == "I":
            self.t_recdinput = t
            self.isize = desc.strip()
        elif code == "A":
            self.t_recdoutput = t
            self.httpcode, self.osize = desc.strip().split()
        elif code == 'E':
            self.t_end = t
            self.elapsed = int(self.t_end - self.start)

    def isfinished(self):
        return not self.elapsed == "I"

    def prettystart(self):
        if self.start is not None:
            t = time.localtime(self.start)
            return time.strftime('%Y-%m-%dT%H:%M:%S', t)
        else:
            return "NA"

    def shortprettystart(self):
        if self.start is not None:
            t = time.localtime(self.start)
            return time.strftime('%H:%M:%S', t)
        else:
            return "NA"

    def win(self):
        """Seconds spent waiting for input from the client."""
        if self.t_recdinput is not None and self.start is not None:
            return self.t_recdinput - self.start
        else:
            return "NA"

    def wout(self):
        """Seconds spent waiting for output from ZPublisher."""
        if self.t_recdoutput is not None and self.t_recdinput is not None:
            return self.t_recdoutput - self.t_recdinput
        else:
            return "NA"

    def wend(self):
        """Seconds spent sending output to the client."""
        if self.t_end is not None and self.t_recdoutput is not None:
            return self.t_end - self.t_recdoutput
        else:
            return "NA"

    def endstage(self):
        """Last successfully completed stage: B, I, A, or E."""
        if self.t_end is not None:
            stage = "E"
        elif self.t_recdoutput is not None:
            stage = "A"
        elif self.t_recdinput is not None:
            stage = "I"
        else:
            stage = "B"
        return stage

    def total(self):
        """Elapsed seconds up to the last completed stage."""
        stage = self.endstage()
        if stage == "B":
            return 0
        if stage == "I":
            return self.t_recdinput - self.start
        if stage == "A":
            return self.t_recdoutput - self.start
        if stage == "E":
            return self.elapsed

    def prettyisize(self):
        if self.isize is not None:
            return self.isize
        else:
            return "NA"

    def prettyosize(self):
        if self.osize is not None:
            return self.osize
        else:
            return "NA"

    def prettyhttpcode(self):
        if self.httpcode is not None:
            return self.httpcode
        else:
            return "NA"

    def __str__(self):
        body = (self.prettystart(), self.win(), self.wout(), self.wend(),
                self.total(), self.endstage(), self.prettyosize(),
                self.prettyhttpcode(), self.active, self.url)
        return self.fmt % body

    def getheader(self):
        body = ('Start', 'WIn', 'WOut', 'WEnd', 'Tot', 'S', 'OSize',
                'Code', 'Act', 'URL')
        return self.fmt % body
class StartupRequest(Request):
    """Pseudo-request marking a server (re)start in the log."""

    def endstage(self):
        # 'U' = startup marker, distinct from the B/I/A/E request stages
        return "U"

    def total(self):
        return 0
class Cumulative:
    """Aggregated timing statistics for all requests to one URL.

    Elapsed values are integers, or the string "I" for requests that
    never finished (hangs).
    """

    # column layout shared by __str__ and getheader
    fmt = "%5s %5s %5s %5s %5s %6s %5s %s"

    def __init__(self, url):
        self.url = url
        self.times = []
        self.hangs = 0
        self.allelapsed = None

    def put(self, request):
        elapsed = request.elapsed
        if elapsed == "I":
            self.hangs = self.hangs + 1
        self.times.append(elapsed)

    def all(self):
        """Return (and cache) the sorted list of elapsed values."""
        if self.allelapsed is None:
            self.allelapsed = list(self.times)
            self.allelapsed.sort()
        return self.allelapsed

    def __str__(self):
        body = (self.hangs, self.hits(), self.total(), self.max(),
                self.min(), self.median(), self.mean(), self.url)
        return self.fmt % body

    def getheader(self):
        return self.fmt % ('Hangs', 'Hits', 'Total', 'Max', 'Min',
                           'Median', 'Mean', 'URL')

    def hits(self):
        return len(self.times)

    def max(self):
        return max(self.all())

    def min(self):
        return min(self.all())

    def mean(self):
        l = len(self.times)
        if l == 0:
            return "I"
        t = self.total()
        if t == "I":
            return "I"
        # floor division preserves the original Python 2 int semantics
        return t // l

    def median(self):
        all = self.all()
        l = len(all)
        if l == 0:
            return "I"
        if l == 1:
            return all[0]
        if l % 2 != 0:
            # BUG FIX: the original indexed all[l/2 + 1], one past the
            # middle element; the median of an odd-length sorted list
            # is all[l // 2].
            return all[l // 2]
        i = l // 2 - 1
        i2 = i + 1
        v1 = all[i]
        v2 = all[i2]
        if isinstance(v1, str) or isinstance(v2, str):
            return "I"  # an unfinished request straddles the middle
        return (v1 + v2) // 2

    def total(self):
        t = 0
        for elapsed in self.all():
            if elapsed == "I":
                continue  # hangs contribute no time
            t = t + elapsed
        return t
def parsebigmlogline(line):
    """Split a big-M log line into (code, id, timestr, desc), or None.

    Three-field lines (no description) get an empty desc appended;
    anything that is not 3 or 4 fields is rejected with None.
    """
    fields = line.split(None, 3)
    n = len(fields)
    if n == 3:
        code, rid, timestr = fields
        return code, rid, timestr, ''
    if n == 4:
        return fields
    return None
def get_earliest_file_data(files):
    """Pop the chronologically earliest log line across several open files.

    Reads one line from every file, keeps the earliest entry, and seeks
    every other file back so its line is re-read on the next call.
    Returns [code, id, fromepoch, desc], or None when all files are
    exhausted.
    """
    temp = {}
    earliest_fromepoch = 0
    earliest = None
    retn = None
    for file in files:
        line = file.readline()
        if not line:
            continue
        linelen = len(line)
        line = line.strip()
        tup = parsebigmlogline(line)
        if tup is None:
            print("Could not interpret line: %s" % line)
            continue
        code, id, timestr, desc = tup
        timestr = timestr.strip()
        fromepoch = getdate(timestr)
        temp[file] = linelen
        if earliest_fromepoch == 0 or fromepoch < earliest_fromepoch:
            earliest_fromepoch = fromepoch
            earliest = file
            retn = [code, id, fromepoch, desc]

    # push back the line of every file except the one we consumed
    for file, linelen in temp.items():
        if file is not earliest:
            file.seek(file.tell() - linelen)
    return retn
def get_requests(files, start=None, end=None, statsfname=None,
                 writestats=None, readstats=None):
    """Parse big-M log files into Request objects.

    With readstats, load a previously pickled list from `statsfname`
    instead of parsing; with writestats, pickle the parsed list to
    `statsfname` for faster subsequent runs.
    """
    finished = []
    unfinished = {}
    if readstats:
        # cPickle was folded into pickle in Python 3; pickles must be
        # read/written in binary mode.
        import pickle
        fp = open(statsfname, 'rb')
        requests = pickle.Unpickler(fp).load()
        fp.close()
        del fp
    else:
        while 1:
            tup = get_earliest_file_data(files)
            if tup is None:
                break
            code, id, fromepoch, desc = tup
            if start is not None and fromepoch < start:
                continue
            if end is not None and fromepoch > end:
                break
            if code == 'U':
                # server restart: all pending requests are abandoned
                finished.extend(unfinished.values())
                unfinished.clear()
                request = StartupRequest()
                request.url = desc
                request.start = int(fromepoch)
                finished.append(request)
                continue
            request = unfinished.get(id)
            if request is None:
                if code != "B":
                    continue  # garbage at beginning of file
                request = Request()
                # every already-pending request sees one more concurrent one
                for pending_req in unfinished.values():
                    pending_req.active = pending_req.active + 1
                unfinished[id] = request
            t = int(fromepoch)
            try:
                request.put(code, t, desc)
            except Exception:
                print("Unable to handle entry: %s %s %s" % (code, t, desc))
            if request.isfinished():
                del unfinished[id]
                finished.append(request)

        finished.extend(unfinished.values())
        requests = finished

        if writestats:
            import pickle
            fp = open(statsfname, 'wb')
            pickle.Pickler(fp).dump(requests)
            fp.close()
            del fp
    return requests
def analyze(requests, top, sortf, start=None, end=None, mode='cumulative',
            resolution=60, urlfocusurl=None, urlfocustime=60):
    """Dispatch to the report writer for the selected mode.

    mode is one of 'cumulative', 'timed', 'urlfocus', or 'detailed'
    (the fall-through).  `sortf` is an old cmp-style comparator.
    """
    # list.sort(cmpfunc) was removed in Python 3; adapt the cmp-style
    # comparator with cmp_to_key.
    from functools import cmp_to_key

    if mode == 'cumulative':
        cumulative = {}
        for request in requests:
            url = request.url
            stats = cumulative.get(url)
            if stats is None:
                stats = Cumulative(url)
                cumulative[url] = stats
            stats.put(request)
        # dict.values() is a view in Python 3; materialize before sorting
        requests = list(cumulative.values())
        requests.sort(key=cmp_to_key(sortf))
        write(requests, top)
    elif mode == 'timed':
        computed_start = requests[0].start
        computed_end = requests[-1].t_end
        if start and end:
            timewrite(requests, start, end, resolution)
        if start and not end:
            timewrite(requests, start, computed_end, resolution)
        if end and not start:
            timewrite(requests, computed_start, end, resolution)
        if not end and not start:
            timewrite(requests, computed_start, computed_end, resolution)
    elif mode == 'urlfocus':
        requests.sort(key=cmp_to_key(sortf))
        urlfocuswrite(requests, urlfocusurl, urlfocustime)
    else:
        requests.sort(key=cmp_to_key(sortf))
        write(requests, top)
def urlfocuswrite(requests, url, t):
    """Print the requests surrounding each invocation of `url`.

    For every request to `url`, lists all requests starting within `t`
    seconds before or after it, then prints before/after summary counts.
    """
    l = []
    i = 0
    for request in requests:
        if request.url == url:
            l.append(i)
        i = i + 1

    before = {}
    after = {}
    x = 0
    for n in l:
        x = x + 1
        r = requests[n]
        start = r.start
        earliest = start - t
        latest = start + t
        print('URLs invoked %s seconds before and after %s (#%s, %s)' %
              (t, url, x, r.shortprettystart()))
        print('---')
        i = -1
        for request in requests:
            i = i + 1
            if request.start < earliest:
                continue
            if request.start > latest:
                break
            if n == i:
                # current request (the trailing-comma Py2 prints become end=' ')
                print('%3d' % (request.start - start), end=' ')
                print('%s' % (request.shortprettystart()), end=' ')
                print(request.url)
                continue
            if request.start <= start:
                if before.get(i):
                    before[i] = before[i] + 1
                else:
                    before[i] = 1
            if request.start > start:
                if after.get(i):
                    after[i] = after[i] + 1
                else:
                    after[i] = 1
            print('%3d' % (request.start - start), end=' ')
            print('%s' % (request.shortprettystart()), end=' ')
            print(request.url)

    print()
    print('Summary of URLs invoked before (and at the same time as) %s '
          '(times, url)' % url)
    before = sorted(before.items())
    for k, v in before:
        print(v, requests[k].url)
    print()
    print('Summary of URLs invoked after %s (times, url)' % url)
    after = sorted(after.items())
    for k, v in after:
        print(v, requests[k].url)
def write(requests, top=0):
    """Print one line per request/stat entry, at most `top` lines (0 = all).

    Lines are truncated to 78 columns unless the module-level `verbose`
    flag is set.
    """
    if len(requests) == 0:
        print("No data.\n")
        return
    i = 0
    header = requests[0].getheader()
    print(header)
    for stat in requests:
        i = i + 1
        if verbose:
            print(str(stat))
        else:
            print(str(stat)[:78])
        if i == top:
            break
def getdate(val):
    """Parse 'YYYY-MM-DDTHH:MM:SS' into local epoch seconds.

    Raises ProfileException for anything that does not match the
    expected fixed-width layout.
    """
    try:
        val = val.strip()
        year, month, day = int(val[:4]), int(val[5:7]), int(val[8:10])
        hour, minute, second = (int(val[11:13]), int(val[14:16]),
                                int(val[17:19]))
        t = time.mktime((year, month, day, hour, minute, second, 0, 0, -1))
        return t
    except Exception:
        # Python 2 'raise Exc, arg' syntax replaced; bare except narrowed.
        raise ProfileException("bad date %s" % val)
def getTimeslice(period, utime):
    """Return the multiple of `period` that anchors the slice holding `utime`."""
    lower = int(math.floor(utime)) - period + 1
    upper = int(math.ceil(utime)) + 1
    for tick in range(lower, upper):
        if tick % period == 0:
            return tick
def timewrite(requests, start, end, resolution):
    """Print a requests-per-timeslice table between `start` and `end`.

    Buckets request start times into `resolution`-second slices and
    reports per-slice counts plus peak/average/total summary lines.
    """
    print("Start: %s End: %s Resolution: %d secs" %
          (tick2str(start), tick2str(end), resolution))
    print("-" * 78)
    print()
    print("Date/Time #requests requests/second")

    d = {}
    max = 0
    min = None
    for r in requests:
        t = r.start
        slice = getTimeslice(resolution, t)
        if slice > max:
            max = slice
        if (min is None) or (slice < min):
            min = slice
        # dict.has_key() was removed in Python 3; use the `in` operator.
        if slice in d:
            d[slice] = d[slice] + 1
        else:
            d[slice] = 1

    num = 0
    hits = 0
    avg_requests = None
    max_requests = 0
    for slice in range(min, max, resolution):
        num = d.get(slice, 0)
        if num > max_requests:
            max_requests = num
        hits = hits + num
        if avg_requests is None:
            avg_requests = num
        else:
            # running pairwise average, floor division as in the Py2 original
            avg_requests = (avg_requests + num) // 2
        s = tick2str(slice)
        s = s + " %6d %4.2lf" % (num, num * 1.0 / resolution)
        print(s)

    print('=' * 78)
    print(" Peak: %6d %4.2lf" %
          (max_requests, max_requests * 1.0 / resolution))
    print(" Avg: %6d %4.2lf" %
          (avg_requests, avg_requests * 1.0 / resolution))
    print("Total: %6d n/a " % (hits))
def tick2str(t):
    """Format epoch seconds `t` as local ISO-8601 'YYYY-MM-DDTHH:MM:SS'."""
    local = time.localtime(t)
    return time.strftime('%Y-%m-%dT%H:%M:%S', local)
def codesort(v1, v2):
    """cmp-style comparator on endstage codes, ordered B < I < A < E."""
    v1 = v1.endstage()
    v2 = v2.endstage()
    if v1 == v2:
        return 0
    if v1 == "B":
        return -1  # B sorts before everything else
    if v1 == "I":
        return 1 if v2 == "B" else -1
    if v1 == "A":
        return 1 if v2 in ['B', 'I'] else -1
    if v1 == "E":
        return 1  # E sorts after everything else
class Sort:
    """cmp-style comparator keyed on attribute or method `fname`.

    Descending order by default; pass ascending=1 to reverse.
    Callable attributes are invoked to obtain the comparison value.
    """

    def __init__(self, fname, ascending=0):
        self.fname = fname
        self.ascending = ascending

    def __call__(self, i1, i2):
        f1 = getattr(i1, self.fname)
        f2 = getattr(i2, self.fname)
        if callable(f1):
            f1 = f1()
        if callable(f2):
            f2 = f2()
        if f1 < f2:
            return -1 if self.ascending else 1
        if f1 == f2:
            return 0
        return 1 if self.ascending else -1
def detailedusage():
    """Return the long-form help text shown for --help.

    Builds on the short usage() banner and interpolates the program name
    into the examples.  NOTE(review): intra-string indentation of this
    help text was lost in extraction and could not be reconstructed
    exactly.
    """
    details = usage(0)
    pname = sys.argv[0]
    details = details + """
Reports are of four types: cumulative, detailed, timed, or urlfocus. The
default is cumulative. Data is taken from one or more Zope detailed request
logs (-M logs, aka 'big M' logs) or from a preprocessed statistics file.
For cumulative reports, each line in the profile indicates information
about a Zope method (URL) collected via a detailed request log.
For detailed reports, each line in the profile indicates information about
a single request.
For timed reports, each line in the profile indicates informations about
the number of requests and the number of requests/second for a period of time.
For urlfocus reports, ad-hoc information about requests surrounding the
specified url is given.
Each 'filename' is a path to a '-M' log that contains detailed request data.
Multiple input files can be analyzed at the same time by providing the path
to each file. (Analyzing multiple big M log files at once is useful if you
have more than one Zope client on a single machine and you'd like to
get an overview of all Zope activity on that machine).
If you wish to make multiple analysis runs against the same input data, you
may want to use the --writestats option. The --writestats option creates a
file which holds preprocessed data representing the specfified input files.
Subsequent runs (for example with a different sort spec) will be much
faster if the --readstats option is used to specify a preprocessed stats
file instead of actual input files because the logfile parse step is skipped.
If a 'sort' value is specified, sort the profile info by the spec. The sort
order is descending unless indicated. The default cumulative sort spec is
'total'. The default detailed sort spec is 'start'.
For cumulative reports, the following sort specs are accepted:
'hits' -- the number of hits against the method
'hangs' -- the number of unfinished requests to the method
'max' -- the maximum time in secs taken by a request to this method
'min' -- the minimum time in secs taken by a request to this method
'mean' -- the mean time in secs taken by a request to this method
'median' -- the median time in secs taken by a request to this method
'total' -- the total time in secs across all requests to this method
'url' -- the URL/method name (ascending)
For detailed (non-cumulative) reports, the following sort specs are accepted:
'start' -- the start time of the request to ZServer (ascending)
'win' -- the num of secs ZServer spent waiting for input from client
'wout' -- the secs ZServer spent waiting for output from ZPublisher
'wend' -- the secs ZServer spent sending data to the client
'total' -- the secs taken for the request from begin to end
'endstage' -- the last successfully completed request stage (B, I, A, E)
'osize' -- the size in bytes of output provided by ZPublisher
'httpcode' -- the HTTP response code provided by ZPublisher (ascending)
'active' -- total num of requests pending at the end of this request
'url' -- the URL/method name (ascending)
For timed and urlfocus reports, there are no sort specs allowed.
If the 'top' argument is specified, only report on the top 'n' entries in
the profile (as per the sort). The default is to show all data in the profile.
If the 'verbose' argument is specified, do not trim url to fit into 80 cols.
If the 'today' argument is specified, limit results to hits received today.
If the 'daysago' argument is specified, limit results to hits received n days ago.
The 'resolution' argument is used only for timed reports and specifies the
number of seconds between consecutive lines in the report
(default is 60 seconds).
The 'urlfocustime' argument is used only for urlfocus reports and specifies the
number of seconds to target before and after the URL provided in urlfocus mode.
(default is 10 seconds).
If the 'start' argument is specified in the form 'DD/MM/YYYY HH:MM:SS' (UTC),
limit results to hits received after this date/time.
If the 'end' argument is specified in the form 'DD/MM/YYYY HH:MM:SS' (UTC),
limit results to hits received before this date/time.
'start' and 'end' arguments are not honored when request stats are obtained
via the --readstats argument.
Examples:
%(pname)s debug.log
Show cumulative report statistics for information in the file 'debug.log',
by default sorted by 'total'.
%(pname)s debug.log --detailed
Show detailed report statistics sorted by 'start' (by default).
%(pname)s debug.log debug2.log --detailed
Show detailed report statistics for both logs sorted by 'start'
(by default).
%(pname)s debug.log --cumulative --sort=mean --today --verbose
Show cumulative report statistics sorted by mean for entries in the log
which happened today, and do not trim the URL in the resulting report.
%(pname)s debug.log --cumulative --sort=mean --daysago=3 --verbose
Show cumulative report statistics sorted by mean for entries in the log
which happened three days ago, and do not trim the URL in the resulting report.
%(pname)s debug.log --urlfocus='/manage_main' --urlfocustime=60
Show 'urlfocus' report which displays statistics about requests
surrounding the invocation of '/manage_main'. Focus on the time periods
60 seconds before and after each invocation of the '/manage_main' URL.
%(pname)s debug.log --detailed --start='2001/05/10 06:00:00'
--end='2001/05/11 23:00:00'
Show detailed report statistics for entries in 'debug.log' which
begin after 6am UTC on May 10, 2001 and which end before
11pm UTC on May 11, 2001.
%(pname)s debug.log --timed --resolution=300 --start='2001/05/10 06:00:00'
--end='2001/05/11 23:00:00'
Show timed report statistics for entries in the log for one day
with a resolution of 5 minutes
%(pname)s debug.log --top=100 --sort=max
Show cumulative report of the the 'top' 100 methods sorted by maximum
elapsed time.
%(pname)s debug.log debug2.log --writestats='requests.stat'
Write stats file for debug.log and debug2.log into 'requests.stat' and
show default report.
%(pname)s --readstats='requests.stat' --detailed
Read from 'requests.stat' stats file (instead of actual -M log files)
and show detailed report against this data.""" % {'pname': pname}
    return details
def usage(basic=1):
    """Return the short usage banner.

    When *basic* is 1 (the default) a hint about --help is appended;
    detailedusage() calls this with 0 and appends its own details.
    Note the local variable deliberately shadows the function name,
    as in the original.
    """
    usage = (
        """
Usage: %s filename1 [filename2 ...]
[--cumulative | --detailed | [--timed --resolution=seconds]]
[--sort=spec]
[--top=n]
[--verbose]
[--today | [--start=date] [--end=date] | --daysago=n ]
[--writestats=filename | --readstats=filename]
[--urlfocus=url]
[--urlfocustime=seconds]
[--help]
Provides a profile of one or more Zope "-M" request log files.
""" % sys.argv[0]
        )
    if basic == 1:
        usage = usage + """
If the --help argument is given, detailed usage docs are provided."""
    return usage
def main():
    """Command-line entry point for the request profiler.

    Parses options, opens the named '-M' log files and dispatches to
    get_requests()/analyze() (defined elsewhere in this module).

    NOTE(review): this is Python 2 code throughout: print statements,
    ``except Exc, val`` clauses, ``dict.has_key`` and a string
    exception (``raise 'Invalid mode'``).
    """
    if len(sys.argv) == 1:
        print usage()
        sys.exit(0)
    if sys.argv[1] == '--help':
        print detailedusage(); sys.exit(0)
    # Report defaults, overridden by the options parsed below.
    mode = 'cumulative'
    sortby = None
    trim = 0  # NOTE(review): assigned but never read in this function
    top = 0
    verbose = 0  # NOTE(review): set by --verbose but not used here
    start = None
    end = None
    resolution = 60
    urlfocustime = 10
    urlfocusurl = None
    statsfname = None
    readstats = 0
    writestats = 0
    files = []
    i = 1
    # Positional arguments are log files.  Open each (transparently
    # decompressing *.gz when the gzip module was importable at module
    # level) and remove it from sys.argv so getopt sees only options.
    for arg in sys.argv[1:]:
        if arg[:2] != '--':
            if arg[-3:] == '.gz' and globals().has_key('gzip'):
                files.append(gzip.GzipFile(arg, 'r'))
            else:
                files.append(open(arg))
            sys.argv.remove(arg)
            i = i + 1  # NOTE(review): counter is never used afterwards
    try:
        opts, extra = getopt.getopt(
            sys.argv[1:], '', ['sort=', 'top=', 'help', 'verbose', 'today',
                               'cumulative', 'detailed', 'timed', 'start=',
                               'end=', 'resolution=', 'writestats=',
                               'daysago=', 'readstats=', 'urlfocus=',
                               'urlfocustime=']
            )
        for opt, val in opts:
            if opt == '--readstats':
                statsfname = val
                readstats = 1
            elif opt == '--writestats':
                statsfname = val
                writestats = 1
            if opt == '--sort':
                sortby = val
            if opt == '--top':
                top = int(val)
            if opt == '--help':
                print detailedusage(); sys.exit(0)
            if opt == '--verbose':
                verbose = 1
            if opt == '--resolution':
                resolution = int(val)
            if opt == '--today':
                # Limit to today: midnight through 23:59:59 local time.
                now = time.localtime(time.time())
                # for testing - now = (2001, 04, 19, 0, 0, 0, 0, 0, -1)
                start = list(now)
                start[3] = start[4] = start[5] = 0
                start = time.mktime(start)
                end = list(now)
                end[3] = 23; end[4] = 59; end[5] = 59
                end = time.mktime(end)
            if opt == '--daysago':
                # Same midnight-to-23:59:59 window, shifted n days back.
                now = time.localtime(time.time() - int(val)*3600*24)
                # for testing - now = (2001, 04, 19, 0, 0, 0, 0, 0, -1)
                start = list(now)
                start[3] = start[4] = start[5] = 0
                start = time.mktime(start)
                end = list(now)
                end[3] = 23; end[4] = 59; end[5] = 59
                end = time.mktime(end)
            if opt == '--start':
                start = getdate(val)
            if opt == '--end':
                end = getdate(val)
            if opt == '--detailed':
                mode = 'detailed'
                d_sortby = sortby  # NOTE(review): never read afterwards
            if opt == '--cumulative':
                mode = 'cumulative'
            if opt == '--timed':
                mode = 'timed'
            if opt == '--urlfocus':
                mode = 'urlfocus'
                urlfocusurl = val
            if opt == '--urlfocustime':
                urlfocustime = int(val)
        # Valid sort specs per mode; violations surface as the
        # AssertionError handled below.
        validcumsorts = ['url', 'hits', 'hangs', 'max', 'min', 'median',
                         'mean', 'total']
        validdetsorts = ['start', 'win', 'wout', 'wend', 'total',
                         'endstage', 'isize', 'osize', 'httpcode',
                         'active', 'url']
        if mode == 'cumulative':
            if sortby is None:
                sortby = 'total'
            assert sortby in validcumsorts, (sortby, mode, validcumsorts)
            if sortby in ['url']:
                sortf = Sort(sortby, ascending=1)
            else:
                sortf = Sort(sortby)
        elif mode == 'detailed':
            if sortby is None:
                sortby = 'start'
            assert sortby in validdetsorts, (sortby, mode, validdetsorts)
            if sortby in ['start', 'url', 'httpcode']:
                sortf = Sort(sortby, ascending=1)
            elif sortby == 'endstage':
                # endstage needs the special stage-order comparator
                sortf = codesort
            else:
                sortf = Sort(sortby)
        elif mode == 'timed':
            sortf = None
        elif mode == 'urlfocus':
            sortf = Sort('start', ascending=1)
        else:
            # NOTE(review): string exception -- Python 2 only.
            raise 'Invalid mode'
        req = get_requests(files, start, end, statsfname, writestats,
                           readstats)
        analyze(req, top, sortf, start, end, mode, resolution, urlfocusurl,
                urlfocustime)
    except AssertionError, val:
        # val carries the (sortby, mode, validsorts) tuple asserted above.
        a = "%s is not a valid %s sort spec, use one of %s"
        print a % (val[0], val[1], val[2])
        sys.exit(0)
    except getopt.error, val:
        print val
        sys.exit(0)
    except ProfileException, val:
        print val
        sys.exit(0)
    except SystemExit:
        sys.exit(0)
    except:
        # Anything else: show the traceback plus the usage banner.
        import traceback
        traceback.print_exc()
        print usage()
        sys.exit(0)
if __name__ == '__main__':
    # Script entry point: profile the log files named on the command line.
    main()
src/Zope2/utilities/tracelog.py
deleted
100644 → 0
View file @
aa5c1b27
##############################################################################
#
# Copyright (c) 2006 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Yet another lag analysis tool
"""
import
datetime
,
optparse
,
sys
class Request:
    """State of a single request as it flows through the trace log.

    An instance is created from a 'B' (begin) record and advanced by
    the I (input done), A (app done) and E (end) records.  ``state``
    tracks the current phase: 'input' -> 'app' -> 'output'.
    """

    # Placeholder shown until an 'A' record supplies the real size.
    output_bytes = '-'

    def __init__(self, start, method, url):
        self.start = start
        self.method = method
        self.url = url
        self.state = 'input'

    def I(self, input_time, input_bytes):
        """Input phase finished; the request now waits on the app."""
        self.input_time = input_time
        self.input_bytes = input_bytes
        self.state = 'app'

    def A(self, app_time, response, output_bytes):
        """Application phase finished; output is being sent."""
        self.app_time = app_time
        self.response = response
        self.output_bytes = output_bytes
        self.state = 'output'

    def E(self, end):
        """Request fully completed at *end*."""
        self.end = end

    @property
    def app_seconds(self):
        """Whole seconds spent in the application phase."""
        spent = self.app_time - self.input_time
        return spent.seconds

    @property
    def total_seconds(self):
        """Whole seconds from request start to completion."""
        elapsed = self.end - self.start
        return elapsed.seconds
class Times:
    """Aggregated timing statistics for one URL.

    ``times`` collects app-phase seconds of finished requests; ``hangs``
    counts requests that never finished (e.g. pending across a restart).
    NOTE(review): Python 2 code -- long literal ``1l``, integer division
    and print statements.
    """

    tid = 1l  # class-level counter handing out unique ids

    def __init__(self):
        self.times = []
        self.hangs = 0
        Times.tid += 1
        self.tid = Times.tid # generate a unique id

    def finished(self, request):
        # Record the app-phase duration of a completed request.
        self.times.append(request.app_seconds)

    def hung(self):
        # Count a request that never completed.
        self.hangs += 1

    def impact(self):
        """Compute and cache median, mean and impact; return the impact.

        Impact is the mean app time weighted by the number of samples
        plus hangs.  Caution: this rebinds ``self.impact`` to a number,
        shadowing this method on the instance after the first call
        (callers rely on that: they later read ``times.impact``).
        """
        times = self.times
        if not times:
            self.median = self.mean = self.impact = 0
            return 0
        self.times.sort()
        n = len(times)
        if n % 2:
            # odd sample count: the middle element (Py2 int division)
            m = times[(n+1)/2-1]
        else:
            # even sample count: average the two middle elements
            m = .5 * (times[n/2] + times[n/2-1])
        self.median = m
        self.mean = sum(times)/n
        self.impact = self.mean * (n+self.hangs)
        return self.impact

    def __str__(self):
        # Fixed-width report line: impact, count, min, median, mean,
        # max, hangs.  impact() must have been called first.
        times = self.times
        if not times:
            return " 0 %5d" % (self.hangs)
        n = len(times)
        m = self.median
        return "%9.1f %5d %6.0f %6.2f %6.2f %6.0f %5d" % (
            self.impact, n, times[0], m, self.mean, times[-1], self.hangs)

    def html(self):
        # Same statistics as __str__, emitted as HTML table cells via
        # the module-level td() helper; the impact cell carries an
        # anchor so other tables can link to it.  impact() must have
        # been called first.
        times = self.times
        if not times:
            print td('', 0, '', '', '', '', self.hangs)
        else:
            n = len(times)
            m = self.median
            impact = '<a name="u%s">%s' % (self.tid, self.impact)
            print td(impact, n, times[0], m, self.mean, times[-1],
                     self.hangs)
def parsedt(s):
    """Parse a trace-log timestamp 'YYYY-MM-DDTHH:MM:SS' into a datetime.

    Returns a naive datetime.datetime.  Raises ValueError when the
    string lacks the 'T' separator or a field is not an integer.
    """
    datepart, timepart = s.split('T')
    # Wrap map() in list(): on Python 3 map() returns an iterator and
    # the original ``map(...) + map(...)`` concatenation raises
    # TypeError; list() leaves Python 2 behavior unchanged.
    fields = (list(map(int, datepart.split('-'))) +
              list(map(int, timepart.split(':'))))
    return datetime.datetime(*fields)
def main(args=None):
    """Read one trace log and print per-minute activity plus per-URL stats.

    *args* defaults to sys.argv[1:]; exactly one positional argument
    (the trace log file) is expected -- ``[file] = args`` raises
    ValueError otherwise.  NOTE(review): Python 2 print statements
    throughout; the locals ``file``, ``input`` and ``min`` shadow
    builtins, as in the original.
    """
    if args is None:
        args = sys.argv[1:]
    options, args = parser.parse_args(args)
    # Server restart times let us reset running state mid-log; a
    # far-future sentinel guarantees restarts.pop(0) never underflows.
    if options.event_log:
        restarts = find_restarts(options.event_log)
    else:
        restarts = []
    restarts.append(datetime.datetime.utcnow()+datetime.timedelta(1000))
    # Select the text or HTML rendering helpers.
    if options.html:
        print_app_requests = print_app_requests_html
        output_minute = output_minute_html
        output_stats = output_stats_html
        minutes_header = minutes_header_html
        minutes_footer = minutes_footer_html
        print '<html title="trace log statistics"><body>'
    else:
        print_app_requests = print_app_requests_text
        output_minute = output_minute_text
        output_stats = output_stats_text
        minutes_header = minutes_header_text
        minutes_footer = minutes_footer_text
    urls = {}            # url -> Times
    [file] = args
    lmin = ldt = None    # last minute string / last parsed datetime seen
    requests = {}        # request id -> Request, for pending requests
    input = apps = output = n = 0
    spr = spa = 0.0      # accumulated total / app seconds this minute
    restart = restarts.pop(0)
    minutes_header()
    remove_prefix = options.remove_prefix
    for record in open(file):
        record = record.split()
        # Each record starts: type (B/I/A/E), request id, timestamp.
        typ, rid, dt = record[:3]
        min = dt[:-3]    # timestamp truncated to minute resolution
        dt = parsedt(dt)
        if dt == restart:
            continue
        # Crossed a server restart: report and drop all pending state.
        while dt > restart:
            print_app_requests(requests, ldt,
                               options.old_requests,
                               options.app_requests,
                               urls,
                               "\nLeft over:")
            record_hung(urls, requests)
            requests = {}
            input = apps = output = n = 0
            spr = spa = 0.0
            restart = restarts.pop(0)
        ldt = dt
        # A new minute began: flush the per-minute summary line.
        if min != lmin:
            if lmin is not None:
                output_minute(lmin, requests, input, apps, output, n,
                              spr, spa)
                if apps > options.apps:
                    # Too many requests stuck in the app: show them.
                    print_app_requests(requests, dt,
                                       options.old_requests,
                                       options.app_requests,
                                       urls,
                                       )
            lmin = min
            spr = 0.0
            spa = 0.0
            n = 0
        if typ == 'B':
            if rid in requests:
                # Reused id: retire the stale request from its counter.
                request = requests[rid]
                if request.state == 'output':
                    output -= 1
                elif request.state == 'app':
                    apps -= 1
                else:
                    input -= 1
            input += 1
            request = Request(dt, *record[3:5])
            if remove_prefix and request.url.startswith(remove_prefix):
                request.url = request.url[len(remove_prefix):]
            requests[rid] = request
            times = urls.get(request.url)
            if times is None:
                times = urls[request.url] = Times()
        elif typ == 'I':
            if rid in requests:
                input -= 1
                apps += 1
                requests[rid].I(dt, record[3])
        elif typ == 'A':
            if rid in requests:
                apps -= 1
                output += 1
                requests[rid].A(dt, *record[3:5])
        elif typ == 'E':
            if rid in requests:
                output -= 1
                request = requests.pop(rid)
                request.E(dt)
                spr += request.total_seconds
                spa += request.app_seconds
                n += 1
                times = urls[request.url]
                times.finished(request)
        else:
            # Unknown record type -- surface it rather than dropping it.
            print 'WTF', record
    # End of log: report anything still pending, then the URL table.
    print_app_requests(requests, dt,
                       options.old_requests,
                       options.app_requests,
                       urls,
                       "Left over:")
    minutes_footer()
    output_stats(urls)
    if options.html:
        print '</body></html>'
def output_stats_text(urls):
    """Print the per-URL summary table as plain text, highest impact first.

    NOTE(review): header-row alignment spaces were lost in extraction;
    the literals below reproduce what the extraction shows.
    """
    print
    print 'URL statistics:'
    print " Impact count min median mean max hangs"
    print "========= ===== ====== ====== ====== ====== ====="
    # impact() also caches median/mean/impact on each Times instance,
    # which __str__ reads below.
    urls = [(times.impact(), url, times)
            for (url, times) in urls.iteritems()
            ]
    urls.sort()
    urls.reverse()
    for (_, url, times) in urls:
        # times.impact is a number here (impact() rebinds it).
        if times.impact > 0 or times.hangs:
            print times, url
def output_stats_html(urls):
    """Print the per-URL summary table as HTML, highest impact first."""
    print
    print 'URL statistics:'
    print '<table border="1">'
    print '<tr><th>Impact</th><th>count</th><th>min</th>'
    print '<th>median</th><th>mean</th><th>max</th><th>hangs</th></tr>'
    # impact() caches median/mean/impact, which html() reads below.
    urls = [(times.impact(), url, times)
            for (url, times) in urls.iteritems()
            ]
    urls.sort()
    urls.reverse()
    for (_, url, times) in urls:
        # times.impact is a number here (impact() rebinds it).
        if times.impact > 0 or times.hangs:
            print '<tr>'
            times.html()
            print td(url)
            print '</tr>'
    print '</table>'
def minutes_header_text():
    """Print column headings for the plain-text per-minute table.

    NOTE(review): alignment spaces in the heading were lost in
    extraction; the literal reproduces what the extraction shows.
    """
    print
    print " minute req input app output"
    print "================ ===== ===== ===== ======"
def minutes_footer_text():
    """End the plain-text per-minute table with a blank line."""
    print
def minutes_header_html():
    """Open the HTML per-minute table and print its heading row."""
    print '<table border="2">'
    print "<tr>"
    print '<th>Minute</th>'
    print '<th>Requests</th>'
    print '<th>Requests inputing</th>'
    print '<th>Requests executing or waiting</th>'
    print '<th>Requests outputing</th>'
    print '<th>Requests completed</th>'
    print '<th>Mean Seconds Per Request Total</th>'
    print '<th>Mean Seconds Per Request in App</th>'
    print "</tr>"
def minutes_footer_html():
    """Close the HTML per-minute table."""
    print '</table>'
def output_minute_text(lmin, requests, input, apps, output, n, spr, spa):
    """Print one plain-text per-minute summary line.

    The trailing comma on the first print suppresses the newline so the
    per-request averages (when n > 0) continue the same line.
    """
    print lmin.replace('T', ' '), "%5d I=%3d A=%3d O=%5d " % (
        len(requests), input, apps, output),
    if n:
        print "N=%4d %10.2f %10.2f" % (n, spr/n, spa/n)
    else:
        print
def td(*values):
    """Render each value as an HTML table cell and join the cells."""
    cells = []
    for value in values:
        cells.append("<td>%s</td>" % value)
    return ''.join(cells)
def output_minute_html(lmin, requests, input, apps, output, n, spr, spa):
    """Print one HTML per-minute summary row."""
    print '<tr>'
    # Emphasize the app-requests figure -- it is the key health signal.
    apps = '<font size="+2"><strong>%s</strong></font>' % apps
    print td(lmin.replace('T', ' '), len(requests), input, apps, output)
    if n:
        print td(n, "%10.2f" % (spr/n), "%10.2f" % (spa/n))
    print '</tr>'
def find_restarts(event_log):
    """Return server-restart timestamps recorded in *event_log*.

    Zope writes a "Zope Ready to handle requests" line when it starts;
    the line's first whitespace-separated field is its timestamp, which
    is parsed with parsedt().

    Fix: the original left the log file handle open; it is now closed
    via try/finally.
    """
    result = []
    f = open(event_log)
    try:
        for line in f:
            if line.strip().endswith("Zope Ready to handle requests"):
                result.append(parsedt(line.split()[0]))
    finally:
        f.close()
    return result
def record_hung(urls, requests):
    """Count every still-pending request as a hang against its URL.

    A Times entry is created lazily for URLs not seen before; note the
    membership test deliberately avoids constructing a Times() when one
    already exists (Times() bumps a class-level id counter).
    """
    for request in requests.itervalues():
        url = request.url
        if url not in urls:
            urls[url] = Times()
        urls[url].hung()
def print_app_requests_text(requests, dt, min_seconds, max_requests,
                            urls, label=''):
    """Print (plain text) the requests stuck in the app phase longest.

    Only requests older than *min_seconds* are shown, at most
    *max_requests* of them, prefixed once with *label* if given.
    NOTE(review): the *urls* parameter is immediately shadowed by a
    local dict and therefore unused, unlike the HTML variant.
    NOTE(review): sorting (age, request) tuples relies on Python 2's
    ability to compare arbitrary instances on ties.
    """
    # Keep only app-phase requests, keyed by how long they've waited.
    requests = [
        ((dt - request.input_time).seconds, request)
        for request in requests.values()
        if request.state == 'app'
    ]
    # Count how many pending requests share each URL.
    urls = {}
    for s, request in requests:
        urls[request.url] = urls.get(request.url, 0) + 1
    requests.sort()
    requests.reverse()
    for s, request in requests[:max_requests]:
        if s < min_seconds:
            continue
        if label:
            # Print the label once, before the first qualifying request.
            print label
            label = ''
        url = request.url
        repeat = urls[url]
        if repeat > 1:
            print s, "R=%d" % repeat, url
        else:
            print s, url
def print_app_requests_html(requests, dt, min_seconds, max_requests,
                            allurls, label=''):
    """Print (HTML) the requests stuck in the app phase longest.

    Same selection rules as the text variant; additionally each URL
    links to its anchor in the statistics table via *allurls* (url ->
    Times, whose tid provides the anchor name).  The surrounding
    per-minute table is closed before and reopened after this table.
    """
    # Keep only app-phase requests, keyed by how long they've waited.
    requests = [
        ((dt - request.input_time).seconds, request)
        for request in requests.values()
        if request.state == 'app'
    ]
    # Count how many pending requests share each URL.
    urls = {}
    for s, request in requests:
        urls[request.url] = urls.get(request.url, 0) + 1
    requests.sort()
    requests.reverse()
    printed = False
    for s, request in requests[:max_requests]:
        if s < min_seconds:
            continue
        if label:
            # Print the label once, before the first qualifying request.
            print label
            label = ''
        if not printed:
            # First qualifying row: close the minutes table and open ours.
            minutes_footer_html()
            print '<table border="1">'
            print '<tr><th>age</th><th>R</th><th>url</th></tr>'
            printed = True
        url = request.url
        repeat = urls[url]
        print '<tr>'
        if repeat <= 1:
            repeat = ''
        url = '<a href="#u%s">%s</a>' % (allurls[url].tid, url)
        print td(s, repeat, url)
        print '</tr>'
    if printed:
        # Close our table and resume the per-minute table.
        print '</table>'
        minutes_header_html()
# Command-line interface for the trace-log analyzer.  Defined at module
# level so main() can use it for both parsing and error reporting.
# Fixes: help-string typos ("information if printed" -> "is printed",
# "prefex" -> "prefix", "URLS" -> "URLs"); no behavioral change.
parser = optparse.OptionParser("""
Usage: %prog [options] trace_log_file
Output trace log data showing:
- number of active requests,
- number of input requests (requests gathering input),
- number of application requests,
- number of output requests,
- number of requests completed in the minute shown,
- mean seconds per request and
- mean application seconds per request.
Note that we don't seem to be logging when a connection to the client
is broken, so the number of active requests, and especially the number
of outputing requests tends to grow over time. This is spurious.
Also, note that, unfortunately, application requests include requests
that are running in application threads and requests waiting to get an
application thread.
When application threads get above the app request threshold, then we
show the requests that have been waiting the longest.
""")
parser.add_option("--app-request-threshold", "-a", dest='apps',
                  type="int", default=10,
                  help="""
Number of pending application requests at which detailed request information
is printed.
""")
parser.add_option("--app-requests", "-r", dest='app_requests',
                  type="int", default=10,
                  help="""
How many requests to show when the maximum number of pending
apps is exceeded.
""")
parser.add_option("--old-requests", "-o", dest='old_requests',
                  type="int", default=10,
                  help="""
Number of seconds beyond which a request is considered old.
""")
parser.add_option("--event-log", "-e", dest='event_log',
                  help="""
The name of an event log that goes with the trace log.  This is used
to determine when the server is restarted, so that the running trace
data structures can be reinitialized.
""")
parser.add_option("--html", dest='html', action='store_true',
                  help="""
Generate HTML output.
""")
parser.add_option("--remove-prefix", dest='remove_prefix',
                  help="""
A prefix to be removed from URLs.
""")
if __name__ == '__main__':
    # Script entry point: analyze the trace log named on the command line.
    main()
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment