Kirill Smelkov / Zope · Commits

Commit ae12ae9f
authored Jan 18, 1999 by Amos Latteier

    added back future producer stuff.

parent 1d9f3e9e

Showing 6 changed files with 378 additions and 312 deletions (+378 −312)
ZServer/medusa/http_server.py              +5   -27
ZServer/medusa/producers.py                +85  -8
ZServer/zope_handler.py                    +99  -121
lib/python/ZServer/medusa/http_server.py   +5   -27
lib/python/ZServer/medusa/producers.py     +85  -8
lib/python/ZServer/zope_handler.py         +99  -121
ZServer/medusa/http_server.py

@@ -9,7 +9,7 @@
 # interested in using this software in a commercial context, or in
 # purchasing support, please contact the author.
 
-RCS_ID = '$Id: http_server.py,v 1.3 1999/01/15 02:27:04 amos Exp $'
+RCS_ID = '$Id: http_server.py,v 1.4 1999/01/18 22:44:49 amos Exp $'
 
 # python modules
 import os
@@ -301,6 +301,7 @@ class http_channel (asynchat.async_chat):
     current_request = None
     channel_counter = counter()
+    writable = asynchat.async_chat.writable_future
 
     def __init__ (self, server, conn, addr):
         self.channel_number = http_channel.channel_counter.increment()
@@ -418,11 +419,7 @@ class http_channel (asynchat.async_chat):
             return
         # no handlers, so complain
         r.error (404)
 
-    def writable (self):
-        # this is just the normal async_chat 'writable', here for comparison
-        return self.ac_out_buffer or len(self.producer_fifo)
-
     def writable_for_proxy (self):
         # this version of writable supports the idea of a 'stalled' producer
@@ -598,32 +595,13 @@ def crack_request (r):
         version = None
     return string.lower (REQUEST.group (1)), REQUEST.group (2), version
 
-class fifo:
-
-    def __init__ (self, list=None):
-        if not list:
-            self.list = []
-        else:
-            self.list = list
-
-    def __len__ (self):
-        return len(self.list)
-
-    def first (self):
-        return self.list[0]
+class fifo (asynchat.fifo):
 
     def push_front (self, object):
         self.list.insert (0, object)
 
-    def push (self, data):
-        self.list.append (data)
-
-    def pop (self):
-        if self.list:
-            result = self.list[0]
-            del self.list[0]
-            return (1, result)
-        else:
-            return (0, None)
-
 def compute_timezone_for_log ():
     if time.daylight:
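Editor's note: the hunks above replace http_channel's own writable() with asynchat.async_chat.writable_future and slim fifo down to a subclass of asynchat.fifo; both rely on ZServer's patched asynchat, which this commit does not show. The following is only a minimal sketch of what a future-aware writability test presumably has to do (the writable_future behavior and the fifo.first() helper are assumptions here, modeled on the fifo class visible above):

def writable_future(channel):
    # Buffered output can always be flushed to the socket.
    if channel.ac_out_buffer:
        return 1
    if len(channel.producer_fifo):
        # A channel whose head producer is stalled is not writable yet;
        # producers without a 'ready' method count as always ready.
        p = channel.producer_fifo.first()
        return not hasattr(p, 'ready') or p.ready()
    return 0

The point of the indirection is that select() should not report the socket writable while the only pending producer is still waiting on ZPublisher's output pipe, otherwise the event loop would spin.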
ZServer/medusa/producers.py

 # -*- Mode: Python; tab-width: 4 -*-
 
-RCS_ID = '$Id: producers.py,v 1.3 1999/01/15 02:27:52 amos Exp $'
+RCS_ID = '$Id: producers.py,v 1.4 1999/01/18 22:45:22 amos Exp $'
 
 import string
@@ -11,8 +11,37 @@ in various ways to get interesting and useful behaviors.
 For example, you can feed dynamically-produced output into the compressing
 producer, then wrap this with the 'chunked' transfer-encoding producer.
 
+Producer Interface:
+
+All producers have a 'more' method. 'more' returns a string of output.
+'more' can be called multiple times. When 'more' returns '', the producer
+is exhausted.
+
+Optional Future Producer Interface:
+
+The future producer interface adds a 'ready' method to producers. This
+allows future producers which may not be ready until after they are
+created. Returning false means that a call to 'more' will not give you
+useful information, right now, but will later. When a producer is exhausted,
+it should return true for 'ready'. Producers which are not ready should raise
+'NotReady' when their 'more' method is called.
+
+Note: Not having a 'ready' method implies that a producer is always ready.
+
+Note: Composite producers will probably have to consult their sub-produces
+to ensure readiness.
+
+Note: If you don't wish to use future producers nothing changes. Simply don't
+call a producer's ready method. Everything works as before.
+
 """
 
+class NotReady (Exception):
+    """Raised by future producers when their more method is called
+    when they are not ready."""
+    pass
+
 class simple_producer:
     "producer for a string"
     def __init__ (self, data, buffer_size=1024):
@@ -55,9 +84,6 @@ class lines_producer:
     def __init__ (self, lines):
         self.lines = lines
 
-    def ready (self):
-        return len(self.lines)
-
     def more (self):
         if self.lines:
             chunk = self.lines[:50]
@@ -89,6 +115,7 @@ class file_producer:
         else:
             return data
 
+
 # A simple output producer. This one does not [yet] have
 # the safety feature builtin to the monitor channel: runaway
 # output will not be caught.
@@ -98,9 +125,11 @@ class file_producer:
 class output_producer:
     "Acts like an output file; suitable for capturing sys.stdout"
     def __init__ (self):
         self.data = ''
+        self.closed = None
 
     def write (self, data):
         lines = string.splitfields (data, '\n')
         data = string.join (lines, '\r\n')
@@ -116,7 +145,7 @@ class output_producer:
             ) + '\r\n'
 
     def ready (self):
-        return (len (self.data) > 0)
+        return (len (self.data) > 0) or self.closed
 
     def flush (self):
         pass
@@ -131,13 +160,26 @@ class output_producer:
             return result
         else:
             return ''
 
+    def close (self):
+        self.closed = 1
+
 class composite_producer:
     "combine a fifo of producers into one"
     def __init__ (self, producers):
         self.producers = producers
+        self.buffer = ''
 
     def more (self):
+        if self.buffer:
+            b = self.buffer
+            self.buffer = ''
+            return b
+        # we should only get here when not
+        # using the ready option
         while len(self.producers):
             p = self.producers.first()
             d = p.more()
@@ -148,6 +190,21 @@ class composite_producer:
         else:
             return ''
 
+    def ready (self):
+        # This producer requires a buffer to ensure
+        # that it really is ready when it says so
+        if self.buffer or len(self.producers) == 0:
+            return 1
+        while self.producers.ready():
+            p = self.producers.first()
+            d = p.more()
+            if d:
+                self.buffer = d
+            else:
+                self.producers.pop()
+            if self.buffer or len(self.producers) == 0:
+                return 1
+
 class globbing_producer:
     """
@@ -172,6 +229,13 @@ class globbing_producer:
         self.buffer = ''
         return r
 
+    def ready (self):
+        # XXX doesn't in fact guarentee ready. Should probably
+        # redo this one like the composite producer... But
+        # it's not a big deal, cause refill_buffer will
+        # catch the NotReady exception...
+        return not hasattr(self.producer, 'ready') or self.producer.ready()
+
 class hooked_producer:
     """
@@ -197,6 +261,9 @@ class hooked_producer:
         else:
             return ''
 
+    def ready (self):
+        return not hasattr(self.producer, 'ready') or self.producer.ready()
+
 # HTTP 1.1 emphasizes that an advertised Content-Length header MUST be
 # correct. In the face of Strange Files, it is conceivable that
 # reading a 'file' may produce an amount of data not matching that
@@ -236,6 +303,9 @@ class chunked_producer:
         else:
             return ''
 
+    def ready (self):
+        return not hasattr(self.producer, 'ready') or self.producer.ready()
+
 # Unfortunately this isn't very useful right now (Aug 97), because
 # apparently the browsers don't do on-the-fly decompression. Which
 # is sad, because this could _really_ speed things up, especially for
@@ -279,6 +349,9 @@ class compressed_producer:
         else:
             return ''
 
+    def ready (self):
+        return not hasattr(self.producer, 'ready') or self.producer.ready()
+
 class escaping_producer:
     "A producer that escapes a sequence of characters"
@@ -311,3 +384,7 @@ class escaping_producer:
             return buffer
         else:
             return buffer
+
+    def ready (self):
+        return not hasattr(self.producer, 'ready') or self.producer.ready()
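Editor's note: the docstring added above defines the whole protocol — 'more' yields strings until '', and the optional 'ready' method plus the NotReady exception let a producer exist before its data does. A small self-contained illustration of that contract (the deferred_producer and pump names are hypothetical, not part of this commit):

class NotReady(Exception):
    "Raised when 'more' is called on a producer that is not ready."

class deferred_producer:
    "A future producer whose string only becomes available after set_data()."
    def __init__(self):
        self.data = None
        self.done = 0

    def set_data(self, data):
        # called later, e.g. once a worker has finished computing the output
        self.data = data

    def ready(self):
        # exhausted producers must report ready, per the docstring above
        return self.done or self.data is not None

    def more(self):
        if not self.ready():
            raise NotReady()
        if self.done:
            return ''           # '' means exhausted
        chunk, self.data, self.done = self.data, None, 1
        return chunk

def pump(producer, write):
    "Drive a producer, honoring the optional 'ready' method if present."
    while 1:
        if hasattr(producer, 'ready') and not producer.ready():
            return 0            # stalled; retry on the next event-loop pass
        data = producer.more()
        if data == '':
            return 1            # exhausted
        write(data)

p = deferred_producer()
assert not p.ready()            # calling p.more() here would raise NotReady
p.set_data('hello')
pump(p, lambda s: None)         # delivers 'hello', then sees '' and stops

This also shows why callers that ignore 'ready' keep working unchanged: pump() falls back to plain more() for producers that never stall.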
ZServer/zope_handler.py

@@ -22,17 +22,15 @@ import thread
 from PubCore import handle
-from medusa import counter
-from medusa import producers
+from medusa import counter, producers
 from medusa.default_handler import split_path, unquote, get_header
+from medusa.producers import NotReady
 
-HEADER_LINE = regex.compile('\([A-Za-z0-9-]+\): \(.*\)')
 CONTENT_LENGTH = regex.compile('Content-Length: \([0-9]+\)', regex.casefold)
 CONNECTION = regex.compile('Connection: \(.*\)', regex.casefold)
 
 # maps request some headers to environment variables.
 # (those that don't start with 'HTTP_')
-#
 header2env = {'content-length'  : 'CONTENT_LENGTH',
               'content-type'    : 'CONTENT_TYPE',
               'connection'      : 'CONNECTION_TYPE',
@@ -40,7 +38,7 @@ header2env={'content-length' : 'CONTENT_LENGTH',
 class zope_handler:
-    "Publishes a module with ZPublisher"
+    "publishes a module with ZPublisher"
     # XXX add code to allow env overriding
@@ -72,10 +70,12 @@ class zope_handler:
         else:
             return 0
 
     def handle_request (self, request):
         self.hits.increment()
         if request.command in ["post", "put"]:
             size = get_header(CONTENT_LENGTH, request.header)
+            size = string.atoi(size)
             if size > 1048576:
                 # write large upload data to a file
                 from tempfile import TemporaryFile
@@ -83,7 +83,7 @@ class zope_handler:
             else:
                 self.data = StringIO()
             self.request = request
-            request.channel.set_terminator(string.atoi(size))
+            request.channel.set_terminator(size)
             request.collector = self
         else:
             sin = StringIO()
@@ -134,11 +134,56 @@ class zope_handler:
     def continue_request (self, sin, request):
         "continue handling request now that we have the stdin"
-        producer = header_scanning_producer(request)
-        pusher = file_pusher(producer)
-        outpipe = handle(self.module_name,
-            self.get_environment(request), sin, (pusher.push, ()))
+        outpipe = handle(self.module_name, self.get_environment(request), sin)
+        # now comes the hairy stuff. adapted from http_server
+        #
+        connection = string.lower(get_header(CONNECTION, request.header))
+        close_it = 0
+        wrap_in_chunking = 0
+        if request.version == '1.0':
+            if connection == 'keep-alive':
+                if not request.has_key('Content-Length'):
+                    close_it = 1
+                else:
+                    request['Connection'] = 'Keep-Alive'
+            else:
+                close_it = 1
+        elif request.version == '1.1':
+            if connection == 'close':
+                close_it = 1
+            elif not request.has_key('Content-Length'):
+                if request.has_key('Transfer-Encoding'):
+                    if not request['Transfer-Encoding'] == 'chunked':
+                        close_it = 1
+                elif request.use_chunked:
+                    request['Transfer-Encoding'] = 'chunked'
+                    wrap_in_chunking = 1
+                else:
+                    close_it = 1
+        if close_it:
+            request['Connection'] = 'close'
+        outgoing_producer = header_scanning_producer(request, outpipe)
+        # apply a few final transformations to the output
+        request.channel.push_with_producer(
+            # hooking lets us log the number of bytes sent
+            producers.hooked_producer(
+                outgoing_producer,
+                request.log)
+            )
+        request.channel.current_request = None
+        if close_it:
+            request.channel.close_when_done()
 
     def status (self):
         return producers.simple_producer("""
             <li>Zope Handler
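Editor's note: the rewritten continue_request above inlines medusa's persistent-connection bookkeeping before wrapping the output pipe in header_scanning_producer. A condensed restatement of that decision table as a standalone helper (hypothetical, not code from the commit; it deliberately omits the non-chunked Transfer-Encoding corner case):

def must_close(version, connection, has_content_length, can_chunk):
    "Decide whether the channel has to close after this response."
    if version == '1.0':
        # HTTP/1.0 stays open only with explicit keep-alive plus a
        # Content-Length to delimit the body.
        return not (connection == 'keep-alive' and has_content_length)
    if version == '1.1':
        if connection == 'close':
            return 1
        # Without Content-Length the reply must be chunked to stay open.
        if not has_content_length and not can_chunk:
            return 1
        return 0
    return 1  # unknown or absent version: be safe and close

assert must_close('1.0', 'keep-alive', has_content_length=0, can_chunk=0)
assert not must_close('1.1', '', has_content_length=0, can_chunk=1)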
@@ -150,7 +195,7 @@ class zope_handler:
     # put and post collection methods
     #
     def collect_incoming_data (self, data):
         self.data.write(data)
 
     def found_terminator (self):
@@ -166,117 +211,50 @@ class zope_handler:
         self.continue_request(d, r)
 
-# The pusher is used to push data out of the pipe into the
-# header_scanning_producer
-
-class file_pusher:
-    "pushs data from one file into another file"
-
-    def __init__ (self, out):
-        self.out = out
-
-    def push (self, file):
-        "push data from file to out. if file is exhausted, close out."
-        data = file.read()
-        if data != '':
-            self.out.write(data)
-        else:
-            self.out.close()
-
-# The header_scanning_producer accepts data from
-# the output pipe (via the file pusher). Then it
-# munges the data and pushes it into the channel.
 
 class header_scanning_producer:
-    """This producer accepts data with the write method.
-    It scans the data, munges HTTP headers and pushes
-    it into a channel. When done it logs information."""
+    """This weird producer patches together
+    medusa's idea of http headers with ZPublisher's
+    """
+    close_it = 1
+    exit = None
 
-    def __init__ (self, request):
-        self.buffer = ''
-        self.exit = None
-        self.request = request
-        self.channel = request.channel
-        self.got_header = 0
-        self.bytes_out = counter.counter()
+    def __init__ (self, request, pipe):
+        self.request = request
+        self.pipe = pipe
+        self.done = None
+        self.buffer = ''
 
-    def write (self, data):
-        if self.got_header:
-            self.push(data)
-        else:
-            self.buffer = self.buffer + data
-            i = string.find(self.buffer, '\n\n')
-            if i != -1:
-                self.got_header = 1
-                headers = string.split(self.buffer[:i], '\n')
-                self.push(self.build_header(headers))
-                self.push(self.buffer[i+2:])
-                self.buffer = ''
-
-    def build_header (self, lines):
-        status = '200 OK'
-        headers = []
-        header_dict = {}
-        for line in lines:
-            [header, header_value] = string.split(line, ": ", 1)
-            header_dict[header] = header_value
-            if header == "Status":
-                status = header_value
-            elif line:
-                headers.append(line)
-        self.request.reply_code = string.atoi(status[:3]) # for logging
-        headers.insert(0, 'HTTP/1.0 %s' % status)
-        headers.append('Server: ' + self.request['Server'])
-        headers.append('Date: ' + self.request['Date'])
-        # XXX add stuff to determine chunking and
-        # 'Transfer-Encoding'
-        # XXX is this keep alive logic right?
-        connection = string.lower(get_header(CONNECTION, self.request.header))
-        if connection == 'keep-alive' and \
-           header_dict.has_key('Content-Length'):
-            self.close_it = None
-            headers.append('Connection: Keep-Alive')
-        else:
-            headers.append('Connection: close')
-            self.close_it = 1
-        if header_dict.has_key('Bobo-Exception-Type') and \
-           header_dict['Bobo-Exception-Type'] == 'exceptions.SystemExit':
-            self.exit = 1
-        return string.join(headers, '\r\n') + '\r\n\r\n'
-
-    def push (self, data):
-        self.bytes_out.increment(len(data))
-        self.channel.push(data)
-
-    def close (self):
-        self.request.log(int(self.bytes_out.as_long()))
-        self.request.channel.current_request = None
-        if self.exit:
-            self.channel.producer_fifo.push(exit_producer())
-        elif self.close_it:
-            self.channel.close_when_done()
-        # is this necessary?
-        del self.request
-        del self.channel
+    def more (self):
+        if self.buffer:
+            b = self.buffer
+            self.buffer = ''
+            return b
+        data = self.pipe.read()
+        if data is None:
+            raise NotReady()
+        return data
+
+    def ready (self):
+        if self.done:
+            return self.pipe.ready()
+        elif self.pipe.ready():
+            self.buffer = self.buffer + self.pipe.read()
+            if string.find(self.buffer, "\n\n"):
+                [headers, html] = string.split(self.buffer, "\n\n", 1)
+                headers = string.split(headers, "\n")
+                for line in headers:
+                    [header, header_value] = string.split(line, ": ", 1)
+                    if header == "Status":
+                        [code, message] = string.split(header_value, " ", 1)
+                        self.request.reply_code = string.atoi(code)
+                    elif header == "Bobo-Exception-Type" and \
+                         header_value == "exceptions.SystemExit":
+                        self.exit = 1
+                    else:
+                        self.request[header] = header_value
+                self.buffer = self.request.build_reply_header() + html
+                del self.request
+                self.done = 1
 
 class exit_producer:
+    # XXX this is not currently sufficient.
+    # needs to be enhanced to actually work.
     def more (self):
-        raise SystemExit
+        # perhaps there could be a more graceful shutdown.
+        asyncore.close_all()
\ No newline at end of file
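Editor's note: the new ready() above is where medusa's and ZPublisher's header worlds meet — ZPublisher writes CGI-style headers terminated by a blank line, and the producer folds them into the medusa request before build_reply_header() emits the real HTTP header. A toy illustration of just that split-and-classify step (split_cgi_response is a hypothetical helper, not from the commit; it uses present-day string methods rather than the string module):

def split_cgi_response(raw):
    "Split CGI-style output into a status code, a header dict, and the body."
    head, body = raw.split("\n\n", 1)
    status, extra = 200, {}
    for line in head.split("\n"):
        name, value = line.split(": ", 1)
        if name == "Status":
            # value looks like "404 Not Found"; keep only the numeric code
            status = int(value.split(" ", 1)[0])
        else:
            extra[name] = value
    return status, extra, body

raw = "Status: 404 Not Found\nContent-Type: text/html\n\n<h1>gone</h1>"
status, headers, body = split_cgi_response(raw)
assert status == 404 and headers["Content-Type"] == "text/html"

Note that the committed code tests string.find(self.buffer, "\n\n") truthily, which is also true for -1 (not found); presumably the pipe's first read reliably contains the header block, so the case never bites here.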
lib/python/ZServer/medusa/http_server.py

(Same changes as ZServer/medusa/http_server.py above; the file is duplicated at this path.)
lib/python/ZServer/medusa/producers.py

(Same changes as ZServer/medusa/producers.py above; the file is duplicated at this path.)
lib/python/ZServer/zope_handler.py

(Same changes as ZServer/zope_handler.py above; the file is duplicated at this path.)