Commit ae12ae9f authored by Amos Latteier's avatar Amos Latteier

added back future producer stuff.

parent 1d9f3e9e
@@ -9,7 +9,7 @@
# interested in using this software in a commercial context, or in
# purchasing support, please contact the author.
RCS_ID = '$Id: http_server.py,v 1.3 1999/01/15 02:27:04 amos Exp $'
RCS_ID = '$Id: http_server.py,v 1.4 1999/01/18 22:44:49 amos Exp $'
# python modules
import os
@@ -301,6 +301,7 @@ class http_channel (asynchat.async_chat):
current_request = None
channel_counter = counter()
writable=asynchat.async_chat.writable_future
def __init__ (self, server, conn, addr):
self.channel_number = http_channel.channel_counter.increment()
@@ -418,11 +419,7 @@ class http_channel (asynchat.async_chat):
return
# no handlers, so complain
r.error (404)
def writable (self):
# this is just the normal async_chat 'writable', here for comparison
return self.ac_out_buffer or len(self.producer_fifo)
r.error (404)
def writable_for_proxy (self):
# this version of writable supports the idea of a 'stalled' producer
@@ -598,32 +595,13 @@ def crack_request (r):
version = None
return string.lower (REQUEST.group (1)), REQUEST.group(2), version
class fifo:
def __init__ (self, list=None):
if not list:
self.list = []
else:
self.list = list
def __len__ (self):
return len(self.list)
def first (self):
return self.list[0]
class fifo(asynchat.fifo):
def push_front (self, object):
self.list.insert (0, object)
def push (self, data):
self.list.append (data)
def pop (self):
if self.list:
result = self.list[0]
del self.list[0]
return (1, result)
else:
return (0, None)
def compute_timezone_for_log ():
if time.daylight:
# -*- Mode: Python; tab-width: 4 -*-
RCS_ID = '$Id: producers.py,v 1.3 1999/01/15 02:27:52 amos Exp $'
RCS_ID = '$Id: producers.py,v 1.4 1999/01/18 22:45:22 amos Exp $'
import string
@@ -11,8 +11,37 @@ in various ways to get interesting and useful behaviors.
For example, you can feed dynamically-produced output into the compressing
producer, then wrap this with the 'chunked' transfer-encoding producer.
Producer Interface:
All producers have a 'more' method. 'more' returns a string of output.
'more' can be called multiple times. When 'more' returns '', the producer
is exhausted.
Optional Future Producer Interface:
The future producer interface adds a 'ready' method to producers. This
supports 'future' producers, which may not be ready until some time after
they are created. A false return from 'ready' means that a call to 'more'
will not give you useful information right now, but will later. When a
producer is exhausted, it should return true for 'ready'. A producer that
is not ready should raise 'NotReady' when its 'more' method is called.
Note: Not having a 'ready' method implies that a producer is always ready.
Note: Composite producers will probably have to consult their sub-producers
to ensure readiness.
Note: If you don't wish to use future producers, nothing changes: simply
don't call a producer's 'ready' method. Everything works as before.
"""
class NotReady(Exception):
"""Raised by future producers when their more method is called
when they are not ready."""
pass
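To make the optional interface concrete, here is a minimal sketch of a future producer (illustration only, not part of this commit; the class name and the 'feed'/'finish' methods are hypothetical). Output arrives asynchronously, and 'ready' reports whether 'more' can deliver anything yet:

class queue_producer:
    """Hypothetical future producer: output arrives asynchronously
    (say, from another thread), so it may not be ready at creation."""
    def __init__(self):
        self.pending = []   # chunks that have arrived so far
        self.finished = 0   # set once no more data will arrive

    def feed(self, data):
        self.pending.append(data)

    def finish(self):
        self.finished = 1

    def ready(self):
        # ready when data is waiting, or when exhausted
        return len(self.pending) or self.finished

    def more(self):
        if self.pending:
            return self.pending.pop(0)
        if self.finished:
            return ''       # exhausted
        raise NotReady()    # caller should wait and poll 'ready' again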
class simple_producer:
"producer for a string"
def __init__ (self, data, buffer_size=1024):
@@ -55,9 +84,6 @@ class lines_producer:
def __init__ (self, lines):
self.lines = lines
def ready (self):
return len(self.lines)
def more (self):
if self.lines:
chunk = self.lines[:50]
@@ -89,6 +115,7 @@ class file_producer:
else:
return data
# A simple output producer. This one does not [yet] have
# the safety feature built into the monitor channel: runaway
# output will not be caught.
@@ -98,9 +125,11 @@ class file_producer:
class output_producer:
"Acts like an output file; suitable for capturing sys.stdout"
def __init__ (self):
self.data = ''
self.closed = None
def write (self, data):
lines = string.splitfields (data, '\n')
data = string.join (lines, '\r\n')
@@ -116,7 +145,7 @@ class output_producer:
) + '\r\n'
def ready (self):
return (len (self.data) > 0)
return (len (self.data) > 0) or self.closed
def flush (self):
pass
@@ -131,13 +160,26 @@ class output_producer:
return result
else:
return ''
def close(self):
self.closed=1
class composite_producer:
"combine a fifo of producers into one"
def __init__ (self, producers):
self.producers = producers
self.buffer = ''
def more (self):
if self.buffer:
b=self.buffer
self.buffer=''
return b
# we should only get here when not
# using the ready option
while len(self.producers):
p = self.producers.first()
d = p.more()
@@ -148,6 +190,21 @@ class composite_producer:
else:
return ''
def ready(self):
# This producer requires a buffer to ensure
# that it really is ready when it says so
if self.buffer or len(self.producers)==0:
return 1
while self.producers.ready():
p = self.producers.first()
d = p.more()
if d:
self.buffer=d
else:
self.producers.pop()
if self.buffer or len(self.producers)==0:
return 1
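For illustration, a consumer that honors the future interface polls 'ready' before calling 'more'. A rough sketch of such a loop (hypothetical helper, not in the commit; a real channel would return to the select loop rather than spin):

def drain(producer):
    # collect a producer's entire output, respecting the
    # optional 'ready' method of the future interface
    chunks = []
    while 1:
        if hasattr(producer, 'ready') and not producer.ready():
            continue    # busy-wait for the sketch; don't do this in a server
        data = producer.more()
        if data == '':
            break       # '' from a ready producer means exhausted
        chunks.append(data)
    return string.join(chunks, '')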
class globbing_producer:
"""
@@ -172,6 +229,13 @@ class globbing_producer:
self.buffer = ''
return r
def ready(self):
# XXX doesn't in fact guarantee readiness. Should probably
# redo this one like the composite producer... But
# it's not a big deal, because refill_buffer will
# catch the NotReady exception...
return not hasattr(self.producer,'ready') or self.producer.ready()
class hooked_producer:
"""
@@ -197,6 +261,9 @@ class hooked_producer:
else:
return ''
def ready(self):
return not hasattr(self.producer,'ready') or self.producer.ready()
# HTTP 1.1 emphasizes that an advertised Content-Length header MUST be
# correct. In the face of Strange Files, it is conceivable that
# reading a 'file' may produce an amount of data not matching that
@@ -236,6 +303,9 @@ class chunked_producer:
else:
return ''
def ready(self):
return not hasattr(self.producer,'ready') or self.producer.ready()
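For reference, chunked transfer-encoding frames each chunk as a hexadecimal length, CRLF, the data, CRLF, with a zero-length chunk marking the end. A minimal sketch of that framing around a plain producer (illustration only, not the commit's implementation; trailers omitted):

class chunked_framing_sketch:
    "hypothetical illustration of HTTP/1.1 chunked framing"
    def __init__(self, producer):
        self.producer = producer

    def more(self):
        if self.producer:
            data = self.producer.more()
            if data:
                # hex length, CRLF, chunk data, CRLF
                return '%x\r\n%s\r\n' % (len(data), data)
            else:
                self.producer = None
                return '0\r\n\r\n'   # terminating zero-length chunk
        return ''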
# Unfortunately this isn't very useful right now (Aug 97), because
# apparently the browsers don't do on-the-fly decompression. Which
# is sad, because this could _really_ speed things up, especially for
@@ -279,6 +349,9 @@ class compressed_producer:
else:
return ''
def ready(self):
return not hasattr(self.producer,'ready') or self.producer.ready()
class escaping_producer:
"A producer that escapes a sequence of characters"
@@ -311,3 +384,7 @@ class escaping_producer:
return buffer
else:
return buffer
def ready(self):
return not hasattr(self.producer,'ready') or self.producer.ready()
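The same one-line 'ready' delegation appears in globbing_producer, hooked_producer, chunked_producer, compressed_producer and escaping_producer; it could be factored into a shared helper (hypothetical sketch, not part of the commit):

def producer_ready(producer):
    # a producer without a 'ready' method is always ready
    return not hasattr(producer, 'ready') or producer.ready()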
@@ -22,17 +22,15 @@ import thread
from PubCore import handle
from medusa import counter
from medusa import producers
from medusa import counter, producers
from medusa.default_handler import split_path, unquote, get_header
from medusa.producers import NotReady
HEADER_LINE = regex.compile ('\([A-Za-z0-9-]+\): \(.*\)')
CONTENT_LENGTH = regex.compile('Content-Length: \([0-9]+\)',regex.casefold)
CONNECTION = regex.compile ('Connection: \(.*\)', regex.casefold)
# maps some request headers to environment variables.
# (those that don't start with 'HTTP_')
#
header2env={'content-length' : 'CONTENT_LENGTH',
'content-type' : 'CONTENT_TYPE',
'connection' : 'CONNECTION_TYPE',
@@ -40,7 +38,7 @@ header2env={'content-length' : 'CONTENT_LENGTH',
class zope_handler:
"Publishes a module with ZPublisher"
"publishes a module with ZPublisher"
# XXX add code to allow env overriding
@@ -72,10 +70,12 @@ class zope_handler:
else:
return 0
def handle_request(self,request):
self.hits.increment()
if request.command in ["post","put"]:
size=get_header(CONTENT_LENGTH, request.header)
size=string.atoi(size)
if size > 1048576:
# write large upload data to a file
from tempfile import TemporaryFile
@@ -83,7 +83,7 @@ class zope_handler:
else:
self.data = StringIO()
self.request = request
request.channel.set_terminator(string.atoi(size))
request.channel.set_terminator(size)
request.collector=self
else:
sin=StringIO()
@@ -134,11 +134,56 @@ class zope_handler:
def continue_request(self,sin,request):
"continue handling request now that we have the stdin"
producer=header_scanning_producer(request)
pusher=file_pusher(producer)
outpipe=handle(self.module_name,
self.get_environment(request), sin, (pusher.push, ()))
outpipe=handle(self.module_name,self.get_environment(request),sin)
# now comes the hairy stuff. adapted from http_server
#
connection = string.lower(get_header(CONNECTION,request.header))
close_it = 0
wrap_in_chunking = 0
if request.version == '1.0':
if connection == 'keep-alive':
if not request.has_key ('Content-Length'):
close_it = 1
else:
request['Connection'] = 'Keep-Alive'
else:
close_it = 1
elif request.version == '1.1':
if connection == 'close':
close_it = 1
elif not request.has_key ('Content-Length'):
if request.has_key ('Transfer-Encoding'):
if not request['Transfer-Encoding'] == 'chunked':
close_it = 1
elif request.use_chunked:
request['Transfer-Encoding'] = 'chunked'
wrap_in_chunking = 1
else:
close_it = 1
if close_it:
request['Connection'] = 'close'
outgoing_producer = header_scanning_producer(request,outpipe)
# apply a few final transformations to the output
request.channel.push_with_producer (
# hooking lets us log the number of bytes sent
producers.hooked_producer (
outgoing_producer,
request.log
)
)
request.channel.current_request = None
if close_it:
request.channel.close_when_done()
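The connection handling above is adapted from Medusa's http_server, as the comment notes. Restated as a standalone decision table (hypothetical helper for clarity, not part of the commit; the final unknown-version branch is a conservative choice, not taken from the code above):

def connection_policy(version, connection, has_content_length,
                      transfer_encoding, can_chunk):
    """Hypothetical restatement of the keep-alive logic above.
    Returns (close_it, wrap_in_chunking)."""
    if version == '1.0':
        # HTTP/1.0 keeps the connection open only on explicit request,
        # and only when a correct Content-Length is advertised
        if connection == 'keep-alive' and has_content_length:
            return (0, 0)
        return (1, 0)
    if version == '1.1':
        if connection == 'close':
            return (1, 0)
        if has_content_length:
            return (0, 0)
        if transfer_encoding is not None:
            # a non-chunked Transfer-Encoding forces a close
            if transfer_encoding == 'chunked':
                return (0, 0)
            return (1, 0)
        if can_chunk:
            return (0, 1)   # wrap the output in chunked encoding
        return (1, 0)
    return (1, 0)           # unknown version: close, to be safe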
def status (self):
return producers.simple_producer("""
<li>Zope Handler
@@ -150,7 +195,7 @@ class zope_handler:
# put and post collection methods
#
def collect_incoming_data(self, data):
def collect_incoming_data (self, data):
self.data.write(data)
def found_terminator(self):
@@ -166,117 +211,50 @@ class zope_handler:
self.continue_request(d,r)
# The pusher is used to push data out of the pipe into the
# header_scanning_producer
class file_pusher:
"pushs data from one file into another file"
def __init__(self,out):
self.out=out
def push(self,file):
"push data from file to out. if file is exhausted, close out."
data=file.read()
if data != '':
self.out.write(data)
else:
self.out.close()
# The header_scanning_producer accepts data from
# the output pipe (via the file pusher). Then it
# munges the data and pushes it into the channel.
class header_scanning_producer:
"""This producer accepts data with the write method.
It scans the data, munges HTTP headers and pushes
it into a channel. When done it logs information."""
close_it=1
exit=None
def __init__ (self, request):
self.buffer = ''
self.request = request
self.channel = request.channel
self.got_header = 0
self.bytes_out = counter.counter()
def write (self, data):
if self.got_header:
self.push(data)
else:
self.buffer = self.buffer + data
i=string.find(self.buffer,'\n\n')
if i != -1:
self.got_header = 1
headers=string.split(self.buffer[:i],'\n')
self.push(self.build_header(headers))
self.push(self.buffer[i+2:])
self.buffer = ''
def build_header (self, lines):
status = '200 OK'
headers=[]
header_dict={}
for line in lines:
[header, header_value]=string.split(line,": ",1)
header_dict[header]=header_value
if header=="Status":
status=header_value
elif line:
headers.append(line)
self.request.reply_code=string.atoi(status[:3]) # for logging
headers.insert (0, 'HTTP/1.0 %s' % status)
headers.append ('Server: ' + self.request['Server'])
headers.append ('Date: ' + self.request['Date'])
# XXX add stuff to determine chunking and
# 'Transfer-Encoding'
# XXX is this keep alive logic right?
connection = string.lower(get_header
(CONNECTION, self.request.header))
if connection == 'keep-alive' and \
header_dict.has_key ('Content-Length'):
self.close_it=None
headers.append('Connection: Keep-Alive')
else:
headers.append ('Connection: close')
self.close_it=1
"""This weird producer patches together
medusa's idea of http headers with ZPublisher's
"""
def __init__(self,request,pipe):
self.request=request
self.pipe=pipe
self.done=None
self.buffer=''
self.exit=None
if header_dict.has_key('Bobo-Exception-Type') and \
header_dict['Bobo-Exception-Type']=='exceptions.SystemExit':
self.exit=1
return string.join (headers, '\r\n')+'\r\n\r\n'
def push(self, data):
self.bytes_out.increment(len(data))
self.channel.push(data)
def close (self):
self.request.log(int(self.bytes_out.as_long()))
self.request.channel.current_request = None
if self.exit:
self.channel.producer_fifo.push(exit_producer())
elif self.close_it:
self.channel.close_when_done()
# is this necessary?
del self.request
del self.channel
def more(self):
if self.buffer:
b=self.buffer
self.buffer=''
return b
data=self.pipe.read()
if data is None:
raise NotReady()
return data
def ready(self):
if self.done:
return self.pipe.ready()
elif self.pipe.ready():
self.buffer=self.buffer+self.pipe.read()
if string.find(self.buffer,"\n\n") != -1:
[headers,html]=string.split(self.buffer,"\n\n",1)
headers=string.split(headers,"\n")
for line in headers:
[header, header_value]=string.split(line,": ",1)
if header=="Status":
[code,message]=string.split(header_value," ",1)
self.request.reply_code=string.atoi(code)
elif header=="Bobo-Exception-Type" and \
header_value=="exceptions.SystemExit":
self.exit=1
else:
self.request[header]=header_value
self.buffer=self.request.build_reply_header()+html
del self.request
self.done=1
class exit_producer:
# XXX this is not currently sufficient.
# needs to be enhanced to actually work.
def more(self):
raise SystemExit
# perhaps there could be a more graceful shutdown.
asyncore.close_all()
\ No newline at end of file