Commit 521ddffb authored by Amos Latteier

Changed to use thread_handler style output. Bagged the future producer
stuff. Also a couple of misc tweaks. SystemExit is still not done.
parent 6146e717
"""
Zope Medusa Handler based on bobo_handler.py
Zope Medusa Handler
Uses a new threaded architecture.
@@ -23,25 +23,24 @@ import thread
from PubCore import handle
from medusa import counter
from medusa import default_handler
from medusa import producers
from medusa.default_handler import split_path, unquote, get_header
split_path = default_handler.split_path
unquote = default_handler.unquote
get_header = default_handler.get_header
HEADER_LINE = regex.compile ('\([A-Za-z0-9-]+\): \(.*\)')
CONTENT_LENGTH = regex.compile('Content-Length: \([0-9]+\)',regex.casefold)
CONNECTION = regex.compile ('Connection: \(.*\)', regex.casefold)
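These patterns use the long-deprecated regex module with emacs-style grouping, \(...\). For readers unfamiliar with that syntax, the same two patterns in modern re form (a restatement, not part of the commit):

    import re

    # 'Content-Length: 42'      -> group(1) == '42'
    CONTENT_LENGTH = re.compile(r'Content-Length: ([0-9]+)', re.IGNORECASE)
    # 'Connection: keep-alive'  -> group(1) == 'keep-alive'
    CONNECTION = re.compile(r'Connection: (.*)', re.IGNORECASE)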
# maps request headers to environment variables
#
# maps some request headers to environment variables.
# (those that don't start with 'HTTP_')
#
header2env={'content-length' : 'CONTENT_LENGTH',
'content-type' : 'CONTENT_TYPE'
'content-type' : 'CONTENT_TYPE',
'connection' : 'CONNECTION_TYPE',
}
class zope_handler:
"publishes a module with ZPublisher"
"Publishes a module with ZPublisher"
# XXX add code to allow env overriding
@@ -76,10 +75,15 @@ class zope_handler:
def handle_request(self,request):
self.hits.increment()
if request.command in ["post","put"]:
cl=get_header(CONTENT_LENGTH, request.header)
self.data = StringIO() # XXX maybe use tempfile, if cl is large
size=string.atoi(get_header(CONTENT_LENGTH, request.header)) # as an int, so the size test below compares numbers
if size > 1048576:
# write large upload data to a file
from tempfile import TemporaryFile
self.data = TemporaryFile('w+b')
else:
self.data = StringIO()
self.request = request
request.channel.set_terminator(string.atoi(cl))
request.channel.set_terminator(size)
request.collector=self
else:
sin=StringIO()
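The post/put branch above now spools request bodies larger than 1 MB to disk instead of holding them in memory. A self-contained sketch of the same decision in modern Python, using the threshold from the diff (make_body_buffer is an illustrative name, not part of the commit):

    from io import BytesIO
    from tempfile import TemporaryFile

    SPOOL_LIMIT = 1048576  # 1 MB, the threshold used above

    def make_body_buffer(content_length):
        # Small uploads stay in memory; large ones go to an unnamed
        # temporary file that vanishes when closed.
        if content_length > SPOOL_LIMIT:
            return TemporaryFile('w+b')
        return BytesIO()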
@@ -112,74 +116,29 @@ class zope_handler:
env['PATH_INFO'] = path_info
env['GATEWAY_INTERFACE']='CGI/1.1'
env['REMOTE_ADDR']=request.channel.addr[0]
#env['REMOTE_HOST']=request.channel.addr[0] #what should this be?
# XXX 'REMOTE_HOST' is missing. Should it be
# retrieved from the resolver somehow?
for header in request.header:
[key,value]=string.split(header,": ",1)
key=string.lower(key)
if header2env.has_key(key) and value:
env[header2env[key]]=value
else:
key='HTTP_'+string.upper(string.join(string.split(key,"-"),"_"))
key='HTTP_' + string.upper(
string.join(string.split(key, "-"), "_"))
if value and not env.has_key(key):
env[key]=value
return env
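The loop above implements a simple rule: headers listed in header2env map directly to their CGI variable, and every other header becomes HTTP_<NAME> with dashes folded to underscores. A standalone restatement in modern Python:

    header2env = {'content-length': 'CONTENT_LENGTH',
                  'content-type': 'CONTENT_TYPE',
                  'connection': 'CONNECTION_TYPE'}

    def add_header_to_env(env, name, value):
        # 'Content-Type' -> CONTENT_TYPE; 'User-Agent' -> HTTP_USER_AGENT
        key = name.lower()
        if key in header2env and value:
            env[header2env[key]] = value
        else:
            key = 'HTTP_' + key.upper().replace('-', '_')
            if value and key not in env:
                env[key] = value

    env = {}
    add_header_to_env(env, 'Content-Type', 'text/html')
    add_header_to_env(env, 'User-Agent', 'Mozilla/4.0')
    # env == {'CONTENT_TYPE': 'text/html', 'HTTP_USER_AGENT': 'Mozilla/4.0'}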
def continue_request(self,sin,request):
"continue handling request now that we have the stdin"
outpipe=handle(self.module_name,self.get_environment(request),sin)
# now comes the hairy stuff. adapted from http_server
#
connection = string.lower(get_header(CONNECTION,request.header))
close_it = 0
wrap_in_chunking = 0
if request.version == '1.0':
if connection == 'keep-alive':
if not request.has_key ('Content-Length'):
close_it = 1
else:
request['Connection'] = 'Keep-Alive'
else:
close_it = 1
elif request.version == '1.1':
if connection == 'close':
close_it = 1
elif not request.has_key ('Content-Length'):
if request.has_key ('Transfer-Encoding'):
if not request['Transfer-Encoding'] == 'chunked':
close_it = 1
elif request.use_chunked:
request['Transfer-Encoding'] = 'chunked'
wrap_in_chunking = 1
else:
close_it = 1
if close_it:
request['Connection'] = 'close'
outgoing_producer = crazy_producer(request,outpipe)
# apply a few final transformations to the output
request.channel.push_with_producer (
# globbing gives us large packets
producers.globbing_producer (
# hooking lets us log the number of bytes sent
producers.hooked_producer (
outgoing_producer,
request.log
)
)
)
request.channel.current_request = None
if close_it:
request.channel.close_when_done()
producer=header_scanning_producer(request)
pusher=file_pusher(producer)
outpipe=handle(self.module_name,
self.get_environment(request), sin, (pusher.push, ()))
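This is the heart of the rewrite: instead of returning a pipe that medusa's producers poll (the code deleted above), handle() is given a callback, and ZPublisher's worker thread pushes its output through file_pusher into header_scanning_producer. A self-contained sketch of that inversion of control; the (callback, args) shape mirrors the call above, but PubCore's exact contract is an assumption, and all names here are illustrative:

    import threading
    from io import StringIO

    def worker(body, callback):
        # Stands in for ZPublisher running in a thread: it produces a
        # response and hands it to the callback instead of being polled.
        callback(StringIO(body))

    chunks = []
    t = threading.Thread(
        target=worker,
        args=('Status: 200 OK\n\nhello', lambda f: chunks.append(f.read())))
    t.start()
    t.join()
    # chunks == ['Status: 200 OK\n\nhello']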
def status (self):
return producers.simple_producer("""
<li>Zope Handler
@@ -191,7 +150,7 @@ class zope_handler:
# put and post collection methods
#
def collect_incoming_data (self, data):
def collect_incoming_data(self, data):
self.data.write(data)
def found_terminator(self):
@@ -207,57 +166,117 @@ class zope_handler:
self.continue_request(d,r)
class pipe_producer:
def __init__(self,pipe):
self.pipe=pipe
# The pusher is used to push data out of the pipe into the
# header_scanning_producer
class file_pusher:
"pushs data from one file into another file"
def __init__(self,out):
self.out=out
def push(self,file):
"push data from file to out. if file is exhausted, close out."
data=file.read()
if data != '':
self.out.write(data)
else:
self.out.close()
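A usage sketch for the class above: the pusher copies whatever the pipe has into any sink exposing write()/close() (header_scanning_producer in this commit), and closes the sink at end of file. This assumes the file_pusher defined above is in scope; print_sink is an illustrative stand-in:

    from io import StringIO

    class print_sink:
        "Illustrative stand-in for header_scanning_producer."
        def write(self, data):
            print('write:', repr(data))
        def close(self):
            print('closed')

    pipe = StringIO('Status: 200 OK\n\nbody')
    pusher = file_pusher(print_sink())
    pusher.push(pipe)   # copies the available data into the sink
    pusher.push(pipe)   # pipe exhausted, so the sink is closed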
# The header_scanning_producer accepts data from
# the output pipe (via the file pusher). Then it
# munges the data and pushes it into the channel.
class header_scanning_producer:
"""This producer accepts data with the write method.
It scans the data, munges HTTP headers and pushes
it into a channel. When done it logs information."""
close_it=1
exit=None
def __init__ (self, request):
self.buffer = ''
self.request = request
self.channel = request.channel
self.got_header = 0
self.bytes_out = counter.counter()
def write (self, data):
if self.got_header:
self.push(data)
else:
self.buffer = self.buffer + data
i=string.find(self.buffer,'\n\n')
if i != -1:
self.got_header = 1
headers=string.split(self.buffer[:i],'\n')
self.push(self.build_header(headers))
self.push(self.buffer[i+2:])
self.buffer = ''
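The write() method above buffers output until the first blank line, then splits the CGI-style header block from the body. The scan in isolation (a standalone restatement, not the committed code):

    def split_cgi_response(buffer):
        # Returns (headers, body) once the blank line has arrived,
        # or (None, buffer) while the header block is still incomplete.
        i = buffer.find('\n\n')
        if i == -1:
            return None, buffer
        return buffer[:i].split('\n'), buffer[i + 2:]

    print(split_cgi_response('Status: 200 OK\nContent-Type: text/html\n\n<html>'))
    # (['Status: 200 OK', 'Content-Type: text/html'], '<html>')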
def build_header (self, lines):
status = '200 OK'
headers=[]
header_dict={}
for line in lines:
[header, header_value]=string.split(line,": ",1)
header_dict[header]=header_value
if header=="Status":
status=header_value
elif line:
headers.append(line)
self.request.reply_code=string.atoi(status[:3]) # for logging
def more(self):
#if not self.ready():
# import traceback
# traceback.print_stack()
data=self.pipe.read()
if data is None: return ''
return data
def ready(self):
return self.pipe.ready()
class crazy_producer:
"""This weird producer patches together
medusa's idea of http headers with ZPublisher's
"""
def __init__(self,request,pipe):
self.request=request
self.pipe=pipe
self.done=None
self.buffer=''
headers.insert (0, 'HTTP/1.0 %s' % status)
headers.append ('Server: ' + self.request['Server'])
headers.append ('Date: ' + self.request['Date'])
# XXX add stuff to determine chunking and
# 'Transfer-Encoding'
# XXX is this keep alive logic right?
connection = string.lower(get_header(CONNECTION, self.request.header))
if connection == 'keep-alive' and \
header_dict.has_key ('Content-Length'):
self.close_it=None
headers.append('Connection: Keep-Alive')
else:
headers.append ('Connection: close')
self.close_it=1
if header_dict.has_key('Bobo-Exception-Type') and \
header_dict['Bobo-Exception-Type']=='exceptions.SystemExit':
self.exit=1
return string.join (headers, '\r\n')+'\r\n\r\n'
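The keep-alive rule above (flagged XXX in the source) persists the connection only when the client asked for keep-alive and the response carries a Content-Length, since without a known length the client cannot tell where the response ends. Restated on its own:

    def keep_alive(connection_header, response_headers):
        # Persist only if requested and the body length is known.
        return (connection_header.lower() == 'keep-alive'
                and 'Content-Length' in response_headers)

    assert keep_alive('Keep-Alive', {'Content-Length': '42'})
    assert not keep_alive('Keep-Alive', {})               # unknown length: close
    assert not keep_alive('close', {'Content-Length': '42'})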
def push(self, data):
self.bytes_out.increment(len(data))
self.channel.push(data)
def close (self):
self.request.log(int(self.bytes_out.as_long()))
self.request.channel.current_request = None
if self.exit:
self.channel.producer_fifo.push(exit_producer())
elif self.close_it:
self.channel.close_when_done()
# is this necessary?
del self.request
del self.channel
class exit_producer:
# XXX this is not currently sufficient.
# needs to be enhanced to actually work.
def more(self):
if self.buffer:
b=self.buffer
self.buffer=''
return b
data=self.pipe.read()
if data is None: return ''
return data
def ready(self):
if self.done:
return self.pipe.ready()
elif self.pipe.ready():
self.buffer=self.buffer+self.pipe.read()
if string.find(self.buffer,"\n\n"):
[headers,html]=string.split(self.buffer,"\n\n",1)
headers=string.split(headers,"\n")
for line in headers:
[header, header_value]=string.split(line,": ",1)
if header=="Status":
[code,message]=string.split(header_value," ",1)
self.request.reply_code=string.atoi(code)
else:
self.request[header]=header_value
self.buffer=self.request.build_reply_header()+html
del self.request
self.done=1
raise SystemExit
"""
Zope Medusa Handler based on bobo_handler.py
Zope Medusa Handler
Uses a new threaded architecture.
......@@ -23,25 +23,24 @@ import thread
from PubCore import handle
from medusa import counter
from medusa import default_handler
from medusa import producers
from medusa.default_handler import split_path, unquote, get_header
split_path = default_handler.split_path
unquote = default_handler.unquote
get_header = default_handler.get_header
HEADER_LINE = regex.compile ('\([A-Za-z0-9-]+\): \(.*\)')
CONTENT_LENGTH = regex.compile('Content-Length: \([0-9]+\)',regex.casefold)
CONNECTION = regex.compile ('Connection: \(.*\)', regex.casefold)
# maps request headers to environment variables
#
# maps request some headers to environment variables.
# (those that don't start with 'HTTP_')
#
header2env={'content-length' : 'CONTENT_LENGTH',
'content-type' : 'CONTENT_TYPE'
'content-type' : 'CONTENT_TYPE',
'connection' : 'CONNECTION_TYPE',
}
class zope_handler:
"publishes a module with ZPublisher"
"Publishes a module with ZPublisher"
# XXX add code to allow env overriding
......@@ -76,10 +75,15 @@ class zope_handler:
def handle_request(self,request):
self.hits.increment()
if request.command in ["post","put"]:
cl=get_header(CONTENT_LENGTH, request.header)
self.data = StringIO() # XXX maybe use tempfile, if cl is large
size=get_header(CONTENT_LENGTH, request.header)
if size > 1048576:
# write large upload data to a file
from tempfile import TemporaryFile
self.data = TemporaryFile('w+b')
else:
self.data = StringIO()
self.request = request
request.channel.set_terminator(string.atoi(cl))
request.channel.set_terminator(string.atoi(size))
request.collector=self
else:
sin=StringIO()
......@@ -112,74 +116,29 @@ class zope_handler:
env['PATH_INFO'] = path_info
env['GATEWAY_INTERFACE']='CGI/1.1'
env['REMOTE_ADDR']=request.channel.addr[0]
#env['REMOTE_HOST']=request.channel.addr[0] #what should this be?
# XXX 'REMOTE_HOST' is missing. Should it be
# retrieved from the resolver somehow?
for header in request.header:
[key,value]=string.split(header,": ",1)
key=string.lower(key)
if header2env.has_key(key) and value:
env[header2env[key]]=value
else:
key='HTTP_'+string.upper(string.join(string.split(key,"-"),"_"))
key='HTTP_' + string.upper(
string.join(string.split(key, "-"), "_"))
if value and not env.has_key(key):
env[key]=value
return env
def continue_request(self,sin,request):
"continue handling request now that we have the stdin"
outpipe=handle(self.module_name,self.get_environment(request),sin)
# now comes the hairy stuff. adapted from http_server
#
connection = string.lower(get_header(CONNECTION,request.header))
close_it = 0
wrap_in_chunking = 0
if request.version == '1.0':
if connection == 'keep-alive':
if not request.has_key ('Content-Length'):
close_it = 1
else:
request['Connection'] = 'Keep-Alive'
else:
close_it = 1
elif request.version == '1.1':
if connection == 'close':
close_it = 1
elif not request.has_key ('Content-Length'):
if request.has_key ('Transfer-Encoding'):
if not request['Transfer-Encoding'] == 'chunked':
close_it = 1
elif request.use_chunked:
request['Transfer-Encoding'] = 'chunked'
wrap_in_chunking = 1
else:
close_it = 1
if close_it:
request['Connection'] = 'close'
outgoing_producer = crazy_producer(request,outpipe)
# apply a few final transformations to the output
request.channel.push_with_producer (
# globbing gives us large packets
producers.globbing_producer (
# hooking lets us log the number of bytes sent
producers.hooked_producer (
outgoing_producer,
request.log
)
)
)
request.channel.current_request = None
if close_it:
request.channel.close_when_done()
producer=header_scanning_producer(request)
pusher=file_pusher(producer)
outpipe=handle(self.module_name,
self.get_environment(request), sin, (pusher.push, ()))
def status (self):
return producers.simple_producer("""
<li>Zope Handler
......@@ -191,7 +150,7 @@ class zope_handler:
# put and post collection methods
#
def collect_incoming_data (self, data):
def collect_incoming_data(self, data):
self.data.write(data)
def found_terminator(self):
......@@ -207,57 +166,117 @@ class zope_handler:
self.continue_request(d,r)
class pipe_producer:
def __init__(self,pipe):
self.pipe=pipe
# The pusher is used to push data out of the pipe into the
# header_scanning_producer
class file_pusher:
"pushs data from one file into another file"
def __init__(self,out):
self.out=out
def push(self,file):
"push data from file to out. if file is exhausted, close out."
data=file.read()
if data != '':
self.out.write(data)
else:
self.out.close()
# The header_scanning_producer accepts data from
# the output pipe (via the file pusher). Then it
# munges the data and pushes it into the channel.
class header_scanning_producer:
"""This producer accepts data with the write method.
It scans the data, munges HTTP headers and pushes
it into a channel. When done it logs information."""
close_it=1
exit=None
def __init__ (self, request):
self.buffer = ''
self.request = request
self.channel = request.channel
self.got_header = 0
self.bytes_out = counter.counter()
def write (self, data):
if self.got_header:
self.push(data)
else:
self.buffer = self.buffer + data
i=string.find(self.buffer,'\n\n')
if i != -1:
self.got_header = 1
headers=string.split(self.buffer[:i],'\n')
self.push(self.build_header(headers))
self.push(self.buffer[i+2:])
self.buffer = ''
def build_header (self, lines):
status = '200 OK'
headers=[]
header_dict={}
for line in lines:
[header, header_value]=string.split(line,": ",1)
header_dict[header]=header_value
if header=="Status":
status=header_value
elif line:
headers.append(line)
self.request.reply_code=string.atoi(status[:3]) # for logging
def more(self):
#if not self.ready():
# import traceback
# traceback.print_stack()
data=self.pipe.read()
if data is None: return ''
return data
def ready(self):
return self.pipe.ready()
class crazy_producer:
"""This weird producer patches together
medusa's idea of http headers with ZPublisher's
"""
def __init__(self,request,pipe):
self.request=request
self.pipe=pipe
self.done=None
self.buffer=''
headers.insert (0, 'HTTP/1.0 %s' % status)
headers.append ('Server: ' + self.request['Server'])
headers.append ('Date: ' + self.request['Date'])
# XXX add stuff to determine chunking and
# 'Transfer-Encoding'
# XXX is this keep alive logic right?
connection = string.lower(get_header
(CONNECTION, self.request.header))
if connection == 'keep-alive' and \
header_dict.has_key ('Content-Length'):
self.close_it=None
headers.append('Connection: Keep-Alive')
else:
headers.append ('Connection: close')
self.close_it=1
if header_dict.has_key('Bobo-Exception-Type') and \
header_dict['Bobo-Exception-Type']=='exceptions.SystemExit':
self.exit=1
return string.join (headers, '\r\n')+'\r\n\r\n'
def push(self, data):
self.bytes_out.increment(len(data))
self.channel.push(data)
def close (self):
self.request.log(int(self.bytes_out.as_long()))
self.request.channel.current_request = None
if self.exit:
self.channel.producer_fifo.push(exit_producer())
elif self.close_it:
self.channel.close_when_done()
# is this necessary?
del self.request
del self.channel
class exit_producer:
# XXX this is not currently sufficient.
# needs to be enhanced to actually work.
def more(self):
if self.buffer:
b=self.buffer
self.buffer=''
return b
data=self.pipe.read()
if data is None: return ''
return data
def ready(self):
if self.done:
return self.pipe.ready()
elif self.pipe.ready():
self.buffer=self.buffer+self.pipe.read()
if string.find(self.buffer,"\n\n"):
[headers,html]=string.split(self.buffer,"\n\n",1)
headers=string.split(headers,"\n")
for line in headers:
[header, header_value]=string.split(line,": ",1)
if header=="Status":
[code,message]=string.split(header_value," ",1)
self.request.reply_code=string.atoi(code)
else:
self.request[header]=header_value
self.buffer=self.request.build_reply_header()+html
del self.request
self.done=1
raise SystemExit
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment