Commit 1106f02e authored by Tatuya Kamada

Fix the problem that web checking results can be duplicated when the same URL is reported both with and without a trailing slash.

parent b0e585b9
@@ -363,6 +363,25 @@ class HTTPCacheCheckerTestSuite(object):
                     (header, read_value, reference_value)
       self.report_dict.setdefault(url, []).append(message)
 
+  def _isSameUrl(self, url):
+    """
+    Return whether the url has already been checked or not.
+    Example case:
+      http://example.com/login_form
+      http://example.com/login_form/
+    """
+    if url in (None, ''):
+      return False
+    same_url = None
+    if url.endswith('/'):
+      same_url = url.rstrip('/')
+    else:
+      same_url = '%s/' % url
+    if same_url in self.report_dict:
+      return True
+    return False
+
   def _parseWgetLogs(self, wget_log_file, discarded_url_list=_MARKER,
                      prohibited_file_name_list=None,
                      prohibited_folder_name_list=None):
@@ -386,6 +405,8 @@ class HTTPCacheCheckerTestSuite(object):
         # URL already checked during first pass
         logging.debug('%r Discarded' % url)
         discarded = True
+      elif self._isSameUrl(url):
+        discarded = True
       if discarded:
         # keep reading wget process without doing anything
         continue
...
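
For illustration, below is a minimal, self-contained sketch of the behaviour this commit introduces. The `_CheckerSketch` class and its `report` helper are hypothetical stand-ins (in the real commit the check runs inside `_parseWgetLogs` to discard already-seen URLs); only the `_isSameUrl` logic is taken from the diff above.

# Hypothetical stand-in for HTTPCacheCheckerTestSuite, reduced to the
# report_dict / _isSameUrl interaction shown in this commit.
class _CheckerSketch(object):

  def __init__(self):
    # url -> list of report messages, as in the real checker
    self.report_dict = {}

  def _isSameUrl(self, url):
    # Logic from the commit: treat 'url' and 'url/' as the same URL.
    if url in (None, ''):
      return False
    if url.endswith('/'):
      same_url = url.rstrip('/')
    else:
      same_url = '%s/' % url
    return same_url in self.report_dict

  def report(self, url, message):
    # Hypothetical helper: skip a URL whose slash variant is already reported.
    if self._isSameUrl(url):
      return
    self.report_dict.setdefault(url, []).append(message)

checker = _CheckerSketch()
checker.report('http://example.com/login_form', 'some cache problem')
checker.report('http://example.com/login_form/', 'some cache problem')
# Only the first form is kept; the trailing-slash duplicate is discarded.
assert list(checker.report_dict) == ['http://example.com/login_form']

Note that `_isSameUrl` only consults `self.report_dict`, so the slash variant is discarded only once the other form has actually produced at least one report entry.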