Use generators instead of lists where possible

git-svn-id: https://linkchecker.svn.sourceforge.net/svnroot/linkchecker/trunk/linkchecker@3739 e7d03fd6-7b0d-0410-9947-9c21f3af8025
This commit is contained in:
calvin 2008-04-28 00:26:02 +00:00
parent ff93d82541
commit 5d8bdaaa1f
14 changed files with 34 additions and 50 deletions

View file

@ -402,10 +402,9 @@ Use URL %s instead for checking."""), self.url, newurl)
minus redirection warnings.
"""
data = self.get_cache_data()
warns = [x for x in self.warnings if x[0] != "http-moved-permanent"]
data["warnings"] = warns
infos = [x for x in self.info if x[0] != "http-redirect"]
data["info"] = infos
data["warnings"] = [
x for x in self.warnings if x[0] != "http-moved-permanent"]
data["info"] = [x for x in self.info if x[0] != "http-redirect"]
return data
def check_response (self, response):

View file

@ -247,9 +247,8 @@ class MailtoUrl (urlbase.UrlBase):
"""
The cache key is a comma separated list of emails.
"""
emails = [addr[1] for addr in self.addresses]
emails.sort()
self.cache_url_key = u"%s:%s" % (self.scheme, u",".join(emails))
emails = u",".join(sorted(addr[1] for addr in self.addresses))
self.cache_url_key = u"%s:%s" % (self.scheme, emails)
assert isinstance(self.cache_url_key, unicode), self.cache_url_key
# cache_content_key remains None, recursion is not allowed

View file

@ -17,7 +17,7 @@
"""
Define standard test support classes functional for LinkChecker tests.
"""
from __future__ import with_statement
import os
import re
import codecs
@ -175,11 +175,9 @@ class LinkCheckTest (unittest.TestCase):
d = {'curdir': get_file_url(os.getcwd()),
'datadir': get_file_url(get_file()),
}
f = codecs.open(resultfile, "r", "iso-8859-15")
resultlines = [line.rstrip('\r\n') % d for line in f \
if line.strip() and not line.startswith(u'#')]
f.close()
return resultlines
with codecs.open(resultfile, "r", "iso-8859-15") as f:
return [line.rstrip('\r\n') % d for line in f
if line.strip() and not line.startswith(u'#')]
def file_test (self, filename, confargs=None):
"""

View file

@ -171,8 +171,7 @@ def get_cookie (maxage=2000):
("Version", "1"),
("Foo", "Bar"),
)
parts = ['%s="%s"' % (key, value) for key, value in data]
return "; ".join(parts)
return "; ".join('%s="%s"' % (key, value) for key, value in data)
class CookieRedirectHttpRequestHandler (httptest.NoQueryHttpRequestHandler):

View file

@ -549,7 +549,7 @@ class UrlBase (object):
# break cyclic dependencies
handler.parser = None
parser.handler = None
if [x for x in handler.urls if x[0] == self.anchor]:
if any(x for x in handler.urls if x[0] == self.anchor):
return
self.add_warning(_("Anchor #%s not found.") % self.anchor,
tag=WARN_URL_ANCHOR_NOT_FOUND)

View file

@ -304,8 +304,7 @@ class Rfc2965Cookie (HttpCookie):
if "port" not in self.attributes:
return True
cport = self.attributes["port"]
ports = [int(x) for x in cport.split(",")]
return port in ports
return port in [int(x) for x in cport.split(",")]
def server_header_name (self):
return "Set-Cookie2"

View file

@ -118,9 +118,8 @@ def get_headers_lang (headers):
pass
pref_languages.append((pref, lang))
pref_languages.sort()
languages = [x[1] for x in pref_languages]
# search for lang
for lang in languages:
for lang in (x[1] for x in pref_languages):
if lang in supported_languages:
return lang
return default_language

View file

@ -222,9 +222,9 @@ class Logger (object):
parts = Fields.keys()
else:
parts = self.logparts
values = [self.part(x) for x in parts]
values = (self.part(x) for x in parts)
# maximum indent for localized log part names
self.max_indent = max([len(x) for x in values])+1
self.max_indent = max(len(x) for x in values)+1
for key in parts:
numspaces = (self.max_indent - len(self.part(key)))
self.logspaces[key] = u" " * numspaces

View file

@ -95,16 +95,16 @@ class CSVLogger (linkcheck.logger.Logger):
Write csv formatted url check info.
"""
row = []
for s in [url_data.base_url or u"", url_data.recursion_level,
for s in (url_data.base_url or u"", url_data.recursion_level,
url_data.parent_url or u"", url_data.base_ref or u"",
url_data.result,
os.linesep.join([x[1] for x in url_data.warnings]),
os.linesep.join([x[1] for x in url_data.info]),
os.linesep.join(x[1] for x in url_data.warnings),
os.linesep.join(x[1] for x in url_data.info),
url_data.valid, url_data.url or u"",
url_data.line, url_data.column,
url_data.name, url_data.dltime,
url_data.dlsize, url_data.checktime,
url_data.cached]:
url_data.cached):
if isinstance(s, unicode):
row.append(s.encode(self.output_encoding, "ignore"))
else:

View file

@ -242,7 +242,7 @@ class HtmlLogger (linkcheck.logger.Logger):
Write url_data.info.
"""
sep = u"<br>"+os.linesep
text = sep.join([cgi.escape(x[1]) for x in url_data.info])
text = sep.join(cgi.escape(x[1]) for x in url_data.info)
self.writeln(u'<tr><td valign="top">' + self.part("info")+
u"</td><td>"+text+u"</td></tr>")
@ -251,7 +251,7 @@ class HtmlLogger (linkcheck.logger.Logger):
Write url_data.warnings.
"""
sep = u"<br>"+os.linesep
text = sep.join([cgi.escape(x[1]) for x in url_data.warnings])
text = sep.join(cgi.escape(x[1]) for x in url_data.warnings)
self.writeln(u'<tr><td bgcolor="' + self.colorwarning + u'" '+
u'valign="top">' + self.part("warning") +
u'</td><td bgcolor="' + self.colorwarning + u'">' +

View file

@ -91,8 +91,8 @@ class SQLLogger (linkcheck.logger.Logger):
"""
Store url check info into the database.
"""
log_warnings = [x[1] for x in url_data.warnings]
log_infos = [x[1] for x in url_data.info]
log_warnings = (x[1] for x in url_data.warnings)
log_infos = (x[1] for x in url_data.info)
self.writeln(u"insert into %(table)s(urlname,recursionlevel,"
"parentname,baseref,valid,result,warning,info,url,line,col,"
"name,checktime,dltime,dlsize,cached) values ("

View file

@ -35,8 +35,7 @@ class TestCookies (unittest.TestCase):
("Path", "/"),
("Version", "1"),
)
parts = ['%s="%s"' % (key, value) for key, value in data]
value = "; ".join(parts)
value = "; ".join('%s="%s"' % (key, value) for key, value in data)
scheme = "http"
host = "localhost"
path = "/"
@ -53,8 +52,7 @@ class TestCookies (unittest.TestCase):
("Path", "/"),
("Version", "1"),
)
parts = ['%s="%s"' % (key, value) for key, value in data]
value = "; ".join(parts)
value = "; ".join('%s="%s"' % (key, value) for key, value in data)
scheme = "http"
host = "localhost"
path = "/"
@ -66,8 +64,7 @@ class TestCookies (unittest.TestCase):
("Foo", "Bar\""),
("Port", "hul,la"),
)
parts = ['%s="%s"' % (key, value) for key, value in data]
value = "; ".join(parts)
value = "; ".join('%s="%s"' % (key, value) for key, value in data)
scheme = "http"
host = "localhost"
path = "/"
@ -80,8 +77,7 @@ class TestCookies (unittest.TestCase):
("Domain", "localhost"),
("Port", "100,555,76"),
)
parts = ['%s="%s"' % (key, value) for key, value in data]
value = "; ".join(parts)
value = "; ".join('%s="%s"' % (key, value) for key, value in data)
scheme = "http"
host = "localhost"
path = "/"
@ -96,8 +92,7 @@ class TestCookies (unittest.TestCase):
("Path", "/"),
)
# note: values are without quotes
parts = ['%s=%s' % (key, value) for key, value in data]
value = "; ".join(parts)
value = "; ".join('%s=%s' % (key, value) for key, value in data)
scheme = "http"
host = "example.org"
path = "/"
@ -112,8 +107,7 @@ class TestCookies (unittest.TestCase):
("Path", "/"),
("Version", "1"),
)
parts = ['%s="%s"' % (key, value) for key, value in data]
value = "; ".join(parts)
value = "; ".join('%s="%s"' % (key, value) for key, value in data)
scheme = "http"
host = "localhost"
path = "/"
@ -130,8 +124,7 @@ class TestCookies (unittest.TestCase):
("Path", "/"),
("Version", "1"),
)
parts = ['%s="%s"' % (key, value) for key, value in data]
value = "; ".join(parts)
value = "; ".join('%s="%s"' % (key, value) for key, value in data)
scheme = "http"
host = "localhost"
path = "/"
@ -143,8 +136,7 @@ class TestCookies (unittest.TestCase):
("Foo", "Bar\""),
("Port", "hul,la"),
)
parts = ['%s="%s"' % (key, value) for key, value in data]
value = "; ".join(parts)
value = "; ".join('%s="%s"' % (key, value) for key, value in data)
scheme = "http"
host = "localhost"
path = "/"
@ -156,8 +148,7 @@ class TestCookies (unittest.TestCase):
("Foo", "Bar\""),
("Port", "100,555,76"),
)
parts = ['%s="%s"' % (key, value) for key, value in data]
value = "; ".join(parts)
value = "; ".join('%s="%s"' % (key, value) for key, value in data)
scheme = "http"
host = "localhost"
path = "/"

View file

@ -40,7 +40,7 @@ def trace_filter (patterns):
if patterns is None:
_trace_filter.clear()
else:
_trace_filter.update([re.compile(pat) for pat in patterns])
_trace_filter.update(re.compile(pat) for pat in patterns)
def _trace (frame, event, arg):

View file

@ -135,7 +135,7 @@ def parse_qsl (qs, keep_blank_values=0, strict_parsing=0):
name_value_amp = qs.split('&')
for name_value in name_value_amp:
if ';' in name_value:
pairs.extend([[x, ';'] for x in name_value.split(';')])
pairs.extend([x, ';'] for x in name_value.split(';'))
pairs[-1][1] = '&'
else:
pairs.append([name_value, '&'])