Source code cleanup: use or remove unused variables

git-svn-id: https://linkchecker.svn.sourceforge.net/svnroot/linkchecker/trunk/linkchecker@3724 e7d03fd6-7b0d-0410-9947-9c21f3af8025
This commit is contained in:
calvin 2008-04-25 07:49:52 +00:00
parent e266a65b64
commit 973da91f44
12 changed files with 27 additions and 8 deletions

View file

@@ -102,6 +102,8 @@ class FileUrl (urlbase.UrlBase):
# norm base url again after changing
if self.base_url != base_url:
base_url, is_idn = linkcheck.checker.urlbase.url_norm(base_url)
if is_idn:
pass # XXX warn about idn use
self.base_url = unicode(base_url)
def build_url (self):

View file

@@ -66,7 +66,7 @@ def http_timeout (response):
if timeout is not None:
try:
timeout = int(timeout[8:].strip())
except ValueError, msg:
except ValueError:
timeout = DEFAULT_TIMEOUT_SECS
else:
timeout = DEFAULT_TIMEOUT_SECS

View file

@@ -328,6 +328,8 @@ Use URL %s instead for checking."""), self.url, newurl)
self.add_info(_("Redirected to %(url)s.") % {'url': newurl})
# norm base url - can raise UnicodeError from url.idna_encode()
redirected, is_idn = linkcheck.checker.urlbase.url_norm(newurl)
if is_idn:
pass # XXX warn about idn use
assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
"Norm redirected to %r", redirected)
urlparts = linkcheck.strformat.url_unicode_split(redirected)
@@ -563,8 +565,7 @@ Use URL %s instead for checking."""), self.url, newurl)
if self.data is None:
self.method = "GET"
response = self._get_http_response()
tries, response = self.follow_redirections(response,
set_result=False)
response = self.follow_redirections(response, set_result=False)[1]
self.headers = response.msg
self._read_content(response)
if self.data is None:

View file

@@ -41,9 +41,11 @@ class InternPatternUrl (urlbase.UrlBase):
scheme = parts[0]
domain = parts[1]
domain, is_idn = linkcheck.url.idna_encode(domain)
if is_idn:
pass # XXX warn about idn use
if not (domain and scheme):
return None
path, params = linkcheck.url.splitparams(parts[2])
path = linkcheck.url.splitparams(parts[2])[0]
segments = path.split('/')[:-1]
path = "/".join(segments)
if url.endswith('/'):

View file

@@ -26,6 +26,7 @@ import unittest
import linkcheck.checker
import linkcheck.configuration
import linkcheck.director
import linkcheck.logger
# helper alias

View file

@@ -683,7 +683,7 @@ class UrlBase (object):
log.addHandler(handler)
log.setLevel(logging.WARN)
cssparser = cssutils.CSSParser(log=log)
sheet = cssparser.parseString(self.get_content(), href=self.url)
cssparser.parseString(self.get_content(), href=self.url)
for record in handler.storage:
self.add_warning("cssutils: %s" % record.getMessage())
except:

View file

@@ -48,7 +48,7 @@ class Aggregate (object):
self.threads.append(t)
num = self.config["threads"]
if num >= 1:
for i in xrange(num):
for dummy in xrange(num):
t = checker.Checker(self.urlqueue, self.logger)
t.start()
self.threads.append(t)

View file

@@ -53,7 +53,7 @@ class Logger (object):
Send new url to all configured loggers.
"""
has_warnings = False
for tag, content in url_data.warnings:
for tag, dummy in url_data.warnings:
if tag not in self.ignorewarnings:
has_warnings = True
break

View file

@@ -284,6 +284,7 @@ class TestCaselessSortedDict (unittest.TestCase):
prev = None
for key, value in self.d.items():
self.assert_(key > prev)
self.assertEqual(value, self.d[key])
prev = key

View file

@@ -86,6 +86,7 @@ class TestCookies (unittest.TestCase):
host = "localhost"
path = "/"
cookie = linkcheck.cookies.NetscapeCookie(value, scheme, host, path)
self.assert_(cookie.is_valid_for("http", host, 100, "/"))
def test_netscape_cookie5 (self):
data = (
@@ -161,6 +162,7 @@ class TestCookies (unittest.TestCase):
host = "localhost"
path = "/"
cookie = linkcheck.cookies.Rfc2965Cookie(value, scheme, host, path)
self.assert_(cookie.is_valid_for("http", host, 100, "/"))
def test_cookie_parse1 (self):
lines = [
@@ -171,15 +173,23 @@ class TestCookies (unittest.TestCase):
]
from_headers = linkcheck.cookies.from_headers
headers, scheme, host, path = from_headers("\r\n".join(lines))
self.assertEqual(scheme, "http")
self.assertEqual(host, "example.org")
self.assertEqual(path, "/hello")
self.assertEqual(len(headers), 4)
def test_cookie_parse2 (self):
lines = [
'Scheme: https',
'Host: imaweevil.org',
'Host: example.org',
'Set-cookie: baggage="elitist"; comment="hologram"',
]
from_headers = linkcheck.cookies.from_headers
headers, scheme, host, path = from_headers("\r\n".join(lines))
self.assertEqual(scheme, "https")
self.assertEqual(host, "example.org")
self.assertEqual(path, "/")
self.assertEqual(len(headers), 3)
def test_cookie_parse3 (self):
lines = [

View file

@@ -264,6 +264,7 @@ class TestRobotsTxt (unittest.TestCase):
"User-Agent: *",
"Disallow: /.",
]
self.rp.parse(lines)
good = ['/foo.html']
bad = [] # Bug report says "/" should be denied, but that is not in the RFC
self.check_urls(good, bad)

View file

@@ -523,6 +523,7 @@ class TestUrl (unittest.TestCase):
idna_encode =linkcheck.url.idna_encode
encurl, is_idn = idna_encode(url)
self.assert_(is_idn)
self.assertTrue(encurl)
url = u''
encurl, is_idn = idna_encode(url)
self.assertFalse(is_idn)