From 4e56eceb358ae9e9c25833adbc44b761d321b586 Mon Sep 17 00:00:00 2001
From: Nicolas Bigaouette
Date: Wed, 12 Nov 2014 09:58:30 -0500
Subject: [PATCH] Detect if "url_data" contains proxy attributes before using
 them. Fix proposed by @colwilson in issue #555.

---
 linkcheck/cache/robots_txt.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/linkcheck/cache/robots_txt.py b/linkcheck/cache/robots_txt.py
index b33c0525..a5c7ab70 100644
--- a/linkcheck/cache/robots_txt.py
+++ b/linkcheck/cache/robots_txt.py
@@ -58,7 +58,7 @@ class RobotsTxt (object):
             return rp.can_fetch(self.useragent, url_data.url)
         self.misses += 1
         kwargs = dict(auth=url_data.auth, session=url_data.session)
-        if url_data.proxy:
+        if hasattr(url_data, "proxy") and hasattr(url_data, "proxytype"):
             kwargs["proxies"] = {url_data.proxytype: url_data.proxy}
         rp = robotparser2.RobotFileParser(**kwargs)
         rp.set_url(roboturl)