Make the robots.txt denial message an info instead of a warning

git-svn-id: https://linkchecker.svn.sourceforge.net/svnroot/linkchecker/trunk/linkchecker@2555 e7d03fd6-7b0d-0410-9947-9c21f3af8025
This commit is contained in:
calvin 2005-05-04 10:48:48 +00:00
parent a39f635b82
commit 49a2dee338

View file

@ -62,6 +62,8 @@ class HttpUrl (urlbase.UrlBase, proxysupport.ProxySupport):
self.has301status = False
self.no_anchor = False # remove anchor in request url
self.persistent = False
# URLs seen through 301/302 redirections
self.aliases = []
def allows_robots (self, url):
"""
@ -128,7 +130,7 @@ class HttpUrl (urlbase.UrlBase, proxysupport.ProxySupport):
self.cookies = []
# check robots.txt
if not self.allows_robots(self.url):
self.add_warning(
self.add_info(
_("Access denied by robots.txt, checked only syntax."))
return
# check for amazon server quirk