mirror of
https://github.com/Hopiu/linkchecker.git
synced 2026-04-18 05:11:00 +00:00
Make a robots.txt denial an info instead of a warning
git-svn-id: https://linkchecker.svn.sourceforge.net/svnroot/linkchecker/trunk/linkchecker@2555 e7d03fd6-7b0d-0410-9947-9c21f3af8025
This commit is contained in:
parent
a39f635b82
commit
49a2dee338
1 changed file with 3 additions and 1 deletion
|
|
@@ -62,6 +62,8 @@ class HttpUrl (urlbase.UrlBase, proxysupport.ProxySupport):
         self.has301status = False
         self.no_anchor = False # remove anchor in request url
         self.persistent = False
+        # URLs seen through 301/302 redirections
+        self.aliases = []
 
     def allows_robots (self, url):
         """
@@ -128,7 +130,7 @@ class HttpUrl (urlbase.UrlBase, proxysupport.ProxySupport):
         self.cookies = []
         # check robots.txt
         if not self.allows_robots(self.url):
-            self.add_warning(
+            self.add_info(
                 _("Access denied by robots.txt, checked only syntax."))
             return
         # check for amazon server quirk
|||
Loading…
Reference in a new issue