# Copyright (C) 2004-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Test robots.txt parsing.
"""
import unittest
from tests import need_network
from linkcheck import configuration, robotparser2
import requests


class TestRobotParser(unittest.TestCase):
    """Test robots.txt parser (needs internet access)."""

    def setUp(self):
        """Initialize self.rp as a robots.txt parser."""
        self.rp = robotparser2.RobotFileParser(session=requests.Session())

    def check(self, a, b):
        """Helper function comparing two results a and b."""
        if a == b:
            return
        # Human-readable verdict for the failure message.
        verdict = "access allowed" if b else "access denied"
        self.fail("%s != %s (%s)" % (a, b, verdict))

    @need_network
    def test_nonexisting_robots(self):
        """A site whose robots.txt does not exist must allow fetching."""
        self.rp.set_url("http://www.lycos.com/robots.txt")
        self.rp.read()
        allowed = self.rp.can_fetch(
            configuration.UserAgent, "http://www.lycos.com/search"
        )
        self.check(allowed, True)

    @need_network
    def test_disallowed_robots(self):
        """Google's robots.txt disallows fetching /search."""
        self.rp.set_url("http://google.com/robots.txt")
        self.rp.read()
        denied = self.rp.can_fetch(
            configuration.UserAgent, "http://google.com/search"
        )
        self.check(denied, False)