# -*- coding: iso-8859-1 -*-
# Copyright (C) 2004-2012 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Test robots.txt parsing.
"""
import unittest
from tests import need_network
from linkcheck import configuration, robotparser2
class TestRobotParser (unittest.TestCase):
    """
    Test robots.txt parser (needs internet access).
    """

    def setUp (self):
        """Initialize self.rp as a robots.txt parser."""
        self.rp = robotparser2.RobotFileParser()
        config = configuration.Configuration()
        # uncomment for debugging
        #config.init_logging(None, debug=["all"])

    def check (self, a, b):
        """Helper function comparing two results a and b.

        @param a: actual can_fetch() result
        @param b: expected result
        Fails the test with an explanatory message if they differ.
        """
        ac = "access allowed" if b else "access denied"
        if a != b:
            self.fail("%s != %s (%s)" % (a, b, ac))

    @need_network
    def test_nonexisting_robots (self):
        # A robots.txt URL that does not exist: everything must be allowed.
        self.rp.set_url('http://www.lycos.com/robots.txt')
        self.rp.read()
        self.check(self.rp.can_fetch(configuration.UserAgent,
                   'http://www.lycos.com/search'), True)

    @need_network
    def test_disallowed_robots (self):
        # Google's robots.txt disallows /search for generic user agents.
        self.rp.set_url('http://google.com/robots.txt')
        self.rp.read()
        self.check(self.rp.can_fetch(configuration.UserAgent,
                   "http://google.com/search"), False)