# -*- coding: iso-8859-1 -*-
# Copyright (C) 2004-2012 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Test robots.txt parsing.
"""
import unittest
from tests import need_network
from linkcheck import configuration, robotparser2


class TestRobotParser (unittest.TestCase):
    """Test robots.txt parser (needs internet access)."""

    def setUp (self):
        """Initialize self.rp as a robots.txt parser."""
        self.rp = robotparser2.RobotFileParser()
        config = configuration.Configuration()
        # uncomment for debugging
        config.init_logging(None, debug=["all"])

    def check (self, a, b):
        """Helper function comparing two results a and b."""
        # Describe the expected outcome for a readable failure message.
        ac = "access allowed" if b else "access denied"
        if a != b:
            self.fail("%s != %s (%s)" % (a, b, ac))

    @need_network
    def test_nonexisting_robots (self):
        # robots.txt that does not exist
        self.rp.set_url('http://www.lycos.com/robots.txt')
        self.rp.read()
        allowed = self.rp.can_fetch(configuration.UserAgent,
                                    'http://www.lycos.com/search')
        self.check(allowed, True)

    @need_network
    def test_disallowed_robots (self):
        self.rp.set_url('http://google.com/robots.txt')
        self.rp.read()
        allowed = self.rp.can_fetch(configuration.UserAgent,
                                    "http://google.com/search")
        self.check(allowed, False)