use relative imports

git-svn-id: https://linkchecker.svn.sourceforge.net/svnroot/linkchecker/trunk/linkchecker@3335 e7d03fd6-7b0d-0410-9947-9c21f3af8025
calvin 2006-06-01 14:06:19 +00:00
parent 338c6ac69c
commit a57618a4ad
5 changed files with 60 additions and 60 deletions
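
The pattern throughout the diff: modules inside the package stop spelling out the `linkcheck.` prefix and let Python 2's implicit relative imports resolve sibling modules. A minimal sketch of the before/after (the layout below mirrors modules actually touched in this commit; it only runs from inside such a package):

    # Package layout as used by the modules in this commit:
    #   linkcheck/__init__.py
    #   linkcheck/log.py
    #   linkcheck/logger/text.py

    # Old style: absolute import, repeats the package name.
    # import linkcheck.log
    # import linkcheck.logger.text

    # New style: Python 2 resolves a bare import against the
    # package's own directory before sys.path, so inside the
    # package these find linkcheck/log.py and linkcheck/logger/text.py.
    import log
    import logger.text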


@@ -26,7 +26,7 @@ if not hasattr(sys, 'version_info') or \
import os
import re
-import linkcheck.i18n
+import i18n
import _linkchecker_configdata as configdata
# application log areas
@@ -48,7 +48,7 @@ lognames = {
}
lognamelist = ", ".join(["%r"%name for name in lognames.iterkeys()])
-import linkcheck.log
+import log
class LinkCheckerError (StandardError):
@@ -64,7 +64,7 @@ def add_intern_pattern (url_data, config):
"""
pat = url_data.get_intern_pattern()
if pat:
-assert None == linkcheck.log.debug(LOG_CHECK,
+assert None == log.debug(LOG_CHECK,
"Add intern pattern %r from command line", pat)
config['internlinks'].append(get_link_pat(pat))
@@ -81,7 +81,7 @@ def get_link_pat (arg, strict=False):
@return: dictionary with keys 'pattern', 'negate' and 'strict'
@rtype: dict
"""
-assert None == linkcheck.log.debug(LOG_CHECK, "Link pattern %r", arg)
+assert None == log.debug(LOG_CHECK, "Link pattern %r", arg)
if arg.startswith('!'):
pattern = arg[1:]
negate = True
@@ -96,30 +96,30 @@ def get_link_pat (arg, strict=False):
# note: don't confuse URL loggers with application logs above
-import linkcheck.logger.text
-import linkcheck.logger.html
-import linkcheck.logger.gml
-import linkcheck.logger.dot
-import linkcheck.logger.sql
-import linkcheck.logger.csvlog
-import linkcheck.logger.blacklist
-import linkcheck.logger.gxml
-import linkcheck.logger.customxml
-import linkcheck.logger.none
+import logger.text
+import logger.html
+import logger.gml
+import logger.dot
+import logger.sql
+import logger.csvlog
+import logger.blacklist
+import logger.gxml
+import logger.customxml
+import logger.none
# default link logger classes
Loggers = {
-"text": linkcheck.logger.text.TextLogger,
-"html": linkcheck.logger.html.HtmlLogger,
-"gml": linkcheck.logger.gml.GMLLogger,
-"dot": linkcheck.logger.dot.DOTLogger,
-"sql": linkcheck.logger.sql.SQLLogger,
-"csv": linkcheck.logger.csvlog.CSVLogger,
-"blacklist": linkcheck.logger.blacklist.BlacklistLogger,
-"gxml": linkcheck.logger.gxml.GraphXMLLogger,
-"xml": linkcheck.logger.customxml.CustomXMLLogger,
-"none": linkcheck.logger.none.NoneLogger,
+"text": logger.text.TextLogger,
+"html": logger.html.HtmlLogger,
+"gml": logger.gml.GMLLogger,
+"dot": logger.dot.DOTLogger,
+"sql": logger.sql.SQLLogger,
+"csv": logger.csvlog.CSVLogger,
+"blacklist": logger.blacklist.BlacklistLogger,
+"gxml": logger.gxml.GraphXMLLogger,
+"xml": logger.customxml.CustomXMLLogger,
+"none": logger.none.NoneLogger,
}
# for easy printing: a comma separated logger list
LoggerKeys = ", ".join(["%r"%name for name in Loggers.iterkeys()])
@@ -135,7 +135,7 @@ def init_i18n ():
locdir = os.environ.get('LOCPATH')
if locdir is None:
locdir = os.path.join(configdata.install_data, 'share', 'locale')
-linkcheck.i18n.init(configdata.name, locdir)
+i18n.init(configdata.name, locdir)
# install translated log level names
import logging
logging.addLevelName(logging.CRITICAL, _('CRITICAL'))


@@ -33,7 +33,7 @@ import Cookie
import cookielib
import cStringIO as StringIO
import rfc822
-import linkcheck.strformat
+import strformat
class CookieError (StandardError):
@@ -189,7 +189,7 @@ class HttpCookie (object):
self.attributes[key] = value
def parse (self, text, patt=Cookie._CookiePattern):
-text = linkcheck.strformat.ascii_safe(text)
+text = strformat.ascii_safe(text)
# reset values
self.name = None
self.value = None
@@ -220,9 +220,9 @@
self.calculate_expiration()
def set_default_attributes (self, scheme, host, path):
-scheme = linkcheck.strformat.ascii_safe(scheme)
-host = linkcheck.strformat.ascii_safe(host)
-path = linkcheck.strformat.ascii_safe(path)
+scheme = strformat.ascii_safe(scheme)
+host = strformat.ascii_safe(host)
+path = strformat.ascii_safe(path)
if "domain" not in self.attributes:
self.attributes["domain"] = host.lower()
if "path" not in self.attributes:


@@ -73,7 +73,7 @@ import mimetools
import socket
from urlparse import urlsplit
from cStringIO import StringIO
-import linkcheck.cache.addrinfo
+import cache.addrinfo
__all__ = ["HTTP", "HTTPResponse", "HTTPConnection", "HTTPSConnection",
"HTTPException", "NotConnected", "UnknownProtocol",
@@ -612,7 +612,7 @@ class HTTPConnection:
def connect(self):
"""Connect to the host and port specified in __init__."""
msg = "getaddrinfo returns an empty list"
-for res in linkcheck.cache.addrinfo.getaddrinfo(self.host, self.port):
+for res in cache.addrinfo.getaddrinfo(self.host, self.port):
af, socktype, proto, canonname, sa = res
try:
self.sock = socket.socket(af, socktype, proto)


@@ -19,7 +19,7 @@ Locking utility class.
"""
import threading
import linkcheck
-import linkcheck.log
+import log
def get_lock (name):
return DebugLock(threading.Lock(), name)
@@ -38,10 +38,10 @@ class DebugLock (object):
Acquire lock.
"""
threadname = threading.currentThread().getName()
-assert None == linkcheck.log.debug(linkcheck.LOG_THREAD,
+assert None == log.debug(linkcheck.LOG_THREAD,
"Acquire %s for %s", self.name, threadname)
self.lock.acquire(blocking)
-assert None == linkcheck.log.debug(linkcheck.LOG_THREAD,
+assert None == log.debug(linkcheck.LOG_THREAD,
"...acquired %s for %s", self.name, threadname)
def release (self):
@@ -49,6 +49,6 @@ class DebugLock (object):
Release lock.
"""
threadname = threading.currentThread().getName()
-assert None == linkcheck.log.debug(linkcheck.LOG_THREAD,
+assert None == log.debug(linkcheck.LOG_THREAD,
"Release %s for %s", self.name, threadname)
self.lock.release()


@@ -33,8 +33,8 @@ import gzip
import sys
import cStringIO as StringIO
import linkcheck
-import linkcheck.configuration
-import linkcheck.log
+import configuration
+import log
__all__ = ["RobotFileParser"]
@@ -160,7 +160,7 @@ class RobotFileParser (object):
"""
self._reset()
headers = {
-'User-Agent': linkcheck.configuration.UserAgent,
+'User-Agent': configuration.UserAgent,
'Accept-Encoding' : 'gzip;q=1.0, deflate;q=0.9, identity;q=0.5',
}
req = urllib2.Request(self.url, None, headers)
@@ -169,11 +169,11 @@ class RobotFileParser (object):
except urllib2.HTTPError, x:
if x.code in (401, 403):
self.disallow_all = True
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s disallow all", self.url)
else:
self.allow_all = True
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s allow all", self.url)
except socket.timeout:
raise
@@ -182,27 +182,27 @@ class RobotFileParser (object):
if isinstance(x.reason, socket.timeout):
raise
self.allow_all = True
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s allow all", self.url)
except (socket.gaierror, socket.error):
# no network
self.allow_all = True
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s allow all", self.url)
except IOError, msg:
self.allow_all = True
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s allow all", self.url)
except httplib.HTTPException:
self.allow_all = True
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s allow all", self.url)
except ValueError:
# XXX bug workaround:
# urllib2.AbstractDigestAuthHandler raises ValueError on
# failed authorisation
self.disallow_all = True
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s disallow all", self.url)
def _read_content (self, req):
@@ -246,7 +246,7 @@ class RobotFileParser (object):
@return: None
"""
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s parse lines", self.url)
state = 0
linenumber = 0
@@ -256,7 +256,7 @@ class RobotFileParser (object):
linenumber += 1
if not line:
if state == 1:
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s line %d: allow or disallow directives without" \
" any user-agent line", self.url, linenumber)
entry = Entry()
@@ -278,7 +278,7 @@ class RobotFileParser (object):
line[1] = urllib.unquote(line[1].strip())
if line[0] == "user-agent":
if state == 2:
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s line %d: missing blank line before user-agent" \
" directive", self.url, linenumber)
self._add_entry(entry)
@@ -287,7 +287,7 @@ class RobotFileParser (object):
state = 1
elif line[0] == "disallow":
if state == 0:
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s line %d: missing user-agent directive before" \
" this line", self.url, linenumber)
else:
@@ -295,14 +295,14 @@ class RobotFileParser (object):
state = 2
elif line[0] == "allow":
if state == 0:
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s line %d: missing user-agent directive before" \
" this line", self.url, linenumber)
else:
entry.rulelines.append(RuleLine(line[1], 1))
elif line[0] == "crawl-delay":
if state == 0:
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s line %d: missing user-agent directive before" \
" this line", self.url, linenumber)
else:
@@ -310,21 +310,21 @@ class RobotFileParser (object):
entry.crawldelay = max(0, int(line[1]))
state = 2
except ValueError:
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s line %d: invalid delay number %r",
self.url, linenumber, line[1])
pass
else:
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s line %d: unknown key %s",
self.url, linenumber, line[0])
else:
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s line %d: malformed line %s",
self.url, linenumber, line)
if state in (1, 2):
self.entries.append(entry)
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"Parsed rules:\n%s", str(self))
def can_fetch (self, useragent, url):
@@ -334,7 +334,7 @@ class RobotFileParser (object):
@return: True if agent can fetch url, else False
@rtype: bool
"""
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s check allowance for:\n" \
" user agent: %r\n url: %r", self.url, useragent, url)
if not isinstance(useragent, str):
@@ -476,7 +476,7 @@ class Entry (object):
@rtype: bool
"""
for line in self.rulelines:
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"%s %s %s", filename, str(line), line.allowance)
if line.applies_to(filename):
return line.allowance
@@ -507,7 +507,7 @@ def decode (page):
"""
Gunzip or deflate a compressed page.
"""
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"robots.txt page info %d %s", page.code, str(page.info()))
encoding = page.info().get("Content-Encoding")
if encoding in ('gzip', 'x-gzip', 'deflate'):
@@ -519,7 +519,7 @@ def decode (page):
else:
fp = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(content))
except zlib.error, msg:
-assert None == linkcheck.log.debug(linkcheck.LOG_CHECK,
+assert None == log.debug(linkcheck.LOG_CHECK,
"uncompressing had error "
"%s, assuming non-compressed content", str(msg))
fp = StringIO.StringIO(content)
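
A note on the `assert None == log.debug(...)` construction that dominates this diff: `log.debug()` returns None, so the assertion always holds, and since `python -O` strips assert statements, the entire logging call (argument evaluation included) disappears in optimized runs. A self-contained sketch, using the standard `logging` module as a stand-in for linkcheck's own `log` module (names here are illustrative, not from the diff):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger("example")  # stand-in for linkcheck's log module

    def expensive():
        # Placeholder for a costly argument computation.
        return "details"

    # debug() returns None, so this assert always passes; run with
    # "python -O" and the whole statement, including the expensive()
    # call, is compiled away.
    assert None == log.debug("checked %s", expensive())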