documentation and syntax fixes

git-svn-id: https://linkchecker.svn.sourceforge.net/svnroot/linkchecker/trunk/linkchecker@2378 e7d03fd6-7b0d-0410-9947-9c21f3af8025
This commit is contained in:
calvin 2005-03-09 10:26:30 +00:00
parent 0d063a3b53
commit f344b75c8e
11 changed files with 72 additions and 21 deletions

View file

@ -79,6 +79,9 @@ class Cache (object):
self.pool = linkcheck.checker.pool.ConnectionPool()
def incoming_is_empty (self):
"""
Check if incoming queue is empty.
"""
self.lock.acquire()
try:
return len(self.incoming) <= 0
@ -144,6 +147,12 @@ class Cache (object):
self.lock.release()
def has_incoming (self, key):
"""
Check if incoming queue has an entry with the given key.
@param key: Usually obtained from url_data.cache_url_key
@type key: String
"""
self.lock.acquire()
try:
return key in self.incoming
@ -151,6 +160,12 @@ class Cache (object):
self.lock.release()
def has_in_progress (self, key):
"""
Check if in-progress queue has an entry with the given key.
@param key: Usually obtained from url_data.cache_url_key
@type key: String
"""
self.lock.acquire()
try:
return key in self.in_progress
@ -158,7 +173,9 @@ class Cache (object):
self.lock.release()
def in_progress_remove (self, url_data):
"""remove url from in-progress cache"""
"""
Remove url from in-progress cache.
"""
self.lock.acquire()
try:
key = url_data.cache_url_key
@ -168,7 +185,9 @@ class Cache (object):
self.lock.release()
def checked_add (self, url_data):
"""cache checked url data"""
"""
Cache checked url data.
"""
self.lock.acquire()
try:
data = url_data.get_cache_data()

View file

@ -40,6 +40,9 @@ class FtpUrl (urlbase.UrlBase, proxysupport.ProxySupport):
def __init__ (self, base_url, recursion_level, consumer,
parent_url = None,
base_ref = None, line=0, column=0, name=u""):
"""
Initialize FTP url data.
"""
super(FtpUrl, self).__init__(base_url, recursion_level, consumer,
parent_url=parent_url, base_ref=base_ref,
line=line, column=column, name=name)
@ -155,6 +158,9 @@ class FtpUrl (urlbase.UrlBase, proxysupport.ProxySupport):
"""
files = []
def add_entry (line):
"""
Parse list line and add the entry it points to to the file list.
"""
linkcheck.log.debug(linkcheck.LOG_CHECK, "Directory entry %r",
line)
try:

View file

@ -56,6 +56,9 @@ class ProxySupport (object):
def ignore_proxy_host (self):
"""
Check if self.host is in the no-proxy-for ignore list.
"""
for ro in self.consumer.config["noproxyfor"]:
if ro.search(self.host):
return True

View file

@ -82,6 +82,11 @@ def print_app_info ():
def urljoin (parent, url, scheme):
    """
    Join url relative to parent, unless url is already absolute
    (i.e. it starts with the given scheme).
    @return: the joined URL
    """
    if not url.startswith(scheme + ":"):
        url = urlparse.urljoin(parent, url)
    return url

View file

@ -150,6 +150,10 @@ class Configuration (dict):
self.init_geoip()
def init_geoip (self):
"""
If GeoIP.dat file is found, initialize a standard geoip DB and
store it in self["geoip"]; else this value will be None.
"""
geoip_dat = "/usr/share/GeoIP/GeoIP.dat"
if _has_geoip and os.path.exists(geoip_dat):
self["geoip"] = GeoIP.open(geoip_dat, GeoIP.GEOIP_STANDARD)
@ -332,7 +336,8 @@ class Configuration (dict):
except ConfigParser.Error, msg:
linkcheck.log.debug(linkcheck.LOG_CHECK, msg)
try:
self["externstrictall"] = cfgparser.getboolean(section, "externstrictall")
self["externstrictall"] = \
cfgparser.getboolean(section, "externstrictall")
except ConfigParser.Error, msg:
linkcheck.log.debug(linkcheck.LOG_CHECK, msg)
try:

View file

@ -73,6 +73,7 @@ class ListDict (dict):
"""
Initialize sorted key list.
"""
super(ListDict, self).__init__()
# sorted list of keys
self._keys = []

View file

@ -122,7 +122,7 @@ class Record (object):
data = sock.recv(content_length - len(self.content))
self.content += data
if padding_length != 0:
_padding = sock.recv(padding_length)
sock.recv(padding_length)
# Parse the content information
c = self.content
@ -220,12 +220,13 @@ def HandleManTypes (r, conn):
if r.rec_type == FCGI_GET_VALUES:
r.rec_type = FCGI_GET_VALUES_RESULT
v = {}
vars = {'FCGI_MAX_CONNS' : FCGI_MAX_CONNS,
_vars = {'FCGI_MAX_CONNS' : FCGI_MAX_CONNS,
'FCGI_MAX_REQS' : FCGI_MAX_REQS,
'FCGI_MPXS_CONNS': FCGI_MPXS_CONNS}
for i in r.values.keys():
if vars.has_key(i): v[i]=vars[i]
r.values = vars
if _vars.has_key(i):
v[i] = _vars[i]
r.values = _vars
r.write_record(conn)
@ -334,7 +335,6 @@ class FastCGIWriter (object):
# Assume a FastCGI environment until proven otherwise; _startup() resets
# this to 0 when it detects a non-FastCGI environment.
_isFCGI = 1

def isFCGI ():
    """
    Return the cached FastCGI-environment flag.
    """
    # Reading a module global needs no `global` declaration.
    return _isFCGI
@ -469,8 +469,8 @@ def _startup ():
s = socket.fromfd(sys.stdin.fileno(), socket.AF_INET,
socket.SOCK_STREAM)
s.getpeername()
except socket.error, (err, errmsg):
if err != errno.ENOTCONN: # must be a non-fastCGI environment
except socket.error, msg:
if msg[0] != errno.ENOTCONN: # must be a non-fastCGI environment
global _isFCGI
_isFCGI = 0
return

View file

@ -30,6 +30,9 @@ import linkcheck.configuration
import linkcheck.url
import linkcheck.i18n
import linkcheck.strformat
import linkcheck.checker
import linkcheck.checker.cache
import linkcheck.checker.consumer
_logfile = None
_supported_langs = ('de', 'fr', 'nl', 'C')
@ -98,9 +101,6 @@ def checklink (out=sys.stdout, form=None, env=os.environ):
config["externlinks"].append(
linkcheck.get_link_pat("^%s$" % linkcheck.url.safe_url_pattern))
config["externlinks"].append(linkcheck.get_link_pat(".*", strict=True))
import linkcheck.checker
import linkcheck.checker.cache
import linkcheck.checker.consumer
cache = linkcheck.checker.cache.Cache()
consumer = linkcheck.checker.consumer.Consumer(config, cache)
# start checking

View file

@ -127,10 +127,16 @@ class DOTLogger (linkcheck.logger.Logger):
def dotquote (s):
    """
    Escape double quotes so s can be embedded in a DOT format string.
    """
    escaped = s.replace('"', '\\"')
    return escaped
def dotedge (s):
"""
Escape disallowed characters in DOT edge labels.
"""
s = s.replace("\n", "\\n")
s = s.replace("\r", "\\r")
s = s.replace("\l", "\\l")

View file

@ -272,7 +272,7 @@ class HtmlLogger (linkcheck.logger.Logger):
self.field("result")+u"</td><td bgcolor=\""+self.colorerror+u"\">")
self.write(_("Error"))
if url_data.result:
self.write(u": "+cgi.escape(url_data.result))
self.write(u": "+cgi.escape(url_data.result))
self.writeln(u"</td></tr>")
def end_output (self, linknumber=-1):

View file

@ -37,11 +37,14 @@ __version__ = "1.1"
MESSAGES = {}
def usage (code, msg=''):
def usage (ecode, msg=''):
"""
Print usage and msg and exit with given code.
"""
print >> sys.stderr, __doc__
if msg:
print >> sys.stderr, msg
sys.exit(code)
sys.exit(ecode)
def add (msgid, transtr, fuzzy):
@ -54,19 +57,21 @@ def add (msgid, transtr, fuzzy):
def generate ():
"Return the generated output."
"""
Return the generated output.
"""
global MESSAGES
keys = MESSAGES.keys()
# the keys are sorted in the .mo file
keys.sort()
offsets = []
ids = strs = ''
for id in keys:
for _id in keys:
# For each string, we need size and file offset. Each string is NUL
# terminated; the NUL does not count into the size.
offsets.append((len(ids), len(id), len(strs), len(MESSAGES[id])))
ids += id + '\0'
strs += MESSAGES[id] + '\0'
offsets.append((len(ids), len(_id), len(strs), len(MESSAGES[_id])))
ids += _id + '\0'
strs += MESSAGES[_id] + '\0'
output = ''
# The header is 7 32-bit unsigned integers. We don't use hash tables, so
# the keys start right after the index tables.
@ -119,6 +124,7 @@ def make (filename, outfile):
fuzzy = 0
# Parse the catalog
msgid = msgstr = ''
lno = 0
for l in lines:
lno += 1