bk movements

git-svn-id: https://linkchecker.svn.sourceforge.net/svnroot/linkchecker/trunk/linkchecker@1375 e7d03fd6-7b0d-0410-9947-9c21f3af8025
This commit is contained in:
calvin 2004-07-20 14:50:00 +00:00
parent 5ad8c827b4
commit b60070a922
24 changed files with 166 additions and 289 deletions

View file

@ -27,7 +27,6 @@ import _linkchecker_configdata
import bk
import bk.containers
import linkcheck
import linkcheck.i18n
import linkcheck.Threader
try:
import threading

View file

@ -16,7 +16,9 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import re, sys, htmlentitydefs
import re
import sys
import htmlentitydefs
markup_re = re.compile("<.*?>", re.DOTALL)
entities = htmlentitydefs.entitydefs.items()

View file

@ -19,7 +19,8 @@
import sys
import re
import time
import linkcheck.i18n
import bk.i18n
import bk.strtime
# logger areas
@ -33,33 +34,6 @@ class LinkCheckerError (Exception):
pass
def strtime (t):
    """return ISO 8601 formatted time"""
    # local wall-clock part first, then the numeric timezone suffix
    stamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(t))
    return stamp + strtimezone()
def strduration (duration):
    """return string formatted time duration"""
    # scale the raw value up through minutes and hours; note the two
    # checks cascade, so e.g. 7200 seconds ends up reported in hours
    unit = linkcheck.i18n._("seconds")
    if duration > 60:
        duration = duration / 60
        unit = linkcheck.i18n._("minutes")
    if duration > 60:
        duration = duration / 60
        unit = linkcheck.i18n._("hours")
    return " %.3f %s" % (duration, unit)
def strtimezone ():
    """Return the local UTC offset formatted like strftime's "%z"
    (e.g. "+0100", "-0330"), since %z is not supported on all platforms.
    """
    # NOTE(review): this mirrors the original choice of altzone whenever
    # the locale defines DST at all, not whether DST is currently active
    if time.daylight:
        zone = time.altzone
    else:
        zone = time.timezone
    # time.timezone/time.altzone are seconds *west* of UTC, hence negate
    offset = -zone
    if offset < 0:
        sign = "-"
        offset = -offset
    else:
        sign = "+"
    # The original '"%+04d" % int(-zone/3600)' produced e.g. "+001" for
    # UTC+1 and silently dropped sub-hour offsets (e.g. +0530); emit the
    # full signed HHMM form instead.
    return "%s%02d%02d" % (sign, offset // 3600, (offset % 3600) // 60)
def getLinkPat (arg, strict=False):
"""get a link pattern matcher for intern/extern links"""
bk.log.debug(LOG_CHECK, "Link pattern %r", arg)
@ -80,8 +54,8 @@ def printStatus (config, curtime, start_time):
tocheck = len(config.urls)
links = config['linknumber']
active = config.threader.active_threads()
duration = strduration(curtime - start_time)
print >>sys.stderr, linkcheck.i18n._("%5d urls queued, %4d links checked, %2d active threads, runtime %s")%\
duration = bk.strtime.strduration(curtime - start_time)
print >>sys.stderr, bk.i18n._("%5d urls queued, %4d links checked, %2d active threads, runtime %s")%\
(tocheck, links, active, duration)

View file

@ -52,7 +52,7 @@ class FtpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
else:
_user, _password = self.getUserPassword()
if _user is None or _password is None:
raise linkcheck.LinkCheckerError(linkcheck.i18n._("No user or password found"))
raise linkcheck.LinkCheckerError(bk.i18n._("No user or password found"))
self.login(_user, _password)
filename = self.cwd()
if filename:
@ -89,10 +89,10 @@ class FtpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
self.urlConnection.connect(self.urlparts[1])
self.urlConnection.login(_user, _password)
except EOFError:
raise linkcheck.LinkCheckerError(linkcheck.i18n._("Remote host has closed connection"))
raise linkcheck.LinkCheckerError(bk.i18n._("Remote host has closed connection"))
if not self.urlConnection.getwelcome():
self.closeConnection()
raise linkcheck.LinkCheckerError(linkcheck.i18n._("Got no answer from FTP server"))
raise linkcheck.LinkCheckerError(bk.i18n._("Got no answer from FTP server"))
# dont set info anymore, this may change every time we logged in
#self.setInfo(info)
@ -113,7 +113,7 @@ class FtpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
# it could be a directory if the trailing slash was forgotten
try:
self.urlConnection.cwd(filename)
self.setWarning(linkcheck.i18n._("Missing trailing directory slash in ftp url"))
self.setWarning(bk.i18n._("Missing trailing directory slash in ftp url"))
return
except ftplib.error_perm:
pass

View file

@ -43,4 +43,4 @@ class HostCheckingUrlData (linkcheck.UrlData.UrlData):
def checkConnection (self):
ip = socket.gethostbyname(self.host)
self.setValid(self.host+"("+ip+") "+linkcheck.i18n._("found"))
self.setValid(self.host+"("+ip+") "+bk.i18n._("found"))

View file

@ -57,7 +57,7 @@ class HttpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
# XXX
# check for empty paths
if not self.urlparts[2]:
self.setWarning(linkcheck.i18n._("URL path is empty, assuming '/' as path"))
self.setWarning(bk.i18n._("URL path is empty, assuming '/' as path"))
self.urlparts[2] = '/'
self.url = urlparse.urlunsplit(self.urlparts)
@ -106,16 +106,16 @@ class HttpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
# set the proxy, so a 407 status after this is an error
self.setProxy(self.config["proxy"].get(self.scheme))
if self.proxy:
self.setInfo(linkcheck.i18n._("Using Proxy %r")%self.proxy)
self.setInfo(bk.i18n._("Using Proxy %r")%self.proxy)
self.headers = None
self.auth = None
self.cookies = []
if not self.robotsTxtAllowsUrl():
self.setWarning(linkcheck.i18n._("Access denied by robots.txt, checked only syntax"))
self.setWarning(bk.i18n._("Access denied by robots.txt, checked only syntax"))
return
if _isAmazonHost(self.urlparts[1]):
self.setWarning(linkcheck.i18n._("Amazon servers block HTTP HEAD requests, "
self.setWarning(bk.i18n._("Amazon servers block HTTP HEAD requests, "
"using GET instead"))
self.method = "GET"
else:
@ -140,7 +140,7 @@ class HttpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
if response.status == 305 and self.headers:
oldproxy = (self.proxy, self.proxyauth)
self.setProxy(self.headers.getheader("Location"))
self.setInfo(linkcheck.i18n._("Enforced Proxy %r")%self.proxy)
self.setInfo(bk.i18n._("Enforced Proxy %r")%self.proxy)
response = self._getHttpResponse()
self.headers = response.msg
self.proxy, self.proxyauth = oldproxy
@ -156,7 +156,7 @@ class HttpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
redirectCache = [self.url]
fallback_GET = True
continue
self.setError(linkcheck.i18n._("more than %d redirections, aborting")%self.max_redirects)
self.setError(bk.i18n._("more than %d redirections, aborting")%self.max_redirects)
return
# user authentication
if response.status == 401:
@ -185,7 +185,7 @@ class HttpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
if mime=='application/octet-stream' and \
(poweredby.startswith('Zope') or \
server.startswith('Zope')):
self.setWarning(linkcheck.i18n._("Zope Server cannot determine"
self.setWarning(bk.i18n._("Zope Server cannot determine"
" MIME type with HEAD, falling back to GET"))
self.method = "GET"
continue
@ -193,7 +193,7 @@ class HttpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
# check url warnings
effectiveurl = urlparse.urlunsplit(self.urlparts)
if self.url != effectiveurl:
self.setWarning(linkcheck.i18n._("Effective URL %s") % effectiveurl)
self.setWarning(bk.i18n._("Effective URL %s") % effectiveurl)
self.url = effectiveurl
# check response
self.checkResponse(response, fallback_GET)
@ -207,7 +207,7 @@ class HttpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
tries < self.max_redirects:
newurl = self.headers.getheader("Location",
self.headers.getheader("Uri", ""))
redirected = linkcheck.url.url_norm(urlparse.urljoin(redirected, newurl))
redirected = bk.url.url_norm(urlparse.urljoin(redirected, newurl))
# note: urlparts has to be a list
self.urlparts = list(urlparse.urlsplit(redirected))
# check internal redirect cache to avoid recursion
@ -219,17 +219,17 @@ class HttpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
self.urlparts = list(urlparse.urlsplit(self.url))
return self.max_redirects, response
self.setError(
linkcheck.i18n._("recursive redirection encountered:\n %s") % \
bk.i18n._("recursive redirection encountered:\n %s") % \
"\n => ".join(redirectCache))
return -1, response
redirectCache.append(redirected)
# remember this alias
if response.status == 301:
if not self.has301status:
self.setWarning(linkcheck.i18n._("HTTP 301 (moved permanent) encountered: you "
self.setWarning(bk.i18n._("HTTP 301 (moved permanent) encountered: you "
"should update this link."))
if not (self.url.endswith('/') or self.url.endswith('.html')):
self.setWarning(linkcheck.i18n._("A HTTP 301 redirection occured and the url has no "
self.setWarning(bk.i18n._("A HTTP 301 redirection occured and the url has no "
"trailing / at the end. All urls which point to (home) "
"directories should end with a / to avoid redirection."))
self.has301status = True
@ -244,7 +244,7 @@ class HttpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
# check if we still have a http url, it could be another
# scheme, eg https or news
if self.urlparts[0]!="http":
self.setWarning(linkcheck.i18n._("HTTP redirection to non-http url encountered; "
self.setWarning(bk.i18n._("HTTP redirection to non-http url encountered; "
"the original url was %r.")%self.url)
# make new UrlData object
newobj = linkcheck.UrlData.GetUrlDataFrom(redirected, self.recursionLevel, self.config,
@ -273,11 +273,11 @@ class HttpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
if self.headers and self.headers.has_key("Server"):
server = self.headers['Server']
else:
server = linkcheck.i18n._("unknown")
server = bk.i18n._("unknown")
if fallback_GET:
self.setWarning(linkcheck.i18n._("Server %r did not support HEAD request, used GET for checking")%server)
self.setWarning(bk.i18n._("Server %r did not support HEAD request, used GET for checking")%server)
if self.no_anchor:
self.setWarning(linkcheck.i18n._("Server %r had no anchor support, removed anchor from request")%server)
self.setWarning(bk.i18n._("Server %r had no anchor support, removed anchor from request")%server)
if response.status == 204:
# no content
self.setWarning(response.reason)
@ -294,7 +294,7 @@ class HttpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
self.setValid("OK")
modified = self.headers.get('Last-Modified', '')
if modified:
self.setInfo(linkcheck.i18n._("Last modified %s") % modified)
self.setInfo(bk.i18n._("Last modified %s") % modified)
def getCacheKeys (self):
@ -318,7 +318,7 @@ class HttpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
self.closeConnection()
self.urlConnection = self.getHTTPObject(host, scheme)
# quote url before submit
url = linkcheck.url.url_quote(urlparse.urlunsplit(self.urlparts))
url = bk.url.url_quote(urlparse.urlunsplit(self.urlparts))
qurlparts = list(urlparse.urlsplit(url))
if self.no_anchor:
qurlparts[4] = ''
@ -394,7 +394,7 @@ class HttpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
encoding = self.headers.get("Content-Encoding")
if encoding and encoding not in _supported_encodings and \
encoding!='identity':
self.setWarning(linkcheck.i18n._('Unsupported content encoding %r.')%encoding)
self.setWarning(bk.i18n._('Unsupported content encoding %r.')%encoding)
return False
return True
@ -418,7 +418,7 @@ class HttpUrlData (linkcheck.ProxyUrlData.ProxyUrlData):
encoding = self.headers.get("Content-Encoding")
if encoding and encoding not in _supported_encodings and \
encoding!='identity':
self.setWarning(linkcheck.i18n._('Unsupported content encoding %r.')%encoding)
self.setWarning(bk.i18n._('Unsupported content encoding %r.')%encoding)
return False
return True

View file

@ -27,5 +27,5 @@ class HttpsUrlData (linkcheck.HttpUrlData.HttpUrlData):
if linkcheck.HttpUrlData.supportHttps:
super(HttpsUrlData, self)._check()
else:
self.setWarning(linkcheck.i18n._("%s url ignored")%self.scheme.capitalize())
self.setWarning(bk.i18n._("%s url ignored")%self.scheme.capitalize())
self.logMe()

View file

@ -59,7 +59,7 @@ class IgnoredUrlData (linkcheck.UrlData.UrlData):
"""Some schemes are defined in http://www.w3.org/Addressing/schemes"""
def _check (self):
self.setWarning(linkcheck.i18n._("%s url ignored")%self.scheme.capitalize())
self.setWarning(bk.i18n._("%s url ignored")%self.scheme.capitalize())
self.logMe()
def hasContent (self):

View file

@ -69,7 +69,7 @@ class MailtoUrlData (HostCheckingUrlData):
an answer, print the verified adress as an info.
"""
if not self.adresses:
self.setWarning(linkcheck.i18n._("No adresses found"))
self.setWarning(bk.i18n._("No adresses found"))
return
value = "unknown reason"
@ -81,7 +81,7 @@ class MailtoUrlData (HostCheckingUrlData):
mxrecords = mxlookup(host)
linkcheck.Config.debug(HURT_ME_PLENTY, "found mailhosts", mxrecords)
if not len(mxrecords):
self.setWarning(linkcheck.i18n._("No MX mail host for %s found")%host)
self.setWarning(bk.i18n._("No MX mail host for %s found")%host)
return
smtpconnect = 0
for mxrecord in mxrecords:
@ -94,18 +94,18 @@ class MailtoUrlData (HostCheckingUrlData):
info = self.urlConnection.verify(user)
linkcheck.Config.debug(HURT_ME_PLENTY, "SMTP user info", info)
if info[0]==250:
self.setInfo(linkcheck.i18n._("Verified adress: %s")%str(info[1]))
self.setInfo(bk.i18n._("Verified adress: %s")%str(info[1]))
except:
etype, value = sys.exc_info()[:2]
#print etype,value
if smtpconnect: break
if not smtpconnect:
self.setWarning(linkcheck.i18n._("None of the MX mail hosts for %s accepts an "
self.setWarning(bk.i18n._("None of the MX mail hosts for %s accepts an "
"SMTP connection: %s") % (host, str(value)))
mxrecord = mxrecords[0][1]
else:
mxrecord = mxrecord[1]
self.setValid(linkcheck.i18n._("found MX mail host %s") % mxrecord)
self.setValid(bk.i18n._("found MX mail host %s") % mxrecord)
def _split_adress (self, adress):
@ -116,7 +116,7 @@ class MailtoUrlData (HostCheckingUrlData):
return tuple(split)
if len(split)==1:
return (split[0], "localhost")
raise linkcheck.LinkCheckerError(linkcheck.i18n._("could not split the mail adress"))
raise linkcheck.LinkCheckerError(bk.i18n._("could not split the mail adress"))
def closeConnection (self):

View file

@ -47,7 +47,7 @@ class NntpUrlData (UrlData):
def checkConnection (self):
nntpserver = self.urlparts[1] or self.config["nntpserver"]
if not nntpserver:
self.setWarning(linkcheck.i18n._("No NNTP server specified, skipping this URL"))
self.setWarning(bk.i18n._("No NNTP server specified, skipping this URL"))
return
nntp = self._connectNntp(nntpserver)
group = self.urlparts[2]
@ -56,18 +56,18 @@ class NntpUrlData (UrlData):
if '@' in group:
# request article
resp,number,mid = nntp.stat("<"+group+">")
self.setInfo(linkcheck.i18n._('Articel number %s found') % number)
self.setInfo(bk.i18n._('Articel number %s found') % number)
else:
# split off trailing articel span
group = group.split('/',1)[0]
if group:
# request group info
resp,count,first,last,name = nntp.group(group)
self.setInfo(linkcheck.i18n._("Group %s has %s articles, range %s to %s") %\
self.setInfo(bk.i18n._("Group %s has %s articles, range %s to %s") %\
(name, count, first, last))
else:
# group name is the empty string
self.setWarning(linkcheck.i18n._("No newsgroup specified in NNTP URL"))
self.setWarning(bk.i18n._("No newsgroup specified in NNTP URL"))
def _connectNntp (self, nntpserver):
@ -87,9 +87,9 @@ class NntpUrlData (UrlData):
else:
raise
if nntp is None:
raise linkcheck.LinkCheckerError(linkcheck.i18n._("NTTP server too busy; tried more than %d times")%tries)
raise linkcheck.LinkCheckerError(bk.i18n._("NTTP server too busy; tried more than %d times")%tries)
if value is not None:
self.setWarning(linkcheck.i18n._("NNTP busy: %s")%str(value))
self.setWarning(bk.i18n._("NNTP busy: %s")%str(value))
return nntp

View file

@ -33,7 +33,7 @@ class TelnetUrlData (linkcheck.HostCheckingUrlData.HostCheckingUrlData):
self.host, self.port = urllib.splitport(self.host)
if self.port is not None:
if not linkcheck.UrlData.is_valid_port(self.port):
raise linkcheck.LinkCheckerError(linkcheck.i18n._("URL has invalid port number %s")\
raise linkcheck.LinkCheckerError(bk.i18n._("URL has invalid port number %s")\
% self.port)
self.port = int(self.port)
else:

View file

@ -33,7 +33,7 @@ ws_at_start_or_end = re.compile(r"(^\s+)|(\s+$)").search
# helper function for internal errors
def internal_error ():
print >>sys.stderr, linkcheck.i18n._("""\n********** Oops, I did it again. *************
print >>sys.stderr, bk.i18n._("""\n********** Oops, I did it again. *************
You have found an internal error in LinkChecker. Please write a bug report
at http://sourceforge.net/tracker/?func=add&group_id=1913&atid=101913
@ -50,13 +50,13 @@ I can work with ;).
print >>sys.stderr, etype, value
traceback.print_exc()
print_app_info()
print >>sys.stderr, linkcheck.i18n._("\n******** LinkChecker internal error, bailing out ********")
print >>sys.stderr, bk.i18n._("\n******** LinkChecker internal error, bailing out ********")
sys.exit(1)
def print_app_info ():
import os
print >>sys.stderr, linkcheck.i18n._("System info:")
print >>sys.stderr, bk.i18n._("System info:")
print >>sys.stderr, linkcheck.Config.App
print >>sys.stderr, "Python %s on %s" % (sys.version, sys.platform)
for key in ("LC_ALL", "LC_MESSAGES", "http_proxy", "ftp_proxy"):
@ -103,8 +103,8 @@ class UrlData (object):
self.config = config
self.parentName = parentName
self.baseRef = baseRef
self.errorString = linkcheck.i18n._("Error")
self.validString = linkcheck.i18n._("Valid")
self.errorString = bk.i18n._("Error")
self.validString = bk.i18n._("Valid")
self.warningString = None
self.infoString = None
self.valid = True
@ -127,11 +127,11 @@ class UrlData (object):
def setError (self, s):
self.valid = False
self.errorString = linkcheck.i18n._("Error")+": "+s
self.errorString = bk.i18n._("Error")+": "+s
def setValid (self, s):
self.valid = True
self.validString = linkcheck.i18n._("Valid")+": "+s
self.validString = bk.i18n._("Valid")+": "+s
def isParseable (self):
return False
@ -197,7 +197,7 @@ class UrlData (object):
self.userinfo, host = urllib.splituser(self.urlparts[1])
x, port = urllib.splitport(host)
if port is not None and not is_valid_port(port):
raise linkcheck.LinkCheckerError(linkcheck.i18n._("URL has invalid port number %r")\
raise linkcheck.LinkCheckerError(bk.i18n._("URL has invalid port number %r")\
% str(port))
# set host lowercase and without userinfo
self.urlparts[1] = host.lower()
@ -237,7 +237,7 @@ class UrlData (object):
debug(BRING_IT_ON, "extern =", self.extern)
if self.extern[0] and (self.config["strict"] or self.extern[1]):
self.setWarning(
linkcheck.i18n._("outside of domain filter, checked only syntax"))
bk.i18n._("outside of domain filter, checked only syntax"))
self.logMe()
return
@ -254,10 +254,10 @@ class UrlData (object):
debug(HURT_ME_PLENTY, "exception", traceback.format_tb(etb))
# make nicer error msg for unknown hosts
if isinstance(evalue, socket.error) and evalue[0]==-2:
evalue = linkcheck.i18n._('Hostname not found')
evalue = bk.i18n._('Hostname not found')
# make nicer error msg for bad status line
if isinstance(evalue, linkcheck.httplib2.BadStatusLine):
evalue = linkcheck.i18n._('Bad HTTP response %r')%str(evalue)
evalue = bk.i18n._('Bad HTTP response %r')%str(evalue)
self.setError(str(evalue))
# check content
@ -282,7 +282,7 @@ class UrlData (object):
except tuple(ExcList):
value, tb = sys.exc_info()[1:]
debug(HURT_ME_PLENTY, "exception", traceback.format_tb(tb))
self.setError(linkcheck.i18n._("could not parse content: %r")%str(value))
self.setError(bk.i18n._("could not parse content: %r")%str(value))
# close
self.closeConnection()
self.logMe()
@ -292,11 +292,11 @@ class UrlData (object):
def checkSyntax (self):
debug(BRING_IT_ON, "checking syntax")
if not self.urlName or self.urlName=="":
self.setError(linkcheck.i18n._("URL is null or empty"))
self.setError(bk.i18n._("URL is null or empty"))
self.logMe()
return False
if ws_at_start_or_end(self.urlName):
self.setError(linkcheck.i18n._("URL has whitespace at beginning or end"))
self.setError(bk.i18n._("URL has whitespace at beginning or end"))
self.logMe()
return False
try:
@ -399,7 +399,7 @@ class UrlData (object):
for cur_anchor,line,column,name,base in h.urls:
if cur_anchor == self.anchor:
return
self.setWarning(linkcheck.i18n._("anchor #%s not found") % self.anchor)
self.setWarning(bk.i18n._("anchor #%s not found") % self.anchor)
def _getExtern (self):
if not (self.config["externlinks"] or self.config["internlinks"]):
@ -456,14 +456,14 @@ class UrlData (object):
return
match = warningregex.search(self.getContent())
if match:
self.setWarning(linkcheck.i18n._("Found %r in link contents")%match.group())
self.setWarning(bk.i18n._("Found %r in link contents")%match.group())
def checkSize (self):
"""if a maximum size was given, call this function to check it
against the content size of this url"""
maxbytes = self.config["warnsizebytes"]
if maxbytes is not None and self.dlsize >= maxbytes:
self.setWarning(linkcheck.i18n._("Content size %s is larger than %s")%\
self.setWarning(bk.i18n._("Content size %s is larger than %s")%\
(linkcheck.StringUtil.strsize(self.dlsize),
linkcheck.StringUtil.strsize(maxbytes)))
@ -491,7 +491,7 @@ class UrlData (object):
if len(h.urls)>=1:
baseRef = h.urls[0][0]
if len(h.urls)>1:
self.setWarning(linkcheck.i18n._(
self.setWarning(bk.i18n._(
"more than one <base> tag found, using only the first one"))
h = linkcheck.linkparse.LinkFinder(self.getContent())
p = bk.HtmlParser.htmlsax.parser(h)

View file

@ -71,7 +71,7 @@ def checkUrls (config):
config.finish()
config.log_endOfOutput()
active = config.threader.active_threads()
linkcheck.log.warn(LOG_CHECK, linkcheck.i18n._("keyboard interrupt; waiting for %d active threads to finish") % active)
linkcheck.log.warn(LOG_CHECK, bk.i18n._("keyboard interrupt; waiting for %d active threads to finish") % active)
raise

View file

@ -63,9 +63,9 @@ def checklink (out=sys.stdout, form={}, env=os.environ):
if form.has_key("anchors"): config["anchors"] = True
if not form.has_key("errors"): config["verbose"] = True
if form.has_key("intern"):
pat = linkcheck.url.safe_host_pattern(re.escape(getHostName(form)))
pat = bk.url.safe_host_pattern(re.escape(getHostName(form)))
else:
pat = linkcheck.url.safe_url_pattern
pat = bk.url.safe_url_pattern
config["internlinks"].append(linkcheck.getLinkPat("^%s$" % pat))
# avoid checking of local files or other nasty stuff
config["externlinks"].append(linkcheck.getLinkPat("^%s$" % safe_url_pattern))
@ -90,28 +90,28 @@ def checkform (form):
lang = form['language'].value
if lang in _supported_langs:
os.environ['LC_MESSAGES'] = lang
linkcheck.i18n.init_gettext()
bk.i18n.init_gettext()
else:
raise FormError(linkcheck.i18n._("unsupported language"))
raise FormError(bk.i18n._("unsupported language"))
# check url syntax
if form.has_key("url"):
url = form["url"].value
if not url or url=="http://":
raise FormError(linkcheck.i18n._("empty url was given"))
if not linkcheck.url.is_valid_url(url):
raise FormError(linkcheck.i18n._("invalid url was given"))
raise FormError(bk.i18n._("empty url was given"))
if not bk.url.is_valid_url(url):
raise FormError(bk.i18n._("invalid url was given"))
else:
raise FormError(linkcheck.i18n._("no url was given"))
raise FormError(bk.i18n._("no url was given"))
# check recursion level
if form.has_key("level"):
level = form["level"].value
if not _is_level(level):
raise FormError(linkcheck.i18n._("invalid recursion level"))
raise FormError(bk.i18n._("invalid recursion level"))
# check options
for option in ("strict", "anchors", "errors", "intern"):
if form.has_key(option):
if not form[option].value=="on":
raise FormError(linkcheck.i18n._("invalid %s option syntax") % option)
raise FormError(bk.i18n._("invalid %s option syntax") % option)
def logit (form, env):
"""log form errors"""
@ -120,7 +120,7 @@ def logit (form, env):
return
elif type(_logfile) == types.StringType:
_logfile = file(_logfile, "a")
_logfile.write("\n"+linkcheck.logger.strtime(time.time())+"\n")
_logfile.write("\n"+bk.strtime.strtime(time.time())+"\n")
for var in ["HTTP_USER_AGENT", "REMOTE_ADDR",
"REMOTE_HOST", "REMOTE_PORT"]:
if env.has_key(var):
@ -132,7 +132,7 @@ def logit (form, env):
def printError (out, why):
"""print standard error page"""
out.write(linkcheck.i18n._("""<html><head>
out.write(bk.i18n._("""<html><head>
<title>LinkChecker Online Error</title></head>
<body text=#192c83 bgcolor=#fff7e5 link=#191c83 vlink=#191c83 alink=#191c83>
<blockquote>

View file

@ -17,7 +17,7 @@
import re
import linkcheck
from linkcheck.debug import *
# ripped mainly from HTML::Tagset.pm
LinkTags = {

View file

@ -17,7 +17,7 @@
import time
import csv
import linkcheck.i18n
import bk.i18n
import linkcheck.logger.StandardLogger
import linkcheck.logger.Logger
@ -37,12 +37,12 @@ class CSVLogger (linkcheck.logger.StandardLogger.StandardLogger):
return
self.starttime = time.time()
if self.has_field("intro"):
self.fd.write("# "+(linkcheck.i18n._("created by %s at %s%s") % (linkcheck.Config.AppName, linkcheck.logger.strtime(self.starttime), self.lineterminator)))
self.fd.write("# "+(linkcheck.i18n._("Get the newest version at %s%s") % (linkcheck.Config.Url, self.lineterminator)))
self.fd.write("# "+(linkcheck.i18n._("Write comments and bugs to %s%s%s") % \
self.fd.write("# "+(bk.i18n._("created by %s at %s%s") % (linkcheck.Config.AppName, linkcheck.logger.strtime(self.starttime), self.lineterminator)))
self.fd.write("# "+(bk.i18n._("Get the newest version at %s%s") % (linkcheck.Config.Url, self.lineterminator)))
self.fd.write("# "+(bk.i18n._("Write comments and bugs to %s%s%s") % \
(linkcheck.Config.Email, self.lineterminator, self.lineterminator)))
self.fd.write(
linkcheck.i18n._("# Format of the entries:")+self.lineterminator+\
bk.i18n._("# Format of the entries:")+self.lineterminator+\
"# urlname;"+self.lineterminator+\
"# recursionlevel;"+self.lineterminator+\
"# parentname;"+self.lineterminator+\
@ -68,10 +68,10 @@ class CSVLogger (linkcheck.logger.StandardLogger.StandardLogger):
if self.fd is None:
return
row = [urlData.urlName, urlData.recursionLevel,
linkcheck.url.url_quote(urlData.parentName or ""), urlData.baseRef,
bk.url.url_quote(urlData.parentName or ""), urlData.baseRef,
urlData.errorString, urlData.validString,
urlData.warningString, urlData.infoString,
urlData.valid, linkcheck.url.url_quote(urlData.url),
urlData.valid, bk.url.url_quote(urlData.url),
urlData.line, urlData.column,
urlData.name, urlData.dltime,
urlData.dlsize, urlData.checktime,
@ -86,7 +86,7 @@ class CSVLogger (linkcheck.logger.StandardLogger.StandardLogger):
self.stoptime = time.time()
if self.has_field("outro"):
duration = self.stoptime - self.starttime
self.fd.write("# "+linkcheck.i18n._("Stopped checking at %s (%s)%s")%\
self.fd.write("# "+bk.i18n._("Stopped checking at %s (%s)%s")%\
(linkcheck.logger.strtime(self.stoptime),
linkcheck.logger.strduration(duration), self.lineterminator))
self.flush()

View file

@ -15,7 +15,7 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import linkcheck.i18n
import bk.i18n
import linkcheck.AnsiColor
import linkcheck.logger.StandardLogger
@ -51,7 +51,7 @@ class ColoredLogger (linkcheck.logger.StandardLogger.StandardLogger):
self.fd.write("\n"+self.field("parenturl")+
self.spaces("parenturl")+
self.colorparent+
linkcheck.url.url_quote(urlData.parentName or "")+
bk.url.url_quote(urlData.parentName or "")+
self.colorreset+"\n")
self.currentPage = urlData.parentName
self.prefix = 1
@ -68,11 +68,11 @@ class ColoredLogger (linkcheck.logger.StandardLogger.StandardLogger):
self.fd.write(self.field("url")+self.spaces("url")+self.colorurl+
urlData.urlName+self.colorreset)
if urlData.line:
self.fd.write(linkcheck.i18n._(", line %d")%urlData.line)
self.fd.write(bk.i18n._(", line %d")%urlData.line)
if urlData.column:
self.fd.write(linkcheck.i18n._(", col %d")%urlData.column)
self.fd.write(bk.i18n._(", col %d")%urlData.column)
if urlData.cached:
self.fd.write(linkcheck.i18n._(" (cached)\n"))
self.fd.write(bk.i18n._(" (cached)\n"))
else:
self.fd.write("\n")
@ -91,14 +91,14 @@ class ColoredLogger (linkcheck.logger.StandardLogger.StandardLogger):
if self.prefix:
self.fd.write("| ")
self.fd.write(self.field("realurl")+self.spaces("realurl")+
self.colorreal+linkcheck.url.url_quote(urlData.url)+
self.colorreal+bk.url.url_quote(urlData.url)+
self.colorreset+"\n")
if urlData.dltime>=0 and self.has_field("dltime"):
if self.prefix:
self.fd.write("| ")
self.fd.write(self.field("dltime")+self.spaces("dltime")+
self.colordltime+
(linkcheck.i18n._("%.3f seconds") % urlData.dltime)+
(bk.i18n._("%.3f seconds") % urlData.dltime)+
self.colorreset+"\n")
if urlData.dlsize>=0 and self.has_field("dlsize"):
if self.prefix:
@ -111,7 +111,7 @@ class ColoredLogger (linkcheck.logger.StandardLogger.StandardLogger):
self.fd.write("| ")
self.fd.write(self.field("checktime")+self.spaces("checktime")+
self.colordltime+
(linkcheck.i18n._("%.3f seconds") % urlData.checktime)+self.colorreset+"\n")
(bk.i18n._("%.3f seconds") % urlData.checktime)+self.colorreset+"\n")
if urlData.infoString and self.has_field("info"):
if self.prefix:

View file

@ -35,10 +35,10 @@ class GMLLogger (linkcheck.logger.StandardLogger.StandardLogger):
return
self.starttime = time.time()
if self.has_field("intro"):
self.fd.write("# "+(linkcheck.i18n._("created by %s at %s\n") % (linkcheck.Config.AppName,
self.fd.write("# "+(bk.i18n._("created by %s at %s\n") % (linkcheck.Config.AppName,
linkcheck.logger.strtime(self.starttime))))
self.fd.write("# "+(linkcheck.i18n._("Get the newest version at %s\n") % linkcheck.Config.Url))
self.fd.write("# "+(linkcheck.i18n._("Write comments and bugs to %s\n\n") % \
self.fd.write("# "+(bk.i18n._("Get the newest version at %s\n") % linkcheck.Config.Url))
self.fd.write("# "+(bk.i18n._("Write comments and bugs to %s\n\n") % \
linkcheck.Config.Email))
self.fd.write("graph [\n directed 1\n")
self.flush()
@ -55,7 +55,7 @@ class GMLLogger (linkcheck.logger.StandardLogger.StandardLogger):
self.fd.write(" node [\n")
self.fd.write(" id %d\n" % node.id)
if self.has_field("realurl"):
self.fd.write(' label "%s"\n' % linkcheck.url.url_quote(node.url))
self.fd.write(' label "%s"\n' % bk.url.url_quote(node.url))
if node.dltime>=0 and self.has_field("dltime"):
self.fd.write(" dltime %d\n" % node.dltime)
if node.dlsize>=0 and self.has_field("dlsize"):
@ -91,7 +91,7 @@ class GMLLogger (linkcheck.logger.StandardLogger.StandardLogger):
if self.has_field("outro"):
self.stoptime = time.time()
duration = self.stoptime - self.starttime
self.fd.write("# "+linkcheck.i18n._("Stopped checking at %s (%s)\n")%\
self.fd.write("# "+bk.i18n._("Stopped checking at %s (%s)\n")%\
(linkcheck.logger.strtime(self.stoptime),
linkcheck.logger.strduration(duration)))
self.flush()

View file

@ -56,7 +56,7 @@ class HtmlLogger (linkcheck.logger.StandardLogger.StandardLogger):
if self.has_field('intro'):
self.fd.write("<center><h2>"+linkcheck.Config.App+"</h2></center>"+
"<br><blockquote>"+linkcheck.Config.Freeware+"<br><br>"+
(linkcheck.i18n._("Start checking at %s\n") % \
(bk.i18n._("Start checking at %s\n") % \
linkcheck.logger.strtime(self.starttime))+
"<br>")
self.flush()
@ -76,7 +76,7 @@ class HtmlLogger (linkcheck.logger.StandardLogger.StandardLogger):
"<td bgcolor=\""+self.colorurl+"\">"+self.field("url")+"</td>\n"+
"<td bgcolor=\""+self.colorurl+"\">"+urlData.urlName)
if urlData.cached:
self.fd.write(linkcheck.i18n._(" (cached)"))
self.fd.write(bk.i18n._(" (cached)"))
self.fd.write("</td>\n</tr>\n")
if urlData.name and self.has_field("name"):
self.fd.write("<tr>\n<td>"+self.field("name")+"</td>\n<td>"+
@ -84,23 +84,23 @@ class HtmlLogger (linkcheck.logger.StandardLogger.StandardLogger):
if urlData.parentName and self.has_field("parenturl"):
self.fd.write("<tr>\n<td>"+self.field("parenturl")+
'</td>\n<td><a target="top" href="'+
linkcheck.url.url_quote(urlData.parentName or "")+'">'+
linkcheck.url.url_quote(urlData.parentName or "")+"</a>")
bk.url.url_quote(urlData.parentName or "")+'">'+
bk.url.url_quote(urlData.parentName or "")+"</a>")
if urlData.line:
self.fd.write(linkcheck.i18n._(", line %d")%urlData.line)
self.fd.write(bk.i18n._(", line %d")%urlData.line)
if urlData.column:
self.fd.write(linkcheck.i18n._(", col %d")%urlData.column)
self.fd.write(bk.i18n._(", col %d")%urlData.column)
self.fd.write("</td>\n</tr>\n")
if urlData.baseRef and self.has_field("base"):
self.fd.write("<tr>\n<td>"+self.field("base")+"</td>\n<td>"+
urlData.baseRef+"</td>\n</tr>\n")
if urlData.url and self.has_field("realurl"):
self.fd.write("<tr>\n<td>"+self.field("realurl")+"</td>\n<td>"+
'<a target="top" href="'+linkcheck.url.url_quote(urlData.url)+
'">'+linkcheck.url.url_quote(urlData.url)+"</a></td>\n</tr>\n")
'<a target="top" href="'+bk.url.url_quote(urlData.url)+
'">'+bk.url.url_quote(urlData.url)+"</a></td>\n</tr>\n")
if urlData.dltime>=0 and self.has_field("dltime"):
self.fd.write("<tr>\n<td>"+self.field("dltime")+"</td>\n<td>"+
(linkcheck.i18n._("%.3f seconds") % urlData.dltime)+
(bk.i18n._("%.3f seconds") % urlData.dltime)+
"</td>\n</tr>\n")
if urlData.dlsize>=0 and self.has_field("dlsize"):
self.fd.write("<tr>\n<td>"+self.field("dlsize")+"</td>\n<td>"+
@ -109,7 +109,7 @@ class HtmlLogger (linkcheck.logger.StandardLogger.StandardLogger):
if urlData.checktime and self.has_field("checktime"):
self.fd.write("<tr>\n<td>"+self.field("checktime")+
"</td>\n<td>"+
(linkcheck.i18n._("%.3f seconds") % urlData.checktime)+
(bk.i18n._("%.3f seconds") % urlData.checktime)+
"</td>\n</tr>\n")
if urlData.infoString and self.has_field("info"):
self.fd.write("<tr>\n<td>"+self.field("info")+"</td>\n<td>"+
@ -140,32 +140,32 @@ class HtmlLogger (linkcheck.logger.StandardLogger.StandardLogger):
if self.fd is None:
return
if self.has_field("outro"):
self.fd.write("\n"+linkcheck.i18n._("Thats it. "))
self.fd.write("\n"+bk.i18n._("Thats it. "))
#if self.warnings==1:
# self.fd.write(linkcheck.i18n._("1 warning, "))
# self.fd.write(bk.i18n._("1 warning, "))
#else:
# self.fd.write(str(self.warnings)+linkcheck.i18n._(" warnings, "))
# self.fd.write(str(self.warnings)+bk.i18n._(" warnings, "))
if self.errors==1:
self.fd.write(linkcheck.i18n._("1 error"))
self.fd.write(bk.i18n._("1 error"))
else:
self.fd.write(str(self.errors)+linkcheck.i18n._(" errors"))
self.fd.write(str(self.errors)+bk.i18n._(" errors"))
if linknumber >= 0:
if linknumber == 1:
self.fd.write(linkcheck.i18n._(" in 1 link"))
self.fd.write(bk.i18n._(" in 1 link"))
else:
self.fd.write(linkcheck.i18n._(" in %d links") % linknumber)
self.fd.write(linkcheck.i18n._(" found")+"\n<br>")
self.fd.write(bk.i18n._(" in %d links") % linknumber)
self.fd.write(bk.i18n._(" found")+"\n<br>")
self.stoptime = time.time()
duration = self.stoptime - self.starttime
self.fd.write(linkcheck.i18n._("Stopped checking at %s (%s)\n")%\
self.fd.write(bk.i18n._("Stopped checking at %s (%s)\n")%\
(linkcheck.logger.strtime(self.stoptime),
linkcheck.logger.strduration(duration)))
self.fd.write("</blockquote><br><hr noshade size=\"1\"><small>"+
linkcheck.Config.HtmlAppInfo+"<br>")
self.fd.write(linkcheck.i18n._("Get the newest version at %s\n") %\
self.fd.write(bk.i18n._("Get the newest version at %s\n") %\
('<a href="'+linkcheck.Config.Url+'" target="_top">'+linkcheck.Config.Url+
"</a>.<br>"))
self.fd.write(linkcheck.i18n._("Write comments and bugs to %s\n\n") %\
self.fd.write(bk.i18n._("Write comments and bugs to %s\n\n") %\
('<a href="mailto:'+linkcheck.Config.Email+'">'+linkcheck.Config.Email+"</a>."))
self.fd.write("</small></body></html>")
self.flush()

View file

@ -15,24 +15,24 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import linkcheck.i18n
import bk.i18n
class Logger (object):
Fields = {
"realurl": linkcheck.i18n._("Real URL"),
"result": linkcheck.i18n._("Result"),
"base": linkcheck.i18n._("Base"),
"name": linkcheck.i18n._("Name"),
"parenturl": linkcheck.i18n._("Parent URL"),
"extern": linkcheck.i18n._("Extern"),
"info": linkcheck.i18n._("Info"),
"warning": linkcheck.i18n._("Warning"),
"dltime": linkcheck.i18n._("D/L Time"),
"dlsize": linkcheck.i18n._("D/L Size"),
"checktime": linkcheck.i18n._("Check Time"),
"url": linkcheck.i18n._("URL"),
"realurl": bk.i18n._("Real URL"),
"result": bk.i18n._("Result"),
"base": bk.i18n._("Base"),
"name": bk.i18n._("Name"),
"parenturl": bk.i18n._("Parent URL"),
"extern": bk.i18n._("Extern"),
"info": bk.i18n._("Info"),
"warning": bk.i18n._("Warning"),
"dltime": bk.i18n._("D/L Time"),
"dlsize": bk.i18n._("D/L Size"),
"checktime": bk.i18n._("Check Time"),
"url": bk.i18n._("URL"),
}
def __init__ (self, **args):

View file

@ -17,7 +17,7 @@
import time
import linkcheck
import linkcheck.i18n
import bk.i18n
import linkcheck.logger.StandardLogger
import linkcheck.logger.Logger
@ -49,10 +49,10 @@ class SQLLogger (linkcheck.logger.StandardLogger.StandardLogger):
if self.fd is None: return
self.starttime = time.time()
if self.has_field("intro"):
self.fd.write("-- "+(linkcheck.i18n._("created by %s at %s\n") % (linkcheck.Config.AppName,
self.fd.write("-- "+(bk.i18n._("created by %s at %s\n") % (linkcheck.Config.AppName,
linkcheck.strtime(self.starttime))))
self.fd.write("-- "+(linkcheck.i18n._("Get the newest version at %s\n") % linkcheck.Config.Url))
self.fd.write("-- "+(linkcheck.i18n._("Write comments and bugs to %s\n\n") % \
self.fd.write("-- "+(bk.i18n._("Get the newest version at %s\n") % linkcheck.Config.Url))
self.fd.write("-- "+(bk.i18n._("Write comments and bugs to %s\n\n") % \
linkcheck.Config.Email))
self.flush()
@ -66,14 +66,14 @@ class SQLLogger (linkcheck.logger.StandardLogger.StandardLogger):
(self.dbname,
sqlify(urlData.urlName),
urlData.recursionLevel,
sqlify(linkcheck.url.url_quote(urlData.parentName or "")),
sqlify(bk.url.url_quote(urlData.parentName or "")),
sqlify(urlData.baseRef),
sqlify(urlData.errorString),
sqlify(urlData.validString),
sqlify(urlData.warningString),
sqlify(urlData.infoString),
urlData.valid,
sqlify(linkcheck.url.url_quote(urlData.url)),
sqlify(bk.url.url_quote(urlData.url)),
urlData.line,
urlData.column,
sqlify(urlData.name),
@ -89,7 +89,7 @@ class SQLLogger (linkcheck.logger.StandardLogger.StandardLogger):
if self.has_field("outro"):
self.stoptime = time.time()
duration = self.stoptime - self.starttime
self.fd.write("-- "+linkcheck.i18n._("Stopped checking at %s (%s)\n")%\
self.fd.write("-- "+bk.i18n._("Stopped checking at %s (%s)\n")%\
(linkcheck.logger.strtime(self.stoptime),
linkcheck.logger.strduration(duration)))
self.flush()

View file

@ -17,7 +17,7 @@
import sys
import time
import linkcheck.i18n
import bk.i18n
import linkcheck.logger.Logger
@ -73,9 +73,9 @@ __init__(self, **args)
self.starttime = time.time()
if self.has_field('intro'):
self.fd.write("%s\n%s\n" % (linkcheck.Config.AppInfo, linkcheck.Config.Freeware))
self.fd.write(linkcheck.i18n._("Get the newest version at %s\n") % linkcheck.Config.Url)
self.fd.write(linkcheck.i18n._("Write comments and bugs to %s\n\n") % linkcheck.Config.Email)
self.fd.write(linkcheck.i18n._("Start checking at %s\n") % linkcheck.logger.strtime(self.starttime))
self.fd.write(bk.i18n._("Get the newest version at %s\n") % linkcheck.Config.Url)
self.fd.write(bk.i18n._("Write comments and bugs to %s\n\n") % linkcheck.Config.Email)
self.fd.write(bk.i18n._("Start checking at %s\n") % linkcheck.logger.strtime(self.starttime))
self.flush()
def newUrl (self, urlData):
@ -85,7 +85,7 @@ __init__(self, **args)
self.fd.write("\n"+self.field('url')+self.spaces('url')+
urlData.urlName)
if urlData.cached:
self.fd.write(linkcheck.i18n._(" (cached)\n"))
self.fd.write(bk.i18n._(" (cached)\n"))
else:
self.fd.write("\n")
if urlData.name and self.has_field('name'):
@ -93,24 +93,24 @@ __init__(self, **args)
urlData.name+"\n")
if urlData.parentName and self.has_field('parenturl'):
self.fd.write(self.field('parenturl')+self.spaces("parenturl")+
linkcheck.url.url_quote(urlData.parentName or "")+
(linkcheck.i18n._(", line %d")%urlData.line)+
(linkcheck.i18n._(", col %d")%urlData.column)+"\n")
bk.url.url_quote(urlData.parentName or "")+
(bk.i18n._(", line %d")%urlData.line)+
(bk.i18n._(", col %d")%urlData.column)+"\n")
if urlData.baseRef and self.has_field('base'):
self.fd.write(self.field("base")+self.spaces("base")+
urlData.baseRef+"\n")
if urlData.url and self.has_field('realurl'):
self.fd.write(self.field("realurl")+self.spaces("realurl")+
linkcheck.url.url_quote(urlData.url)+"\n")
bk.url.url_quote(urlData.url)+"\n")
if urlData.dltime>=0 and self.has_field('dltime'):
self.fd.write(self.field("dltime")+self.spaces("dltime")+
linkcheck.i18n._("%.3f seconds\n") % urlData.dltime)
bk.i18n._("%.3f seconds\n") % urlData.dltime)
if urlData.dlsize>=0 and self.has_field('dlsize'):
self.fd.write(self.field("dlsize")+self.spaces("dlsize")+
"%s\n"%linkcheck.StringUtil.strsize(urlData.dlsize))
if urlData.checktime and self.has_field('checktime'):
self.fd.write(self.field("checktime")+self.spaces("checktime")+
linkcheck.i18n._("%.3f seconds\n") % urlData.checktime)
bk.i18n._("%.3f seconds\n") % urlData.checktime)
if urlData.infoString and self.has_field('info'):
self.fd.write(self.field("info")+self.spaces("info")+
linkcheck.StringUtil.indent(
@ -137,24 +137,24 @@ __init__(self, **args)
if self.fd is None:
return
if self.has_field('outro'):
self.fd.write(linkcheck.i18n._("\nThats it. "))
self.fd.write(bk.i18n._("\nThats it. "))
#if self.warnings==1:
# self.fd.write(linkcheck.i18n._("1 warning, "))
# self.fd.write(bk.i18n._("1 warning, "))
#else:
# self.fd.write(str(self.warnings)+linkcheck.i18n._(" warnings, "))
# self.fd.write(str(self.warnings)+bk.i18n._(" warnings, "))
if self.errors==1:
self.fd.write(linkcheck.i18n._("1 error"))
self.fd.write(bk.i18n._("1 error"))
else:
self.fd.write(str(self.errors)+linkcheck.i18n._(" errors"))
self.fd.write(str(self.errors)+bk.i18n._(" errors"))
if linknumber >= 0:
if linknumber == 1:
self.fd.write(linkcheck.i18n._(" in 1 link"))
self.fd.write(bk.i18n._(" in 1 link"))
else:
self.fd.write(linkcheck.i18n._(" in %d links") % linknumber)
self.fd.write(linkcheck.i18n._(" found\n"))
self.fd.write(bk.i18n._(" in %d links") % linknumber)
self.fd.write(bk.i18n._(" found\n"))
self.stoptime = time.time()
duration = self.stoptime - self.starttime
self.fd.write(linkcheck.i18n._("Stopped checking at %s (%s)\n") % \
self.fd.write(bk.i18n._("Stopped checking at %s (%s)\n") % \
(linkcheck.logger.strtime(self.stoptime),
linkcheck.logger.strduration(duration)))
self.flush()

View file

@ -64,10 +64,10 @@ class XMLLogger (linkcheck.logger.StandardLogger.StandardLogger):
self.fd.write('<?xml version="1.0"?>\n')
if self.has_field("intro"):
self.fd.write("<!--\n")
self.fd.write(" "+linkcheck.i18n._("created by %s at %s\n") % \
self.fd.write(" "+bk.i18n._("created by %s at %s\n") % \
(linkcheck.Config.AppName, linkcheck.logger.strtime(self.starttime)))
self.fd.write(" "+linkcheck.i18n._("Get the newest version at %s\n") % linkcheck.Config.Url)
self.fd.write(" "+linkcheck.i18n._("Write comments and bugs to %s\n\n") % \
self.fd.write(" "+bk.i18n._("Get the newest version at %s\n") % linkcheck.Config.Url)
self.fd.write(" "+bk.i18n._("Write comments and bugs to %s\n\n") % \
linkcheck.Config.Email)
self.fd.write("-->\n\n")
self.fd.write('<GraphXML>\n<graph isDirected="true">\n')
@ -85,7 +85,7 @@ class XMLLogger (linkcheck.logger.StandardLogger.StandardLogger):
self.fd.write(">\n")
if self.has_field("realurl"):
self.fd.write(" <label>%s</label>\n" %\
xmlquote(linkcheck.url.url_quote(node.url)))
xmlquote(bk.url.url_quote(node.url)))
self.fd.write(" <data>\n")
if node.dltime>=0 and self.has_field("dltime"):
self.fd.write(" <dltime>%f</dltime>\n" % node.dltime)
@ -130,7 +130,7 @@ class XMLLogger (linkcheck.logger.StandardLogger.StandardLogger):
self.stoptime = time.time()
duration = self.stoptime - self.starttime
self.fd.write("<!-- ")
self.fd.write(linkcheck.i18n._("Stopped checking at %s (%s)\n")%\
self.fd.write(bk.i18n._("Stopped checking at %s (%s)\n")%\
(linkcheck.logger.strtime(self.stoptime),
linkcheck.logger.strduration(duration)))
self.fd.write("-->")

View file

@ -1,98 +0,0 @@
# -*- coding: iso-8859-1 -*-
"""Supporting definitions for the Python regression test."""
from linkcheck.log.Logger import Logger
# Root of the regression-test exception hierarchy; TestFailed and
# TestSkipped both derive from it so callers can catch either with
# a single "except Error" clause.
class Error (Exception):
    """Base class for regression test exceptions."""
# Raised when a test ran to completion but produced a wrong result.
class TestFailed (Error):
    """Test failed."""
class TestSkipped (Error):
    """Test skipped.

    This can be raised to indicate that a test was deliberately
    skipped, but not because a feature wasn't available.  For
    example, if some resource can't be used, such as the network
    appears to be unavailable, this should be raised instead of
    TestFailed.
    """
verbose = True # Flag set to 0 by regrtest.py
def unload (name):
    """Remove the module called *name* from sys.modules, if present.

    Missing modules are silently ignored, matching the original
    try/del/except-KeyError behaviour.
    """
    import sys
    # pop with a default absorbs the absent-key case in one call.
    sys.modules.pop(name, None)
def forget (modname):
    """Unload *modname* and remove its compiled .pyc file from every
    directory on sys.path.  Missing files are ignored.
    """
    unload(modname)
    import sys, os
    compiled = modname + '.pyc'
    for directory in sys.path:
        target = os.path.join(directory, compiled)
        try:
            os.unlink(target)
        except os.error:
            # not present in this directory -- keep scanning the path
            pass
# Relative tolerance used by fcmp() for float comparisons.
FUZZ = 1e-6


def fcmp (x, y): # fuzzy comparison function
    # Three-way compare (cmp-style: negative/zero/positive) with a
    # tolerance for floats and element-wise recursion for sequences.
    # NOTE: Python 2 only -- relies on coerce(), cmp() and the `<>`
    # operator, none of which exist in Python 3.
    if type(x) == type(0.0) or type(y) == type(0.0):
        try:
            # Coerce the pair to a common numeric type, then treat the
            # values as equal when they differ by less than FUZZ of
            # their combined magnitude.
            x, y = coerce(x, y)
            fuzz = (abs(x) + abs(y)) * FUZZ
            if abs(x-y) <= fuzz:
                return 0
        except:
            # coercion failed (non-numeric operand); fall through to
            # the plain cmp() below
            pass
    elif type(x) == type(y) and type(x) in (type(()), type([])):
        # Same-type tuples/lists: compare element-wise, first
        # difference wins; equal prefixes fall back to length order.
        for i in range(min(len(x), len(y))):
            outcome = fcmp(x[i], y[i])
            if outcome <> 0:
                return outcome
        return cmp(len(x), len(y))
    return cmp(x, y)


TESTFN = '@test' # Filename used for testing
def findfile (file, here=__file__):
    """Locate *file* for the test suite.

    Absolute paths are returned unchanged.  Relative names are looked
    up first in the directory containing *here*, then in every entry
    of sys.path; the first existing match wins.  When nothing is
    found, the name is returned as given.
    """
    import os
    if os.path.isabs(file):
        return file
    import sys
    # search the caller's directory before the import path
    search_dirs = [os.path.dirname(here)] + sys.path
    for directory in search_dirs:
        candidate = os.path.join(directory, file)
        if os.path.exists(candidate):
            return candidate
    return file
class TestLogger (Logger):
    """Logger used by the regression tests: dumps each checked URL's
    attributes as plain "key value" lines on stdout so test output can
    be compared textually against expected results.
    """
    def init (self):
        # no header output for regression runs
        pass

    def newUrl (self, urlData):
        # Print one line per populated attribute; empty/false values
        # are omitted so the expected-output files stay minimal.
        print 'url', urlData.urlName
        if urlData.cached:
            print "cached"
        if urlData.name:
            print "name", urlData.name
        if urlData.baseRef:
            print "baseurl", urlData.baseRef
        if urlData.infoString:
            print "info", urlData.infoString
        if urlData.warningString:
            print "warning", urlData.warningString
        if urlData.valid:
            print "valid"
        else:
            print "error"

    def endOfOutput (self, linknumber=-1):
        # no summary/footer for regression runs
        pass