link number

git-svn-id: https://linkchecker.svn.sourceforge.net/svnroot/linkchecker/trunk/linkchecker@124 e7d03fd6-7b0d-0410-9947-9c21f3af8025
This commit is contained in:
calvin 2000-06-21 22:08:36 +00:00
parent 58be481d1b
commit cfcf9e14d2
3 changed files with 31 additions and 21 deletions

View file

@ -90,6 +90,7 @@ class Configuration(UserDict.UserDict):
def reset(self):
"""Reset to default values"""
self.data['linknumber'] = 0
self.data["verbose"] = 0
self.data["warnings"] = 0
self.data["anchors"] = 0
@ -180,6 +181,7 @@ class Configuration(UserDict.UserDict):
self.robotsTxtCache_get = self.robotsTxtCache_get_NoThreads
self.robotsTxtCache_set = self.robotsTxtCache_set_NoThreads
self.robotsTxtCacheLock = None
self.incrementLinknumber = self.incrementLinknumber_NoThreads
self.log_newUrl = self.log_newUrl_NoThreads
self.logLock = None
self.urls = []
@ -208,6 +210,7 @@ class Configuration(UserDict.UserDict):
self.robotsTxtCache_get = self.robotsTxtCache_get_Threads
self.robotsTxtCache_set = self.robotsTxtCache_set_Threads
self.robotsTxtCacheLock = Lock()
self.incrementLinknumber = self.incrementLinknumber_Threads
self.log_newUrl = self.log_newUrl_Threads
self.logLock = Lock()
self.urls = Queue.Queue(0)
@ -254,6 +257,9 @@ class Configuration(UserDict.UserDict):
def newLogger(self, name, dict={}):
return apply(Loggers[name], (), dictjoin(self.data[name],dict))
def incrementLinknumber_NoThreads(self):
    """Bump the global link counter (single-threaded variant, no locking)."""
    self.data['linknumber'] = 1 + self.data['linknumber']
def log_newUrl_NoThreads(self, url):
if not self.data["quiet"]: self.data["log"].newUrl(url)
for log in self.data["fileoutput"]:
@ -265,9 +271,10 @@ class Configuration(UserDict.UserDict):
log.init()
def log_endOfOutput(self):
if not self.data["quiet"]: self.data["log"].endOfOutput()
if not self.data["quiet"]:
self.data["log"].endOfOutput(linknumber=self.data['linknumber'])
for log in self.data["fileoutput"]:
log.endOfOutput()
log.endOfOutput(linknumber=self.linknumber)
def connectNntp_NoThreads(self):
if not self.data.has_key("nntp"):
@ -281,6 +288,13 @@ class Configuration(UserDict.UserDict):
finally:
self.dataLock.release()
def incrementLinknumber_Threads(self):
    """Bump the global link counter (thread-safe variant).

    Serializes access to self.data via self.dataLock so concurrent
    checker threads cannot lose an increment.
    """
    # Acquire BEFORE entering the try block: if acquire() itself fails,
    # the finally clause must not release a lock we never obtained.
    self.dataLock.acquire()
    try:
        self.data['linknumber'] = self.data['linknumber'] + 1
    finally:
        self.dataLock.release()
def _do_connectNntp(self):
"""This is done only once per checking task."""
import nntplib

View file

@ -76,7 +76,6 @@ class StandardLogger:
def __init__(self, **args):
self.errors = 0
self.warnings = 0
self.linknumber = 0
if args.has_key('fileoutput'):
self.fd = open(args['filename'], "w")
elif args.has_key('fd'):
@ -95,7 +94,6 @@ class StandardLogger:
def newUrl(self, urldata):
self.linknumber = self.linknumber+1
self.fd.write("\n"+_("URL")+Spaces["URL"]+urldata.urlName)
if urldata.cached:
self.fd.write(_(" (cached)\n"))
@ -136,7 +134,7 @@ class StandardLogger:
self.fd.flush()
def endOfOutput(self):
def endOfOutput(self, linknumber=-1):
self.fd.write(_("\nThats it. "))
if self.warnings==1:
@ -147,10 +145,12 @@ class StandardLogger:
self.fd.write(_("1 error"))
else:
self.fd.write(str(self.errors)+_(" errors"))
if self.linknumber == 1:
self.fd.write(_(" in 1 link found\n"))
else:
self.fd.write(_(" in %d links found\n") % self.linknumber)
if linknumber >= 0:
if linknumber == 1:
self.fd.write(_(" in 1 link"))
else:
self.fd.write(_(" in %d links") % linknumber)
self.fd.write(_(" found\n"))
self.stoptime = time.time()
duration = self.stoptime - self.starttime
name = _("seconds")
@ -194,7 +194,6 @@ class HtmlLogger(StandardLogger):
def newUrl(self, urlData):
self.linknumber = self.linknumber+1
self.fd.write("<table align=left border=\"0\" cellspacing=\"0\""
" cellpadding=\"1\" bgcolor="+self.colorborder+
"><tr><td><table align=left border=\"0\" cellspacing=\"0\""
@ -251,7 +250,7 @@ class HtmlLogger(StandardLogger):
self.fd.flush()
def endOfOutput(self):
def endOfOutput(self, linknumber=-1):
self.fd.write(MyFont+_("\nThats it. "))
if self.warnings==1:
self.fd.write(_("1 warning, "))
@ -261,11 +260,12 @@ class HtmlLogger(StandardLogger):
self.fd.write(_("1 error"))
else:
self.fd.write(str(self.errors)+_(" errors"))
if self.linknumber == 1:
self.fd.write(_(" in 1 link found\n"))
else:
self.fd.write(_(" in %d links found\n") % self.linknumber)
self.fd.write("<br>")
if linknumber >= 0:
if linknumber == 1:
self.fd.write(_(" in 1 link"))
else:
self.fd.write(_(" in %d links") % linknumber)
self.fd.write(" found<br>\n")
self.stoptime = time.time()
duration = self.stoptime - self.starttime
name = _("seconds")
@ -307,7 +307,6 @@ class ColoredLogger(StandardLogger):
self.prefix = 0
def newUrl(self, urlData):
self.linknumber = self.linknumber+1
if urlData.parentName:
if self.currentPage != urlData.parentName:
if self.prefix:
@ -413,7 +412,6 @@ class GMLLogger(StandardLogger):
self.fd.flush()
def newUrl(self, urlData):
self.linknumber = self.linknumber+1
self.nodes.append(urlData)
def endOfOutput(self):
@ -480,7 +478,6 @@ class SQLLogger(StandardLogger):
self.fd.flush()
def newUrl(self, urlData):
self.linknumber = self.linknumber+1
self.fd.write("insert into %s(urlname,recursionlevel,parentname,"
"baseref,errorstring,validstring,warningstring,infoString,"
"valid,url,line,checktime,downloadtime,cached) values "
@ -534,7 +531,6 @@ class BlacklistLogger:
pass
def newUrl(self, urlData):
self.linknumber = self.linknumber+1
if urlData.valid:
self.blacklist[urlData.getCacheKey()] = None
elif not urlData.cached:
@ -581,7 +577,6 @@ class CSVLogger(StandardLogger):
self.fd.flush()
def newUrl(self, urlData):
self.linknumber = self.linknumber+1
self.fd.write(
"%s%s%d%s%s%s%s%s%s%s%s%s%s%s%s%s%d%s%s%s%d%s%d%s%d%s%d\n" % (
urlData.urlName, self.separator,

View file

@ -136,6 +136,7 @@ class UrlData:
def logMe(self, config):
config.incrementLinknumber()
if config["verbose"] or not self.valid or \
(self.warningString and config["warnings"]):
config.log_newUrl(self)