defer i18n of log field names

git-svn-id: https://linkchecker.svn.sourceforge.net/svnroot/linkchecker/trunk/linkchecker@717 e7d03fd6-7b0d-0410-9947-9c21f3af8025
This commit is contained in:
calvin 2003-01-05 20:35:41 +00:00
parent 60d00b2865
commit 735ebda254
12 changed files with 202 additions and 182 deletions

View file

@@ -64,8 +64,8 @@ target="_top">LinkChecker</a>)
<tr>
<td>Ausgabe:</td>
<td><select name="language">
<option value="C" selected>Englisch</option>
<option value="de">Deutsch</option>
<option value="de" selected>Deutsch</option>
<option value="C">Englisch</option>
<option value="fr">Französisch</option>
<option value="nl">Niederländisch</option>
</select>

View file

@@ -58,10 +58,10 @@ target="_top">LinkChecker</a>)
<tr>
<td>Output language:</td>
<td><select name="language">
<option value="C" selected>English</option>
<option value="de">German</option>
<option value="fr">French</option>
<option value="nl">Dutch</option>
<option value="C" selected>English</option>
<option value="fr">French</option>
<option value="de">German</option>
</select>
</td>
<td>Check strict intern links:</td>

View file

@@ -22,14 +22,11 @@ class BlacklistLogger (Logger):
we have only links on the list which failed for n days.
"""
def __init__ (self, **args):
apply(Logger.__init__, (self,), args)
Logger.__init__(self, **args)
self.errors = 0
self.blacklist = {}
self.filename = args['filename']
def init (self):
pass
def newUrl (self, urlData):
if urlData.valid:
self.blacklist[urlData.getCacheKey()] = None

View file

@@ -24,13 +24,14 @@ class CSVLogger (StandardLogger):
separated by a semicolon.
"""
def __init__ (self, **args):
apply(StandardLogger.__init__, (self,), args)
StandardLogger.__init__(self, **args)
self.separator = args['separator']
def init (self):
StandardLogger.init(self)
if self.fd is None: return
self.starttime = time.time()
if self.logfield("intro"):
if self.has_field("intro"):
self.fd.write("# "+(_("created by %s at %s\n") % (Config.AppName,
strtime(self.starttime))))
self.fd.write("# "+(_("Get the newest version at %s\n") % Config.Url))
@@ -82,7 +83,7 @@ class CSVLogger (StandardLogger):
def endOfOutput (self, linknumber=-1):
if self.fd is None: return
self.stoptime = time.time()
if self.logfield("outro"):
if self.has_field("outro"):
duration = self.stoptime - self.starttime
name = _("seconds")
self.fd.write("# "+_("Stopped checking at %s") % strtime(self.stoptime))

View file

@@ -15,7 +15,6 @@
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import sys
from linkcheck.log import Spaces, LogFields
from StandardLogger import StandardLogger
from linkcheck import StringUtil, _
@@ -65,8 +64,8 @@ class ColoredLogger (StandardLogger):
"""ANSI colorized output"""
def __init__ (self, **args):
StandardLogger.__init__(self, **args)
esc="\x1b[%sm"
apply(StandardLogger.__init__, (self,), args)
self.colorparent = esc % col_num(args['colorparent'])
self.colorurl = esc % col_num(args['colorurl'])
self.colorname = esc % col_num(args['colorname'])
@@ -84,12 +83,13 @@ class ColoredLogger (StandardLogger):
def newUrl (self, urlData):
if self.fd is None: return
if self.logfield("parenturl"):
if self.has_field("parenturl"):
if urlData.parentName:
if self.currentPage != urlData.parentName:
if self.prefix:
self.fd.write("o\n")
self.fd.write("\n"+LogFields["parenturl"]+Spaces["parenturl"]+
self.fd.write("\n"+self.field("parenturl")+
self.spaces("parenturl")+
self.colorparent+urlData.parentName+
self.colorreset+"\n")
self.currentPage = urlData.parentName
@@ -99,12 +99,12 @@ class ColoredLogger (StandardLogger):
self.fd.write("o\n")
self.prefix = 0
self.currentPage=None
if self.logfield("url"):
if self.has_field("url"):
if self.prefix:
self.fd.write("|\n+- ")
else:
self.fd.write("\n")
self.fd.write(LogFields["url"]+Spaces["url"]+self.colorurl+
self.fd.write(self.field("url")+self.spaces("url")+self.colorurl+
urlData.urlName+self.colorreset)
if urlData.line:
self.fd.write(_(", line %d")%urlData.line)
@@ -115,66 +115,66 @@ class ColoredLogger (StandardLogger):
else:
self.fd.write("\n")
if urlData.name and self.logfield("name"):
if urlData.name and self.has_field("name"):
if self.prefix:
self.fd.write("| ")
self.fd.write(LogFields["name"]+Spaces["name"]+self.colorname+
urlData.name+self.colorreset+"\n")
if urlData.baseRef and self.logfield("base"):
self.fd.write(self.field("name")+self.spaces("name")+
self.colorname+urlData.name+self.colorreset+"\n")
if urlData.baseRef and self.has_field("base"):
if self.prefix:
self.fd.write("| ")
self.fd.write(LogFields["base"]+Spaces["base"]+self.colorbase+
urlData.baseRef+self.colorreset+"\n")
self.fd.write(self.field("base")+self.spaces("base")+
self.colorbase+urlData.baseRef+self.colorreset+"\n")
if urlData.url and self.logfield("realurl"):
if urlData.url and self.has_field("realurl"):
if self.prefix:
self.fd.write("| ")
self.fd.write(LogFields["realurl"]+Spaces["realurl"]+self.colorreal+
urlData.url+self.colorreset+"\n")
if urlData.dltime>=0 and self.logfield("dltime"):
self.fd.write(self.field("realurl")+self.spaces("realurl")+
self.colorreal+urlData.url+self.colorreset+"\n")
if urlData.dltime>=0 and self.has_field("dltime"):
if self.prefix:
self.fd.write("| ")
self.fd.write(LogFields["dltime"]+Spaces["dltime"]+
self.fd.write(self.field("dltime")+self.spaces("dltime")+
self.colordltime+
(_("%.3f seconds") % urlData.dltime)+
self.colorreset+"\n")
if urlData.dlsize>=0 and self.logfield("dlsize"):
if urlData.dlsize>=0 and self.has_field("dlsize"):
if self.prefix:
self.fd.write("| ")
self.fd.write(LogFields["dlsize"]+Spaces["dlsize"]+
self.fd.write(self.field("dlsize")+self.spaces("dlsize")+
self.colordlsize+StringUtil.strsize(urlData.dlsize)+
self.colorreset+"\n")
if urlData.checktime and self.logfield("checktime"):
if urlData.checktime and self.has_field("checktime"):
if self.prefix:
self.fd.write("| ")
self.fd.write(LogFields["checktime"]+Spaces["checktime"]+
self.fd.write(self.field("checktime")+self.spaces("checktime")+
self.colordltime+
(_("%.3f seconds") % urlData.checktime)+self.colorreset+"\n")
if urlData.infoString and self.logfield("info"):
if urlData.infoString and self.has_field("info"):
if self.prefix:
self.fd.write("| "+LogFields["info"]+Spaces["info"]+
self.fd.write("| "+self.field("info")+self.spaces("info")+
StringUtil.indentWith(StringUtil.blocktext(
urlData.infoString, 65), "| "+Spaces["info"]))
urlData.infoString, 65), "| "+self.spaces("info")))
else:
self.fd.write(LogFields["info"]+Spaces["info"]+
self.fd.write(self.field("info")+self.spaces("info")+
StringUtil.indentWith(StringUtil.blocktext(
urlData.infoString, 65), " "+Spaces["info"]))
urlData.infoString, 65), " "+self.spaces("info")))
self.fd.write(self.colorreset+"\n")
if urlData.warningString:
#self.warnings += 1
if self.logfield("warning"):
if self.has_field("warning"):
if self.prefix:
self.fd.write("| ")
self.fd.write(LogFields["warning"]+Spaces["warning"]+
self.fd.write(self.field("warning")+self.spaces("warning")+
self.colorwarning+
urlData.warningString+self.colorreset+"\n")
if self.logfield("result"):
if self.has_field("result"):
if self.prefix:
self.fd.write("| ")
self.fd.write(LogFields["result"]+Spaces["result"])
self.fd.write(self.field("result")+self.spaces("result"))
if urlData.valid:
self.fd.write(self.colorvalid+urlData.validString+
self.colorreset+"\n")
@@ -186,7 +186,7 @@ class ColoredLogger (StandardLogger):
def endOfOutput (self, linknumber=-1):
if self.fd is None: return
if self.logfield("outro"):
if self.has_field("outro"):
if self.prefix:
self.fd.write("o\n")
StandardLogger.endOfOutput(self, linknumber=linknumber)

View file

@@ -23,14 +23,15 @@ class GMLLogger (StandardLogger):
your sitemap graph.
"""
def __init__ (self, **args):
apply(StandardLogger.__init__, (self,), args)
StandardLogger.__init__(self, **args)
self.nodes = {}
self.nodeid = 0
def init (self):
StandardLogger.init(self)
if self.fd is None: return
self.starttime = time.time()
if self.logfield("intro"):
if self.has_field("intro"):
self.fd.write("# "+(linkcheck._("created by %s at %s\n") % (linkcheck.Config.AppName,
strtime(self.starttime))))
self.fd.write("# "+(linkcheck._("Get the newest version at %s\n") % linkcheck.Config.Url))
@@ -49,15 +50,15 @@ class GMLLogger (StandardLogger):
self.nodeid += 1
self.fd.write(" node [\n")
self.fd.write(" id %d\n" % node.id)
if self.logfield("realurl"):
if self.has_field("realurl"):
self.fd.write(' label "%s"\n' % node.url)
if node.dltime>=0 and self.logfield("dltime"):
if node.dltime>=0 and self.has_field("dltime"):
self.fd.write(" dltime %d\n" % node.dltime)
if node.dlsize>=0 and self.logfield("dlsize"):
if node.dlsize>=0 and self.has_field("dlsize"):
self.fd.write(" dlsize %d\n" % node.dlsize)
if node.checktime and self.logfield("checktime"):
if node.checktime and self.has_field("checktime"):
self.fd.write(" checktime %d\n" % node.checktime)
if self.logfield("extern"):
if self.has_field("extern"):
self.fd.write(" extern %d\n" % (node.extern and 1 or 0))
self.fd.write(" ]\n")
self.writeEdges()
@@ -70,11 +71,11 @@ class GMLLogger (StandardLogger):
if self.nodes.has_key(node.parentName):
self.fd.write(" edge [\n")
self.fd.write(' label "%s"\n' % node.urlName)
if self.logfield("parenturl"):
if self.has_field("parenturl"):
self.fd.write(" source %d\n" % \
self.nodes[node.parentName].id)
self.fd.write(" target %d\n" % node.id)
if self.logfield("result"):
if self.has_field("result"):
self.fd.write(" valid %d\n" % (node.valid and 1 or 0))
self.fd.write(" ]\n")
self.fd.flush()
@@ -82,7 +83,7 @@ class GMLLogger (StandardLogger):
def endOfOutput (self, linknumber=-1):
if self.fd is None: return
self.fd.write("]\n")
if self.logfield("outro"):
if self.has_field("outro"):
self.stoptime = time.time()
duration = self.stoptime - self.starttime
name = linkcheck._("seconds")

View file

@@ -15,7 +15,7 @@
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from StandardLogger import StandardLogger
from linkcheck.log import strtime, LogFields
from linkcheck.log import strtime
from linkcheck import StringUtil, _, Config
import time
@@ -37,7 +37,7 @@ class HtmlLogger (StandardLogger):
"""Logger with HTML output"""
def __init__ (self, **args):
apply(StandardLogger.__init__, (self,), args)
StandardLogger.__init__(self, **args)
self.colorbackground = args['colorbackground']
self.colorurl = args['colorurl']
self.colorborder = args['colorborder']
@@ -47,11 +47,12 @@ class HtmlLogger (StandardLogger):
self.tableok = args['tableok']
def init (self):
StandardLogger.init(self)
if self.fd is None: return
self.starttime = time.time()
self.fd.write(HTML_HEADER%(Config.App, self.colorbackground,
self.colorlink, self.colorlink, self.colorlink))
if self.logfield('intro'):
if self.has_field('intro'):
self.fd.write("<center><h2>"+Config.App+"</h2></center>"+
"<br><blockquote>"+Config.Freeware+"<br><br>"+
(_("Start checking at %s\n") % strtime(self.starttime))+
@@ -67,18 +68,18 @@ class HtmlLogger (StandardLogger):
"<td>\n"+
"<table align=left border=0 cellspacing=0 cellpadding=3\n"+
" summary=\"checked link\" bgcolor="+self.colorbackground+">\n")
if self.logfield("url"):
if self.has_field("url"):
self.fd.write("<tr>\n"+
"<td bgcolor="+self.colorurl+">"+LogFields["url"]+"</td>\n"+
"<td bgcolor="+self.colorurl+">"+self.field("url")+"</td>\n"+
"<td bgcolor="+self.colorurl+">"+urlData.urlName)
if urlData.cached:
self.fd.write(_(" (cached)"))
self.fd.write("</td>\n</tr>\n")
if urlData.name and self.logfield("name"):
self.fd.write("<tr>\n<td>"+LogFields["name"]+"</td>\n<td>"+
if urlData.name and self.has_field("name"):
self.fd.write("<tr>\n<td>"+self.field("name")+"</td>\n<td>"+
urlData.name+"</td>\n</tr>\n")
if urlData.parentName and self.logfield("parenturl"):
self.fd.write("<tr>\n<td>"+LogFields["parenturl"]+
if urlData.parentName and self.has_field("parenturl"):
self.fd.write("<tr>\n<td>"+self.field("parenturl")+
'</td>\n<td><a target="top" href="'+urlData.parentName+'">'+
urlData.parentName+"</a>")
if urlData.line:
@@ -86,46 +87,46 @@ class HtmlLogger (StandardLogger):
if urlData.column:
self.fd.write(_(", col %d")%urlData.column)
self.fd.write("</td>\n</tr>\n")
if urlData.baseRef and self.logfield("base"):
self.fd.write("<tr>\n<td>"+LogFields["base"]+"</td>\n<td>"+
if urlData.baseRef and self.has_field("base"):
self.fd.write("<tr>\n<td>"+self.field("base")+"</td>\n<td>"+
urlData.baseRef+"</td>\n</tr>\n")
if urlData.url and self.logfield("realurl"):
self.fd.write("<tr>\n<td>"+LogFields["realurl"]+"</td>\n<td>"+
if urlData.url and self.has_field("realurl"):
self.fd.write("<tr>\n<td>"+self.field("realurl")+"</td>\n<td>"+
'<a target="top" href="'+urlData.url+
'">'+urlData.url+"</a></td>\n</tr>\n")
if urlData.dltime>=0 and self.logfield("dltime"):
self.fd.write("<tr>\n<td>"+LogFields["dltime"]+"</td>\n<td>"+
if urlData.dltime>=0 and self.has_field("dltime"):
self.fd.write("<tr>\n<td>"+self.field("dltime")+"</td>\n<td>"+
(_("%.3f seconds") % urlData.dltime)+
"</td>\n</tr>\n")
if urlData.dlsize>=0 and self.logfield("dlsize"):
self.fd.write("<tr>\n<td>"+LogFields["dlsize"]+"</td>\n<td>"+
if urlData.dlsize>=0 and self.has_field("dlsize"):
self.fd.write("<tr>\n<td>"+self.field("dlsize")+"</td>\n<td>"+
StringUtil.strsize(urlData.dlsize)+
"</td>\n</tr>\n")
if urlData.checktime and self.logfield("checktime"):
self.fd.write("<tr>\n<td>"+LogFields["checktime"]+
if urlData.checktime and self.has_field("checktime"):
self.fd.write("<tr>\n<td>"+self.field("checktime")+
"</td>\n<td>"+
(_("%.3f seconds") % urlData.checktime)+
"</td>\n</tr>\n")
if urlData.infoString and self.logfield("info"):
self.fd.write("<tr>\n<td>"+LogFields["info"]+"</td>\n<td>"+
if urlData.infoString and self.has_field("info"):
self.fd.write("<tr>\n<td>"+self.field("info")+"</td>\n<td>"+
StringUtil.htmlify(urlData.infoString)+
"</td>\n</tr>\n")
if urlData.warningString:
#self.warnings += 1
if self.logfield("warning"):
if self.has_field("warning"):
self.fd.write("<tr>\n"+
self.tablewarning+LogFields["warning"]+
self.tablewarning+self.field("warning")+
"</td>\n"+self.tablewarning+
urlData.warningString.replace("\n", "<br>")+
"</td>\n</tr>\n")
if self.logfield("result"):
if self.has_field("result"):
if urlData.valid:
self.fd.write("<tr>\n"+self.tableok+
LogFields["result"]+"</td>\n"+
self.field("result")+"</td>\n"+
self.tableok+urlData.validString+"</td>\n</tr>\n")
else:
self.errors += 1
self.fd.write("<tr>\n"+self.tableerror+LogFields["result"]+
self.fd.write("<tr>\n"+self.tableerror+self.field("result")+
"</td>\n"+self.tableerror+
urlData.errorString+"</td>\n</tr>\n")
self.fd.write("</table></td></tr></table><br clear=all>")
@@ -133,7 +134,7 @@ class HtmlLogger (StandardLogger):
def endOfOutput (self, linknumber=-1):
if self.fd is None: return
if self.logfield("outro"):
if self.has_field("outro"):
self.fd.write("\n"+_("Thats it. "))
#if self.warnings==1:
# self.fd.write(_("1 warning, "))

View file

@@ -14,20 +14,54 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from linkcheck import _
class Logger:
Fields = {
"realurl": lambda: _("Real URL"),
"result": lambda: _("Result"),
"base": lambda: _("Base"),
"name": lambda: _("Name"),
"parenturl": lambda: _("Parent URL"),
"extern": lambda: _("Extern"),
"info": lambda: _("Info"),
"warning": lambda: _("Warning"),
"dltime": lambda: _("D/L Time"),
"dlsize": lambda: _("D/L Size"),
"checktime": lambda: _("Check Time"),
"url": lambda: _("URL"),
}
def __init__ (self, **args):
self.logfields = None # all fields
self.logfields = None # log all fields
if args.has_key('fields'):
if "all" not in args['fields']:
self.logfields = args['fields']
def logfield (self, name):
def has_field (self, name):
if self.logfields is None:
# log all fields
return 1
return name in self.logfields
def field (self, name):
return self.Fields[name]()
def spaces (self, name):
return self.logspaces[name]
def init (self):
raise Exception, "abstract function"
# map with spaces between field name and value
self.logspaces = {}
if self.logfields is None:
fields = self.Fields.keys()
else:
fields = self.logfields
values = [self.field(x) for x in fields]
# maximum indent for localized log field names
self.max_indent = max(map(lambda x: len(x), values))+1
for key in fields:
self.logspaces[key] = " "*(self.max_indent - len(self.field(key)))
def newUrl (self, urlData):
raise Exception, "abstract function"
@@ -39,5 +73,4 @@ class Logger:
return self.__class__.__name__
def __repr__ (self):
return self.__class__.__name__
return `self.__class__.__name__`

View file

@@ -22,14 +22,15 @@ from linkcheck import StringUtil, _, Config
class SQLLogger (StandardLogger):
""" SQL output for PostgreSQL, not tested"""
def __init__ (self, **args):
apply(StandardLogger.__init__, (self,), args)
StandardLogger.__init__(self, **args)
self.dbname = args['dbname']
self.separator = args['separator']
def init (self):
StandardLogger.init(self)
if self.fd is None: return
self.starttime = time.time()
if self.logfield("intro"):
if self.has_field("intro"):
self.fd.write("-- "+(_("created by %s at %s\n") % (Config.AppName,
strtime(self.starttime))))
self.fd.write("-- "+(_("Get the newest version at %s\n") % Config.Url))
@@ -67,7 +68,7 @@ class SQLLogger (StandardLogger):
def endOfOutput (self, linknumber=-1):
if self.fd is None: return
if self.logfield("outro"):
if self.has_field("outro"):
self.stoptime = time.time()
duration = self.stoptime - self.starttime
name = _("seconds")

View file

@@ -14,9 +14,10 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import sys, time, linkcheck, linkcheck.Config
import sys, time
from linkcheck import Config, _
from Logger import Logger
from linkcheck.log import LogFields, Spaces, strtime, MaxIndent
from linkcheck.log import strtime
from linkcheck import StringUtil
class StandardLogger (Logger):
@@ -54,7 +55,7 @@ __init__(self, **args)
"""
def __init__ (self, **args):
apply(Logger.__init__, (self,), args)
Logger.__init__(self, **args)
self.errors = 0
#self.warnings = 0
if args.has_key('fileoutput'):
@@ -65,59 +66,64 @@ __init__(self, **args)
self.fd = sys.stdout
def init (self):
Logger.init(self)
if self.fd is None: return
self.starttime = time.time()
if self.logfield('intro'):
self.fd.write("%s\n%s\n" % (linkcheck.Config.AppInfo, linkcheck.Config.Freeware))
self.fd.write(linkcheck._("Get the newest version at %s\n") % linkcheck.Config.Url)
self.fd.write(linkcheck._("Write comments and bugs to %s\n\n") % linkcheck.Config.Email)
self.fd.write(linkcheck._("Start checking at %s\n") % linkcheck.log.strtime(self.starttime))
if self.has_field('intro'):
self.fd.write("%s\n%s\n" % (Config.AppInfo, Config.Freeware))
self.fd.write(_("Get the newest version at %s\n") % Config.Url)
self.fd.write(_("Write comments and bugs to %s\n\n") % Config.Email)
self.fd.write(_("Start checking at %s\n") % strtime(self.starttime))
self.fd.flush()
def newUrl (self, urlData):
if self.fd is None: return
if self.logfield('url'):
self.fd.write("\n"+LogFields['url']+Spaces['url']+urlData.urlName)
if self.has_field('url'):
self.fd.write("\n"+self.field('url')+self.spaces('url')+
urlData.urlName)
if urlData.cached:
self.fd.write(linkcheck._(" (cached)\n"))
self.fd.write(_(" (cached)\n"))
else:
self.fd.write("\n")
if urlData.name and self.logfield('name'):
self.fd.write(LogFields["name"]+Spaces["name"]+urlData.name+"\n")
if urlData.parentName and self.logfield('parenturl'):
self.fd.write(LogFields['parenturl']+Spaces["parenturl"]+
if urlData.name and self.has_field('name'):
self.fd.write(self.field("name")+self.spaces("name")+
urlData.name+"\n")
if urlData.parentName and self.has_field('parenturl'):
self.fd.write(self.field('parenturl')+self.spaces("parenturl")+
urlData.parentName+
(linkcheck._(", line %d")%urlData.line)+
(linkcheck._(", col %d")%urlData.column)+"\n")
if urlData.baseRef and self.logfield('base'):
self.fd.write(LogFields["base"]+Spaces["base"]+urlData.baseRef+"\n")
if urlData.url and self.logfield('realurl'):
self.fd.write(LogFields["realurl"]+Spaces["realurl"]+urlData.url+"\n")
if urlData.dltime>=0 and self.logfield('dltime'):
self.fd.write(LogFields["dltime"]+Spaces["dltime"]+
linkcheck._("%.3f seconds\n") % urlData.dltime)
if urlData.dlsize>=0 and self.logfield('dlsize'):
self.fd.write(LogFields["dlsize"]+Spaces["dlsize"]+
(_(", line %d")%urlData.line)+
(_(", col %d")%urlData.column)+"\n")
if urlData.baseRef and self.has_field('base'):
self.fd.write(self.field("base")+self.spaces("base")+
urlData.baseRef+"\n")
if urlData.url and self.has_field('realurl'):
self.fd.write(self.field("realurl")+self.spaces("realurl")+
urlData.url+"\n")
if urlData.dltime>=0 and self.has_field('dltime'):
self.fd.write(self.field("dltime")+self.spaces("dltime")+
_("%.3f seconds\n") % urlData.dltime)
if urlData.dlsize>=0 and self.has_field('dlsize'):
self.fd.write(self.field("dlsize")+self.spaces("dlsize")+
"%s\n"%StringUtil.strsize(urlData.dlsize))
if urlData.checktime and self.logfield('checktime'):
self.fd.write(LogFields["checktime"]+Spaces["checktime"]+
linkcheck._("%.3f seconds\n") % urlData.checktime)
if urlData.infoString and self.logfield('info'):
self.fd.write(LogFields["info"]+Spaces["info"]+
if urlData.checktime and self.has_field('checktime'):
self.fd.write(self.field("checktime")+self.spaces("checktime")+
_("%.3f seconds\n") % urlData.checktime)
if urlData.infoString and self.has_field('info'):
self.fd.write(self.field("info")+self.spaces("info")+
StringUtil.indent(
StringUtil.blocktext(urlData.infoString, 65),
MaxIndent)+"\n")
self.max_indent)+"\n")
if urlData.warningString:
#self.warnings += 1
if self.logfield('warning'):
self.fd.write(LogFields["warning"]+Spaces["warning"]+
if self.has_field('warning'):
self.fd.write(self.field("warning")+self.spaces("warning")+
StringUtil.indent(
StringUtil.blocktext(urlData.warningString, 65),
MaxIndent)+"\n")
self.max_indent)+"\n")
if self.logfield('result'):
self.fd.write(LogFields["result"]+Spaces["result"])
if self.has_field('result'):
self.fd.write(self.field("result")+self.spaces("result"))
if urlData.valid:
self.fd.write(urlData.validString+"\n")
else:
@@ -125,34 +131,35 @@ __init__(self, **args)
self.fd.write(urlData.errorString+"\n")
self.fd.flush()
def endOfOutput (self, linknumber=-1):
if self.fd is None: return
if self.logfield('outro'):
self.fd.write(linkcheck._("\nThats it. "))
if self.has_field('outro'):
self.fd.write(_("\nThats it. "))
#if self.warnings==1:
# self.fd.write(linkcheck._("1 warning, "))
# self.fd.write(_("1 warning, "))
#else:
# self.fd.write(str(self.warnings)+linkcheck._(" warnings, "))
# self.fd.write(str(self.warnings)+_(" warnings, "))
if self.errors==1:
self.fd.write(linkcheck._("1 error"))
self.fd.write(_("1 error"))
else:
self.fd.write(str(self.errors)+linkcheck._(" errors"))
self.fd.write(str(self.errors)+_(" errors"))
if linknumber >= 0:
if linknumber == 1:
self.fd.write(linkcheck._(" in 1 link"))
self.fd.write(_(" in 1 link"))
else:
self.fd.write(linkcheck._(" in %d links") % linknumber)
self.fd.write(linkcheck._(" found\n"))
self.fd.write(_(" in %d links") % linknumber)
self.fd.write(_(" found\n"))
self.stoptime = time.time()
duration = self.stoptime - self.starttime
name = linkcheck._("seconds")
self.fd.write(linkcheck._("Stopped checking at %s") % linkcheck.log.strtime(self.stoptime))
name = _("seconds")
self.fd.write(_("Stopped checking at %s") % strtime(self.stoptime))
if duration > 60:
duration = duration / 60
name = linkcheck._("minutes")
name = _("minutes")
if duration > 60:
duration = duration / 60
name = linkcheck._("hours")
name = _("hours")
self.fd.write(" (%.3f %s)\n" % (duration, name))
self.fd.flush()
self.fd = None

View file

@@ -14,7 +14,8 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import time, linkcheck, linkcheck.Config
import time
from linkcheck import Config, _
from linkcheck.StringUtil import xmlify
from linkcheck.log import strtime
from StandardLogger import StandardLogger
@@ -23,21 +24,22 @@ class XMLLogger (StandardLogger):
"""XML output mirroring the GML structure. Easy to parse with any XML
tool."""
def __init__ (self, **args):
apply(StandardLogger.__init__, (self,), args)
StandardLogger.__init__(self, **args)
self.nodes = {}
self.nodeid = 0
def init (self):
StandardLogger.init(self)
if self.fd is None: return
self.starttime = time.time()
self.fd.write('<?xml version="1.0"?>\n')
if self.logfield("intro"):
if self.has_field("intro"):
self.fd.write("<!--\n")
self.fd.write(" "+linkcheck._("created by %s at %s\n") % \
(linkcheck.Config.AppName, strtime(self.starttime)))
self.fd.write(" "+linkcheck._("Get the newest version at %s\n") % linkcheck.Config.Url)
self.fd.write(" "+linkcheck._("Write comments and bugs to %s\n\n") % \
linkcheck.Config.Email)
self.fd.write(" "+_("created by %s at %s\n") % \
(Config.AppName, strtime(self.starttime)))
self.fd.write(" "+_("Get the newest version at %s\n") % Config.Url)
self.fd.write(" "+_("Write comments and bugs to %s\n\n") % \
Config.Email)
self.fd.write("-->\n\n")
self.fd.write('<GraphXML>\n<graph isDirected="true">\n')
self.fd.flush()
@@ -52,17 +54,17 @@ class XMLLogger (StandardLogger):
self.nodeid += 1
self.fd.write(' <node name="%d" ' % node.id)
self.fd.write(">\n")
if self.logfield("realurl"):
if self.has_field("realurl"):
self.fd.write(" <label>%s</label>\n" % xmlify(node.url))
self.fd.write(" <data>\n")
if node.dltime>=0 and self.logfield("dltime"):
if node.dltime>=0 and self.has_field("dltime"):
self.fd.write(" <dltime>%f</dltime>\n" % node.dltime)
if node.dlsize>=0 and self.logfield("dlsize"):
if node.dlsize>=0 and self.has_field("dlsize"):
self.fd.write(" <dlsize>%d</dlsize>\n" % node.dlsize)
if node.checktime and self.logfield("checktime"):
if node.checktime and self.has_field("checktime"):
self.fd.write(" <checktime>%f</checktime>\n" \
% node.checktime)
if self.logfield("extern"):
if self.has_field("extern"):
self.fd.write(" <extern>%d</extern>\n" % \
(node.extern and 1 or 0))
self.fd.write(" </data>\n")
@@ -80,10 +82,10 @@ class XMLLogger (StandardLogger):
self.nodes[node.parentName].id)
self.fd.write(' target="%d"' % node.id)
self.fd.write(">\n")
if self.logfield("url"):
if self.has_field("url"):
self.fd.write(" <label>%s</label>\n" % xmlify(node.urlName))
self.fd.write(" <data>\n")
if self.logfield("result"):
if self.has_field("result"):
self.fd.write(" <valid>%d</valid>\n" % \
(node.valid and 1 or 0))
self.fd.write(" </data>\n")
@@ -93,18 +95,18 @@ class XMLLogger (StandardLogger):
def endOfOutput (self, linknumber=-1):
if self.fd is None: return
self.fd.write("</graph>\n</GraphXML>\n")
if self.logfield("outro"):
if self.has_field("outro"):
self.stoptime = time.time()
duration = self.stoptime - self.starttime
name = linkcheck._("seconds")
name = _("seconds")
self.fd.write("<!-- ")
self.fd.write(linkcheck._("Stopped checking at %s") % strtime(self.stoptime))
self.fd.write(_("Stopped checking at %s") % strtime(self.stoptime))
if duration > 60:
duration = duration / 60
name = linkcheck._("minutes")
name = _("minutes")
if duration > 60:
duration = duration / 60
name = linkcheck._("hours")
name = _("hours")
self.fd.write(" (%.3f %s)\n" % (duration, name))
self.fd.write("-->")
self.fd.flush()

View file

@@ -30,29 +30,6 @@ def strtimezone ():
zone = time.timezone
return "%+04d" % int(-zone/3600)
import linkcheck
LogFields = {
"realurl": linkcheck._("Real URL"),
"result": linkcheck._("Result"),
"base": linkcheck._("Base"),
"name": linkcheck._("Name"),
"parenturl": linkcheck._("Parent URL"),
"extern": linkcheck._("Extern"),
"info": linkcheck._("Info"),
"warning": linkcheck._("Warning"),
"dltime": linkcheck._("D/L Time"),
"dlsize": linkcheck._("D/L Size"),
"checktime": linkcheck._("Check Time"),
"url": linkcheck._("URL"),
}
# maximum indent for localized log field names
MaxIndent = max(map(lambda x: len(x), LogFields.values()))+1
# map with spaces between field name and value
Spaces = {}
for key,value in LogFields.items():
Spaces[key] = " "*(MaxIndent - len(value))
from StandardLogger import StandardLogger
from HtmlLogger import HtmlLogger
from ColoredLogger import ColoredLogger
@@ -74,4 +51,4 @@ Loggers = {
"xml": XMLLogger,
}
# for easy printing: a comma separated logger list
LoggerKeys = reduce(lambda x, y: x+", "+y, Loggers.keys())
LoggerKeys = ", ".join(Loggers.keys())