i18n additions

git-svn-id: https://linkchecker.svn.sourceforge.net/svnroot/linkchecker/trunk/linkchecker@99 e7d03fd6-7b0d-0410-9947-9c21f3af8025
This commit is contained in:
calvin 2000-06-03 12:50:19 +00:00
parent 6622e6cb84
commit de64233e40
9 changed files with 666 additions and 386 deletions

View file

@ -1,4 +1,6 @@
VERSION=$(shell ./setup.py --version)
# This Makefile is only used by developers! No need for users to
# call make.
VERSION=$(shell python setup.py --version)
HOST=treasure.calvinsplayground.de
PROXY=
#PROXY=-P$(HOST):5050
@ -31,14 +33,14 @@ TAR = tar
ZIP = zip
all:
@echo "run python setup.py --help to see how to install"
@echo "run python setup.py --help to see how to build and install"
clean:
./setup.py clean --all
python setup.py clean --all
rm -rf $(ALLPACKAGES) $(PACKAGE)-out.*
dist:
./setup.py sdist
python setup.py sdist bdist_rpm
fakeroot debian/rules binary
files:
@ -55,11 +57,19 @@ test:
done
po:
# german translation
xgettext --default-domain=linkcheck \
--join-existing --keyword --keyword=_ \
--output-dir=locale/de/LC_MESSAGES/ --sort-output $(SOURCES)
# french translation
#xgettext --default-domain=linkcheck \
#--join-existing --keyword --keyword=_ \
#--output-dir=locale/fr/LC_MESSAGES/ --sort-output $(SOURCES)
mo:
# german translation
msgfmt -o locale/de/LC_MESSAGES/linkcheck.mo \
locale/de/LC_MESSAGES/linkcheck.po
# french translation
#msgfmt -o locale/fr/LC_MESSAGES/linkcheck.mo \
#locale/fr/LC_MESSAGES/linkcheck.po

5
debian/changelog vendored
View file

@ -3,14 +3,15 @@ linkchecker (1.2.3) unstable; urgency=low
* typo fix for adjustWinPath
* added some source code documentation
* improved error messages for wrong options
* configuration file option for output filenames
* configuration file options for logger output
* linkchecker.bat installation support for windows
* included test suite in distribution
* blacklist output support
* CSV output support
* SSL autodetection in setup.py
* added GPL copyright header to each of my .py files
* i18n support and german translation of the logger outputs
* i18n support
* german translation
* use http_proxy environment variable if present
* be RFC822 and RFC2368 compliant when scanning mail syntax
* fix for incorrect line number in logger output (reported by Michael

6
debian/rules vendored
View file

@ -13,7 +13,7 @@ build-stamp:
dh_testdir
# Add here commands to compile the package.
$(MAKE)
python setup.py build
touch build-stamp
@ -23,7 +23,7 @@ clean:
rm -f build-stamp
# Add here commands to clean up after the build process.
-$(MAKE) clean
python setup.py clean --all
dh_clean
@ -33,7 +33,7 @@ install: build
dh_clean -k
dh_installdirs
# ha! the root option finally made it into distutils
./setup.py install --root=`pwd`/debian/tmp
python setup.py install --root=`pwd`/debian/tmp
# put the README of the DNS package in doc (only on Debian)
install -c -m 644 DNS/README debian/tmp/usr/share/doc/linkchecker/README.dns

View file

@ -69,9 +69,6 @@ def debug(msg):
def norm(path):
return normcase(normpath(expanduser(path)))
# the blacklist file
BlacklistFile = norm("~/.blacklist")
# dynamic options
class Configuration(UserDict.UserDict):
"""Dynamic options are stored in this class so you can run
@ -95,14 +92,48 @@ class Configuration(UserDict.UserDict):
self.data["robotstxt"] = 0
self.data["strict"] = 0
self.data["fileoutput"] = []
self.data["fileoutputnames"] = {
"text": "linkchecker-out.txt",
"html": "linkchecker-out.html",
"colored": "linkchecker-out.asc",
"gml": "linkchecker-out.gml",
"sql": "linkchecker-out.sql",
"csv": "linkchecker-out.csv",
# Logger configurations
self.data["text"] = {
"filename": "linkchecker-out.txt",
}
self.data['html'] = {
"filename": "linkchecker-out.html",
'colorbackground': '"#fff7e5"',
'colorurl': '"#dcd5cf"',
'colorborder': '"#000000"',
'colorlink': '"#191c83"',
'tablewarning': '<td bgcolor="#e0954e">',
'tableerror': '<td bgcolor="#db4930">',
'tableok': '<td bgcolor="#3ba557">',
}
ESC="\x1b"
self.data['colored'] = {
"filename": "linkchecker-out.ansi",
'colorparent': ESC+"[37m", # white
'colorurl': ESC+"[0m", # standard
'colorreal': ESC+"[36m", # cyan
'colorbase': ESC+"[35m", # magenta
'colorvalid': ESC+"[1;32m", # green
'colorinvalid': ESC+"[1;31m", # red
'colorinfo': ESC+"[0m", # standard
'colorwarning': ESC+"[1;33m", # yellow
'colordltime': ESC+"[0m", # standard
'colorreset': ESC+"[0m", # reset to standard
}
self.data['gml'] = {
"filename": "linkchecker-out.gml",
}
self.data['sql'] = {
"filename": "linkchecker-out.sql",
'separator': ';',
'dbname': 'linksdb',
}
self.data['csv'] = {
"filename": "linkchecker-out.csv",
}
self.data['blacklist'] = {
"filename": "~/.blacklist",
}
self.data["quiet"] = 0
self.data["warningregex"] = None
self.data["nntpserver"] = os.environ.get("NNTP_SERVER",None)
@ -204,7 +235,15 @@ class Configuration(UserDict.UserDict):
def robotsTxtCache_set_NoThreads(self, key, val):
self.robotsTxtCache[key] = val
def newLogger(self, name, fileout):
if fileout:
self.data['fileoutput'].append(apply(Loggers[name], (fileout,),
self.data[name]))
else:
self.data['log'] = apply(Loggers[name], (fileout,),
self.data[name])
def log_newUrl_NoThreads(self, url):
if not self.data["quiet"]: self.data["log"].newUrl(url)
for log in self.data["fileoutput"]:
@ -324,11 +363,16 @@ class Configuration(UserDict.UserDict):
def error(self, msg):
self.message("Config: ERROR: "+msg)
def message(self, msg):
sys.stderr.write(msg+"\n")
sys.stderr.flush()
def readConfig(self, files):
"""this big function reads all the configuration parameters
used in the linkchecker module.
"""
try:
cfgparser = ConfigParser.ConfigParser()
cfgparser.read(files)
@ -339,7 +383,7 @@ class Configuration(UserDict.UserDict):
try:
log = cfgparser.get(section, "log")
if Loggers.has_key(log):
self.data["log"] = Loggers[log]()
self.data["log"] = self.newLogger(log)
else:
self.warn("invalid log option "+log)
except ConfigParser.Error: pass
@ -352,21 +396,18 @@ class Configuration(UserDict.UserDict):
except ConfigParser.Error: pass
try: self.data["warnings"] = cfgparser.getboolean(section, "warnings")
except ConfigParser.Error: pass
try:
filenames = eval(cfgparser.get(section, "fileoutputnames"))
for key in filenames.keys():
if self.data["fileoutputnames"].has_key(key) and \
type(filenames[key]) == StringType:
self.data["fileoutputnames"][key] = filenames[key]
except ConfigParser.Error: pass
try:
filelist = string.split(cfgparser.get(section, "fileoutput"))
for arg in filelist:
# no file output for the blacklist Logger
if Loggers.has_key(arg) and arg != "blacklist":
self.data["fileoutput"].append(Loggers[arg](
open(self.data["fileoutputnames"][arg], "w")))
self.data["fileoutput"].append(self.newLogger(arg, 1))
except ConfigParser.Error: pass
for key in Loggers.keys():
if cfgparser.has_section(key):
for opt in cfgparser.options(key):
try: self.data[key][opt] = cfgparser.get(key, opt)
except ConfigParser.Error: pass
section="checking"
try:

View file

@ -28,32 +28,19 @@ newUrl(self,urlData)
endOfOutput(self)
Called at the end of checking to close filehandles and such.
Passing parameters to the constructor:
__init__(self, fileoutput=None, **args)
The fileoutput flag specifies if output goes to a file.
The args dictionary is filled in Config.py. There you can specify
default parameters. Adjust these parameters in the configuration
files in the appropriate logger section.
"""
import sys,time
import Config,StringUtil
from linkcheck import _
# ANSI color codes
ESC="\x1b"
COL_PARENT =ESC+"[37m" # white
COL_URL =ESC+"[0m" # standard
COL_REAL =ESC+"[36m" # cyan
COL_BASE =ESC+"[35m" # magenta
COL_VALID =ESC+"[1;32m" # green
COL_INVALID =ESC+"[1;31m" # red
COL_INFO =ESC+"[0m" # standard
COL_WARNING =ESC+"[1;33m" # yellow
COL_DLTIME =ESC+"[0m" # standard
COL_RESET =ESC+"[0m" # reset to standard
# HTML colors
ColorBackground="\"#fff7e5\""
ColorUrl="\"#dcd5cf\""
ColorBorder="\"#000000\""
ColorLink="\"#191c83\""
TableWarning="<td bgcolor=\"#e0954e\">"
TableError="<td bgcolor=\"db4930\">"
TableOK="<td bgcolor=\"3ba557\">"
# HTML shortcuts
RowEnd="</td></tr>\n"
MyFont="<font face=\"Lucida,Verdana,Arial,sans-serif,Helvetica\">"
@ -87,26 +74,25 @@ class StandardLogger:
Unknown keywords will be ignored.
"""
def __init__(self, fd=sys.stdout):
def __init__(self, fileout=None, **args):
self.errors=0
self.warnings=0
self.fd = fd
self.fd = fileout and args['filename'] or sys.stdout
def init(self):
self.starttime = time.time()
self.fd.write(Config.AppInfo+"\n"+
Config.Freeware+"\n"+
_("Get the newest version at ")+Config.Url+"\n"+
_("Write comments and bugs to ")+Config.Email+"\n\n"+
_("Start checking at ")+_strtime(self.starttime)+"\n")
self.fd.write("%s\n%s\n" % (Config.AppInfo, Config.Freeware))
self.fd.write(_("Get the newest version at %s\n") % Config.Url)
self.fd.write(_("Write comments and bugs to %s\n\n") % Config.Email)
self.fd.write(_("Start checking at %s\n") % _strtime(self.starttime))
self.fd.flush()
def newUrl(self, urldata):
self.fd.write("\n"+_("URL")+Spaces["URL"]+urldata.urlName)
if urldata.cached:
self.fd.write(" (cached)\n")
self.fd.write(_(" (cached)\n"))
else:
self.fd.write("\n")
if urldata.parentName:
@ -157,9 +143,9 @@ class StandardLogger:
self.fd.write(str(self.errors)+_(" errors"))
self.fd.write(_(" found.\n"))
self.stoptime = time.time()
self.fd.write(_("Stopped checking at ")+_strtime(self.stoptime)+
(_(" (%.3f seconds)") %
(self.stoptime - self.starttime))+"\n")
self.fd.write(_("Stopped checking at %s (%.3f seconds)\n") % \
(_strtime(self.stoptime),
(self.stoptime - self.starttime)))
self.fd.flush()
self.fd = None
@ -176,7 +162,8 @@ class HtmlLogger(StandardLogger):
"<center><h2>"+MyFont+Config.AppName+"</font>"+
"</center></h2>"+
"<br><blockquote>"+Config.Freeware+"<br><br>"+
_("Start checking at ")+_strtime(self.starttime)+"<br><br>")
(_("Start checking at %s\n") % _strtime(self.starttime))+
"<br><br>")
self.fd.flush()
@ -189,7 +176,7 @@ class HtmlLogger(StandardLogger):
MyFont+"URL</font></td><td bgcolor="+ColorUrl+">"+MyFont+
StringUtil.htmlify(urlData.urlName))
if urlData.cached:
self.fd.write("(cached)")
self.fd.write(_(" (cached)\n"))
self.fd.write("</font>"+RowEnd)
if urlData.parentName:
@ -207,12 +194,12 @@ class HtmlLogger(StandardLogger):
"\">"+urlData.url+"</a></font>"+RowEnd)
if urlData.downloadtime:
self.fd.write("<tr><td>"+MyFont+_("D/L Time")+"</font></td><td>"+
MyFont+("%.3f" % urlData.downloadtime)+
" seconds</font>"+RowEnd)
MyFont+(_("%.3f seconds") % urlData.downloadtime)+
"</font>"+RowEnd)
if urlData.checktime:
self.fd.write("<tr><td>"+MyFont+_("Check Time")+
"</font></td><td>"+MyFont+
("%.3f" % urlData.checktime)+" seconds</font>"+
(_("%.3f seconds") % urlData.checktime)+"</font>"+
RowEnd)
if urlData.infoString:
self.fd.write("<tr><td>"+MyFont+_("Info")+"</font></td><td>"+
@ -236,7 +223,7 @@ class HtmlLogger(StandardLogger):
def endOfOutput(self):
self.fd.write(MyFont+_("Thats it. "))
self.fd.write(MyFont+_("\nThats it. "))
if self.warnings==1:
self.fd.write(_("1 warning, "))
else:
@ -245,16 +232,18 @@ class HtmlLogger(StandardLogger):
self.fd.write(_("1 error"))
else:
self.fd.write(str(self.errors)+_(" errors"))
self.fd.write(_(" found.")+"<br>")
self.fd.write(_(" found.\n")+"<br>")
self.stoptime = time.time()
self.fd.write(_("Stopped checking at ")+_strtime(self.stoptime)+
(_(" (%.3f seconds)") % (self.stoptime - self.starttime))+
"</font></blockquote><br><hr noshade size=1><small>"+
MyFont+Config.HtmlAppInfo+"<br>"+_("Get the newest version at ")+
"<a href=\""+Config.Url+"\">"+Config.Url+
"</a>.<br>"+_("Write comments and bugs to ")+"<a href=\"mailto:"+
Config.Email+"\">"+Config.Email+
"</a>.</font></small></body></html>")
self.fd.write(_("Stopped checking at %s (%.3f seconds)\n") %\
(_strtime(self.stoptime),
(self.stoptime - self.starttime)))
self.fd.write("</font></blockquote><br><hr noshade size=1><small>"+
MyFont+Config.HtmlAppInfo+"<br>")
self.fd.write(_("Get the newest version at %s\n") %\
("<a href=\""+Config.Url+"\">"+Config.Url+"</a>.<br>"))
self.fd.write(_("Write comments and bugs to %s\n") %\
("<a href=\"mailto:"+Config.Email+"\">"+Config.Email+"</a>."))
self.fd.write("</font></small></body></html>")
self.fd.flush()
self.fd = None
@ -262,8 +251,8 @@ class HtmlLogger(StandardLogger):
class ColoredLogger(StandardLogger):
"""ANSI colorized output"""
def __init__(self, fd=sys.stdout):
StandardLogger.__init__(self, fd)
def __init__(self, fileout=None, **args):
StandardLogger.__init__(self, fileout, args)
self.currentPage = None
self.prefix = 0
@ -290,7 +279,7 @@ class ColoredLogger(StandardLogger):
COL_RESET)
if urlData.line: self.fd.write(_(", line ")+`urlData.line`+"")
if urlData.cached:
self.fd.write(" (cached)\n")
self.fd.write(_(" (cached)\n"))
else:
self.fd.write("\n")
@ -355,16 +344,17 @@ class GMLLogger(StandardLogger):
"""GML means Graph Modeling Language. Use a GML tool to see
your sitemap graph.
"""
def __init__(self,fd=sys.stdout):
StandardLogger.__init__(self,fd)
def __init__(self, fileout=None, **args):
StandardLogger.__init__(self, fileout, args)
self.nodes = []
def init(self):
self.fd.write("# created by "+Config.AppName+" at "+
_strtime(time.time())+
"\n# "+_("Get the newest version at ")+Config.Url+
"\n# "+_("Write comments and bugs to ")+Config.Email+
"\ngraph [\n directed 1\n")
self.starttime = time.time()
self.fd.write(_("# created by %s at %s\n" % (Config.AppName,
_strtime(self.starttime)))
self.fd.write(_("# Get the newest version at %s\n") % Config.Url)
self.fd.write(_("# Write comments and bugs to %s\n\n") % Config.Email)
self.fd.write("graph [\n directed 1\n")
self.fd.flush()
def newUrl(self, urlData):
@ -377,12 +367,12 @@ class GMLLogger(StandardLogger):
for node in self.nodes:
if node.url and not writtenNodes.has_key(node.url):
self.fd.write(" node [\n")
self.fd.write(" id "+`nodeid`+"\n")
self.fd.write(' label "'+node.url+'"'+"\n")
self.fd.write(" id %d\n" % nodeid)
self.fd.write(' label "%s"\n' % node.url)
if node.downloadtime:
self.fd.write(" dltime "+`node.downloadtime`+"\n")
self.fd.write(" dltime %d\n" % node.downloadtime)
if node.checktime:
self.fd.write(" checktime "+`node.checktime`+"\n")
self.fd.write(" checktime %d\n" % node.checktime)
self.fd.write(" extern ")
if node.extern: self.fd.write("1")
else: self.fd.write("0")
@ -393,48 +383,66 @@ class GMLLogger(StandardLogger):
for node in self.nodes:
if node.url and node.parentName:
self.fd.write(" edge [\n")
self.fd.write(' label "'+node.urlName+'"\n')
self.fd.write(" source "+`writtenNodes[node.parentName]`+
"\n")
self.fd.write(" target "+`writtenNodes[node.url]`+"\n")
self.fd.write(' label "%s"\n' % node.urlName)
self.fd.write(" source %d\n"%writtenNodes[node.parentName])
self.fd.write(" target %d\n" % writtenNodes[node.url])
self.fd.write(" valid ")
if node.valid: self.fd.write("1")
else: self.fd.write("0")
self.fd.write("\n ]\n")
# end of output
self.fd.write("]\n")
self.stoptime = time.time()
self.fd.write(_("# Stopped checking at %s (%.3f seconds)\n") %\
(_strtime(self.stoptime),
(self.stoptime - self.starttime)))
self.fd.flush()
self.fd = None
class SQLLogger(StandardLogger):
""" SQL output for PostgreSQL, not tested"""
def __init__(self, fileout=None, **args):
StandardLogger.__init__(self, fileout, args)
self.dbname = args['dbname']
self.commandsep = args['commandsep']
def init(self):
self.fd.write("-- created by "+Config.AppName+" at "+
_strtime(time.time())+
"\n-- "+_("Get the newest version at ")+Config.Url+
"\n-- "+_("Write comments and bugs to ")+Config.Email+"\n\n")
self.starttime = time.time()
self.fd.write(_("-- created by %s at %s\n" % (Config.AppName,
_strtime(self.starttime)))
self.fd.write(_("-- Get the newest version at %s\n") % Config.Url)
self.fd.write(_("-- Write comments and bugs to %s\n\n") % Config.Email)
self.fd.flush()
def newUrl(self, urlData):
self.fd.write("insert into linksdb(urlname,recursionlevel,parentname,"
"baseref,errorstring,validstring,warningstring,"
"infoString,valid,url,line,cached) values '"+
urlData.urlName+"',"+
`urlData.recursionLevel`+","+
StringUtil.sqlify(urlData.parentName)+","+
StringUtil.sqlify(urlData.baseRef)+","+
StringUtil.sqlify(urlData.errorString)+","+
StringUtil.sqlify(urlData.validString)+","+
StringUtil.sqlify(urlData.warningString)+","+
StringUtil.sqlify(urlData.infoString)+","+
`urlData.valid`+","+
StringUtil.sqlify(urlData.url)+","+
`urlData.line`+","+
`urlData.cached`+");\n")
self.fd.write("insert into %s(urlname,recursionlevel,parentname,"
"baseref,errorstring,validstring,warningstring,infoString,"
"valid,url,line,checktime,downloadtime,cached) values ('%s',"
"%d,'%s','%s','%s','%s','%s','%s',%d,'%s',%d,%d,%d,%d)%s\n" % \
(self.dbname,
StringUtil.sqlify(urlData.urlName),
urlData.recursionLevel,
StringUtil.sqlify(urlData.parentName),
StringUtil.sqlify(urlData.baseRef),
StringUtil.sqlify(urlData.errorString),
StringUtil.sqlify(urlData.validString),
StringUtil.sqlify(urlData.warningString),
StringUtil.sqlify(urlData.infoString),
urlData.valid,
StringUtil.sqlify(urlData.url),
urlData.line,
urlData.checktime,
urlData.downloadtime,
urlData.cached,
self.commandsep))
self.fd.flush()
def endOfOutput(self):
self.stoptime = time.time()
self.fd.write(_("-- Stopped checking at %s (%.3f seconds)\n") %\
(_strtime(self.stoptime),
(self.stoptime - self.starttime)))
self.fd = None
@ -443,14 +451,11 @@ class BlacklistLogger:
is working (again), it is removed from the list. So after n days
we have only links on the list which failed for n days.
"""
def __init__(self):
def __init__(self, fileout=None, **args):
self.blacklist = {}
self.filename = args['filename']
def init(self):
"""initialize the blacklist
We do nothing here because we have read the blacklist in the
linkchecker script already.
"""
pass
def newUrl(self, urlData):
@ -461,7 +466,7 @@ class BlacklistLogger:
def endOfOutput(self):
"""write the blacklist"""
fd = open(Config.BlacklistFile, "w")
fd = open(args['filename'], "w")
for url in self.blacklist.keys():
if self.blacklist[url] is None:
fd.write(url+"\n")
@ -471,28 +476,56 @@ class CSVLogger(StandardLogger):
""" CSV output. CSV consists of one line per entry. Entries are
separated by a semicolon.
"""
def __init__(self, fileout=None, **args):
StandardLogger.__init__(self, fileout, args)
self.separator = args['separator']
def init(self):
self.fd.write("# created by "+Config.AppName+" at "+
_strtime(time.time())+
"\n# you get "+Config.AppName+" at "+Config.Url+
"\n# write comments and bugs to "+Config.Email+"\n\n")
self.starttime = time.time()
self.fd.write(_("# created by %s at %s\n") % (Config.AppName,
_strtime(self.starttime)))
self.fd.write(_("# Get the newest version at %s\n") % Config.Url)
self.fd.write(_("# Write comments and bugs to %s\n\n") % Config.Email)
self.fd.write(_("# Format of the entries:\n")+\
"# urlname;\n"
"# recursionlevel;\n"
"# parentname;\n"
"# baseref;\n"
"# errorstring;\n"
"# validstring;\n"
"# warningstring;\n"
"# infostring;\n"
"# valid;\n"
"# url;\n"
"# line;\n"
"# downloadtime;\n"
"# checktime;\n"
"# cached;\n")
self.fd.flush()
def newUrl(self, urlData):
self.fd.write(`urlData.urlName`+';'+
`urlData.recursionLevel`+';'+
`urlData.parentName`+';'+
`urlData.baseRef`+';'+
`urlData.errorString`+';'+
`urlData.validString`+';'+
`urlData.warningString`+';'+
`urlData.infoString`+';'+
`urlData.valid`+';'+
`urlData.url`+';'+
`urlData.line`+';'+
`urlData.cached`+'\n')
self.fd.write(
"%s%s%d%s%s%s%s%s%s%s%s%s%s%s%s%s%d%s%s%s%d%%s%d%s%d%s%d\n" % (
urlData.urlName, self.separator,
urlData.recursionLevel, self.separator,
urlData.parentName, self.separator,
urlData.baseRef, self.separator,
urlData.errorString, self.separator,
urlData.validString, self.separator,
urlData.warningString, self.separator,
urlData.infoString, self.separator,
urlData.valid, self.separator,
urlData.url, self.separator,
urlData.line, self.separator,
urlData.downloadtime, self.separator,
urlData.checktime, self.separator,
urlData.cached))
self.fd.flush()
def endOfOutput(self):
self.stoptime = time.time()
self.fd.write(_("# Stopped checking at %s (%.3f seconds)\n") %\
(_strtime(self.stoptime),
(self.stoptime - self.starttime)))
self.fd = None

View file

@ -13,103 +13,100 @@ import getopt,re,string,os
import linkcheck,StringUtil
from linkcheck import _
Usage = _("""USAGE\tlinkchecker [options] file_or_url...
Usage = _("USAGE\tlinkchecker [options] file_or_url...\n"
"\n"
"OPTIONS\n"
"-a, --anchors\n"
" Check anchor references. Default is don't check anchors.\n"
"-D, --debug\n"
" Print additional debugging information.\n"
"-e regex, --extern=regex\n"
" Assume urls that match the given expression as extern.\n"
" Only intern HTTP links are checked recursively.\n"
"-f file, --config=file\n"
" Use file as configuration file. LinkChecker first searches\n"
" ~/.linkcheckerrc and then /etc/linkcheckerrc\n"
" (under Windows <path-to-program>\\linkcheckerrc).\n"
"-F name, --file-output=name\n"
" Same as output, but write to a file linkchecker-out.<name>.\n"
" If the file already exists, it is overwritten. You can specify\n"
" this option more than once. There is no file output for the\n"
" blacklist logger. Default is no file output.\n"
"-i regex, --intern=regex\n"
" Assume urls that match the given expression as intern.\n"
"-h, --help\n"
" Help me! Print usage information for this program.\n"
"-l, --allowdeny\n"
" Swap checking order to intern/extern. Default checking order\n"
" is extern/intern.\n"
"-N, --nntp-server\n"
" Specify an NNTP server for 'news:...' links. Default is the\n"
" environment variable NNTP_SERVER. If no host is given,\n"
" only the syntax of the link is checked.\n"
"-o name, --output=name\n"
" Specify output as %s.\n"
" Default is text.\n"
"-p pwd, --password=pwd\n"
" Try given password for HTML and FTP authorization.\n"
" Default is 'guest@'. See -u.\n"
"-P host[:port], --proxy=host[:port]\n"
" Use specified proxy for HTTP requests.\n"
" Standard port is 8080. Default is to use no proxy.\n"
"-q, --quiet\n"
" Quiet operation. This is only useful with -F.\n"
"-r depth, --recursion-level=depth\n"
" Check recursively all links up to given depth (depth >= 0).\n"
" Default depth is 1.\n"
"-R, --robots-txt\n"
" Obey the robots exclusion standard.\n"
"-s, --strict\n"
" Check only syntax of extern links, do not try to connect to them.\n"
"-t num, --threads=num\n"
" Generate no more than num threads. Default number of threads is 5.\n"
" To disable threading specify a non-positive number.\n"
"-u name, --user=name\n"
" Try given username for HTML and FTP authorization.\n"
" Default is 'anonymous'. See -p.\n"
"-V, --version\n"
" Print version and exit.\n"
"-v, --verbose\n"
" Log all checked URLs (implies -w). Default is to log only invalid\n"
" URLs.\n"
"-w, --warnings\n"
" Log warnings.\n"
"-W regex, --warning-regex=regex\n"
" Define a regular expression which prints a warning if it matches\n"
" any content of the checked link.\n"
" This applies of course only to pages which are valid, so we can\n"
" get their content.\n"
" You can use this to check for pages that contain some form of\n"
" error message, for example 'This page has moved' or\n"
" 'Oracle Application Server error'.\n"
" This option implies -w.\n") % linkcheck.Config.LoggerKeys)
OPTIONS
-a, --anchors
Check anchor references. Default is don't check anchors.
-D, --debug
Print additional debugging information.
-e regex, --extern=regex
Assume urls that match the given expression as extern.
Only intern HTTP links are checked recursively.
-f file, --config=file
Use file as configuration file. LinkChecker first searches
~/.linkcheckerrc and then /etc/linkcheckerrc
(under Windows <path-to-program>\\linkcheckerrc).
-F name, --file-output=name
Same as output, but write to a file linkchecker-out.<name>.
If the file already exists, it is overwritten. You can specify
this option more than once. There is no file output for the
blacklist logger. Default is no file output.
-i regex, --intern=regex
Assume urls that match the given expression as intern.
-h, --help
Help me! Print usage information for this program.
-l, --allowdeny
Swap checking order to intern/extern. Default checking order
is extern/intern.
-N, --nntp-server
Specify an NNTP server for 'news:...' links. Default is the
environment variable NNTP_SERVER. If no host is given,
only the syntax of the link is checked.
-o name, --output=name
Specify output as %s.
Default is text.
-p pwd, --password=pwd
Try given password for HTML and FTP authorization.
Default is 'guest@'. See -u.
-P host[:port], --proxy=host[:port]
Use specified proxy for HTTP requests.
Standard port is 8080. Default is to use no proxy.
-q, --quiet
Quiet operation. This is only useful with -F.
-r depth, --recursion-level=depth
Check recursively all links up to given depth (depth >= 0).
Default depth is 1.
-R, --robots-txt
Obey the robots exclusion standard.
-s, --strict
Check only syntax of extern links, do not try to connect to them.
-t num, --threads=num
Generate no more than num threads. Default number of threads is 5.
To disable threading specify a non-positive number.
-u name, --user=name
Try given username for HTML and FTP authorization.
Default is 'anonymous'. See -p.
-V, --version
Print version and exit.
-v, --verbose
Log all checked URLs (implies -w). Default is to log only invalid
URLs.
-w, --warnings
Log warnings.
-W regex, --warning-regex=regex
Define a regular expression which prints a warning if it matches
any content of the checked link.
This applies of course only to pages which are valid, so we can
get their content.
You can use this to check for pages that contain some form of
error message, for example "This page has moved" or
"Oracle Application Server error".
This option implies -w.
""") % linkcheck.Config.LoggerKeys
Notes = _("NOTES\n"
"o LinkChecker assumes an http:// resp. ftp:// link when a commandline URL\n"
" starts with 'www.' resp. 'ftp.'\n"
" You can also give local files as arguments\n"
"o If you have your system configured to automatically establish a\n"
" connection to the internet (e.g. with diald), it will connect when\n"
" checking links not pointing to your local host\n"
" Use the -s and -i options to prevent this (see EXAMPLES)\n"
"o Javascript links are currently ignored\n"
"o If your platform does not support threading, LinkChecker uses -t0\n"
"o You can supply multiple user/password pairs in a configuration file\n"
"o Cookies are not accepted by LinkChecker\n"
"o When checking 'news:' links the given NNTP host doesn't need to be the\n"
" same as the host of the user browsing your pages!\n")
Notes = _("""NOTES
o LinkChecker assumes an http:// resp. ftp:// link when a commandline URL
starts with "www." resp. "ftp."
You can also give local files as arguments
o If you have your system configured to automatically establish a
connection to the internet (e.g. with diald), it will connect when
checking links not pointing to your local host
Use the -s and -i options to prevent this (see EXAMPLES)
o Javascript links are currently ignored
o If your platform does not support threading, linkchecker assumes -t0
o You can supply multiple user/password pairs in a configuration file
o Cookies are not accepted by LinkChecker
o When checking 'news:' links the given news host doesn't need to be the
same as the host of the user browsing your pages!
""")
Examples = _("""EXAMPLES
o linkchecker -v -o html -r2 -s -i treasure.calvinsplayground.de \\
http://treasure.calvinsplayground.de/~calvin/ > sample.html
o Local files and syntactic sugar on the command line:
linkchecker c:\\temp\\test.html
linkchecker ../bla.html
linkchecker www.myhomepage.de
linkchecker -r0 ftp.linux.org
""")
Examples = _("EXAMPLES\n"
"o linkchecker -v -o html -r2 -s -i treasure.calvinsplayground.de \\\n"
" http://treasure.calvinsplayground.de/~calvin/ > sample.html\n"
"o Local files and syntactic sugar on the command line:\n"
" linkchecker c:\\temp\\test.html\n"
" linkchecker ../bla.html\n"
" linkchecker www.myhomepage.de\n"
" linkchecker -r0 ftp.linux.org\n")
def printVersion():
print linkcheck.Config.AppInfo
@ -285,8 +282,8 @@ if constructauth:
# construct the url list
# if we use blacklist mode, try to read ~/.blacklist
if config["log"].__class__ == linkcheck.Logging.BlacklistLogger and \
os.path.exists(linkcheck.Config.BlacklistFile):
args = open(linkcheck.Config.BlacklistFile).readlines()
os.path.exists(config['log'].filename):
args = open(config['log'].filename).readlines()
if len(args)==0:
print _("warning: no files or urls given")

View file

@ -8,9 +8,59 @@
#warnings=0
#quiet=0
#fileoutput = text colored html gml sql
# customize your output filenames
#fileoutputnames = {'text': 'myoutput.txt', 'html': 'mylinks.htm'}
# each Logger can have separate configuration parameters
# standard text logger
[text]
#filename=linkchecker-out.txt
# GML logger
[gml]
#filename=linkchecker-out.gml
# CSV logger
[csv]
#filename=linkchecker-out.csv
#separator=;
# SQL logger
[sql]
#filename=linkchecker-out.sql
#dbname=linksdb
#commandsep=;
# HTML logger
[html]
#filename=linkchecker-out.html
#colorbackground=
#colorurl=
#colorborder=
#colorlink=
#tablewarning=
#tableerror=
#tableok=
#rowend=
#myfont=
# ANSI color logger
[colored]
#filename=linkchecker-out.ansi
#colorparent=
#colorurl=
#colorreal=
#colorbase=
#colorvalid=
#colorinvalid=
#colorinfo=
#colorwarning=
#colordltime=
#colorreset=
# blacklist logger
[blacklist]
#filename=~/.blacklist
# checking options
[checking]
#threads=5
#anchors=0
@ -22,6 +72,7 @@
#warningregex="Request failed"
#nntpserver=news.uni-stuttgart.de
# filtering options
# for each extern link we can specify if it is strict or not
[filtering]
# strict avoid checking of local files

View file

@ -6,7 +6,7 @@
msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"POT-Creation-Date: 2000-05-30 12:02+0200\n"
"POT-Creation-Date: 2000-06-03 14:05+0200\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
@ -14,241 +14,387 @@ msgstr ""
"Content-Type: text/plain; charset=CHARSET\n"
"Content-Transfer-Encoding: ENCODING\n"
#: Logging.py:130 linkcheck/Logging.py:148
msgid ""
"\n"
"Thats it. "
msgstr ""
"\n"
"Das wars. "
#: Logging.py:143 linkcheck/Logging.py:161 linkcheck/Logging.py:251
#, c-format
msgid " (%.3f seconds)"
msgstr " (%.3f Sekunden)"
#: Logging.py:139 linkcheck/Logging.py:157 linkcheck/Logging.py:247
msgid " (cached)\n"
msgstr " (aus dem Cache)\n"
msgid " errors"
msgstr " Fehler"
#: linkcheck/Logging.py:248
msgid " found."
msgstr " gefunden"
#: Logging.py:140 linkcheck/Logging.py:158
msgid " found.\n"
msgstr " gefunden.\n"
#: Logging.py:135 linkcheck/Logging.py:153 linkcheck/Logging.py:243
msgid " warnings, "
msgstr " Warnungen, "
#: linkcheck/Logging.py:312 linkcheck/Logging.py:317
#, c-format
msgid "# Format of the entries:\n"
msgstr "# Format der Einträge:\n"
msgid "# Get the newest version at %s\n"
msgstr "# Die neueste Version gibt es unter %s\n"
msgid "# Stopped checking at %s (%.3f seconds)\n"
msgstr "# Beende Prüfen am %s (%.3f Sekunden)\n"
msgid "# Write comments and bugs to %s\n\n"
msgstr "# Kommentare und Fehler mailen Sie bitte an %s\n\n"
msgid "# created by %s at %s\n"
msgstr "# erstellt von %s am %s\n"
msgid "%.3f seconds"
msgstr "%.3f Sekunden"
#: Logging.py:104 Logging.py:107 linkcheck/Logging.py:122
#: linkcheck/Logging.py:125
#, c-format
msgid "%.3f seconds\n"
msgstr "%.3f Sekunden\n"
#: Logging.py:96 linkcheck/Logging.py:114 linkcheck/Logging.py:291
msgid ", line "
msgstr ", Zeile "
#: Logging.py:137 linkcheck/Logging.py:155 linkcheck/Logging.py:245
msgid "-- Get the newest version at %s\n"
msgstr "-- Die neueste Version gibt es unter %s\n"
msgid "-- Stopped checking at %s (%.3f seconds)\n"
msgstr "-- Beende Prüfen am %s (%.3f Sekunden)\n"
msgid "-- Write comments and bugs to %s\n\n"
msgstr "-- Kommentare und Fehler mailen Sie bitte an %s\n\n"
msgid "-- created by %s at %s\n"
msgstr "-- erstellt von %s am %s\n"
msgid "1 error"
msgstr "1 Fehler"
#: Logging.py:133 linkcheck/Logging.py:151 linkcheck/Logging.py:241
msgid "1 warning, "
msgstr "1 Warnung, "
#: linkcheck/HttpUrlData.py:75
msgid "Access denied by robots.txt, checked only syntax"
msgstr "Zugriff verweigert durch robots.txt; prüfe lediglich Syntax"
#: Logging.py:99 linkcheck/Logging.py:117 linkcheck/Logging.py:202
#: linkcheck/Logging.py:300
msgid "Base"
msgstr "Basis"
#: Logging.py:106 linkcheck/Logging.py:124 linkcheck/Logging.py:213
#: linkcheck/Logging.py:316
msgid "Check Time"
msgstr "Prüfzeit"
#: Logging.py:103 linkcheck/Logging.py:121 linkcheck/Logging.py:209
#: linkcheck/Logging.py:311
msgid "D/L Time"
msgstr "D/L Zeit"
#: linkcheck/HttpUrlData.py:115
#, c-format
#: linkchecker:102
msgid ""
"EXAMPLES\n"
"o linkchecker -v -o html -r2 -s -i treasure.calvinsplayground.de \\\n"
" http://treasure.calvinsplayground.de/~calvin/ > sample.html\n"
"o Local files and syntactic sugar on the command line:\n"
" linkchecker c:\\temp\\test.html\n"
" linkchecker ../bla.html\n"
" linkchecker www.myhomepage.de\n"
" linkchecker -r0 ftp.linux.org\n"
msgstr ""
"BEISPIELE\n"
"o linkchecker -v -o html -r2 -s -i treasure.calvinsplayground.de \\\n"
" http://treasure.calvinsplayground.de/~calvin/ > sample.html\n"
"o Lokale Dateien und syntaktischer Zucker in der Kommandozeile:\n"
" linkchecker c:\\temp\\test.html\n"
" linkchecker ../bla.html\n"
" linkchecker www.myhomepage.de\n"
" linkchecker -r0 ftp.linux.org\n"
msgid "Effective URL %s"
msgstr "Effektive URL %s"
#: linkcheck/UrlData.py:50 linkcheck/UrlData.py:68
msgid "Error"
msgstr "Fehler"
#: linkchecker:128
#, c-format
msgid "Error: %s\n"
msgstr "Fehler: %s\n"
#: Logging.py:82 linkcheck/Logging.py:100 linkcheck/Logging.py:253
#: linkcheck/Logging.py:365 linkcheck/Logging.py:415
msgid "Get the newest version at "
msgstr "Die neueste Version gibt es unter "
#: linkcheck/FtpUrlData.py:34
msgid "Got no answer from FTP server"
msgstr "Keine Antwort vom FTP Server"
#: linkcheck/NntpUrlData.py:41
#, c-format
msgid "Group %s has %s articles, range %s to %s"
msgstr "Gruppe %s hat %s Artikel, von %s bis %s"
#: linkcheck/HttpUrlData.py:119
msgid "HTTP 301 (moved permanent) encountered: you should update this link"
msgstr "HTTP 301 (moved permanent) gefunden: Sie sollten diesen Link aktualisieren"
#: linkcheck/HttpsUrlData.py:44
msgid "HTTPS url ignored"
msgstr "HTTPS url ignoriert"
#: linkcheck/NntpUrlData.py:31
msgid "Illegal NNTP link syntax"
msgstr "Illegale NNTP link Syntax"
#: linkchecker:208 linkchecker:216 linkchecker:247
#, c-format
msgid "Illegal argument '%s' for option "
msgstr "Ungültiges Argument '%s' für Option "
#: linkcheck/TelnetUrlData.py:32
msgid "Illegal telnet link syntax"
msgstr "Illegale telnet link Syntax"
#: Logging.py:109 linkcheck/Logging.py:127 linkcheck/Logging.py:218
#: linkcheck/Logging.py:321 linkcheck/Logging.py:325
msgid "Info"
msgstr "Info"
#: linkcheck/JavascriptUrlData.py:25
msgid "Javascript url ignored"
msgstr "Javascript url ignoriert"
#: linkcheck/HttpUrlData.py:73
msgid "Missing '/' at end of URL"
msgstr "Fehlendes '/' am Ende der URL"
#: linkcheck/NntpUrlData.py:37
msgid "No NNTP server specified, checked only syntax"
msgstr "Kein NNTP Server angegeben; prüfe lediglich Syntax"
#: linkcheck/MailtoUrlData.py:66
msgid "No adresses found"
msgstr "Keine Adressen gefunden"
#: linkcheck/MailtoUrlData.py:91
#, c-format
msgid "None of the mail hosts for %s accepts an SMTP connection: %s"
msgstr "Keiner der Mail Hosts für %s akzeptiert eine SMTP Verbindung: %s"
#: Logging.py:95 linkcheck/Logging.py:113 linkcheck/Logging.py:196
#: linkcheck/Logging.py:275
msgid "Parent URL"
msgstr "Vater URL"
#: Logging.py:101 linkcheck/Logging.py:119 linkcheck/Logging.py:205
#: linkcheck/Logging.py:306
msgid "Real URL"
msgstr "Tats. URL"
#: Logging.py:120 linkcheck/Logging.py:138 linkcheck/Logging.py:227
#: linkcheck/Logging.py:231 linkcheck/Logging.py:339
msgid "Result"
msgstr "Ergebnis"
#: Logging.py:84 linkcheck/Logging.py:102 linkcheck/Logging.py:179
msgid "Start checking at "
msgstr "Beginne Prüfen am "
#: Logging.py:142 linkcheck/Logging.py:160 linkcheck/Logging.py:250
msgid "Stopped checking at "
msgstr "Beende Prüfen am "
#: linkcheck/Logging.py:239
msgid "Thats it. "
msgstr "Das wars. "
#: linkchecker:129
msgid "Execute 'linkchecker -h' for help\n"
msgstr "Führen Sie 'linkchecker -h' aus um Hilfe zu erhalten\n"
#: Logging.py:89 linkcheck/Logging.py:107 linkcheck/Logging.py:289
msgid "Get the newest version at %s\n"
msgstr "Die neueste Version gibt es unter"
msgid "Got no answer from FTP server"
msgstr "Keine Antwort vom FTP Server"
msgid "Group %s has %s articles, range %s to %s"
msgstr "Gruppe %s hat %s Artikel, von %s bis %s"
msgid "HTTP 301 (moved permanent) encountered: you should update this link"
msgstr ""
"HTTP 301 (moved permanent) gefunden: Sie sollten diesen Link aktualisieren"
msgid "HTTPS url ignored"
msgstr "HTTPS url ignoriert"
msgid "Illegal NNTP link syntax"
msgstr "Illegale NNTP link Syntax"
msgid "Illegal argument '%s' for option "
msgstr "Ungültiges Argument '%s' für Option "
msgid "Illegal telnet link syntax"
msgstr "Illegale telnet link Syntax"
msgid "Info"
msgstr "Info"
msgid "Javascript url ignored"
msgstr "Javascript url ignoriert"
msgid "Missing '/' at end of URL"
msgstr "Fehlendes '/' am Ende der URL"
msgid ""
"NOTES\n"
"o LinkChecker assumes an http:// resp. ftp:// link when a commandline URL\n"
" starts with 'www.' resp. 'ftp.'\n"
" You can also give local files as arguments\n"
"o If you have your system configured to automatically establish a\n"
" connection to the internet (e.g. with diald), it will connect when\n"
" checking links not pointing to your local host\n"
" Use the -s and -i options to prevent this (see EXAMPLES)\n"
"o Javascript links are currently ignored\n"
"o If your platform does not support threading, LinkChecker uses -t0\n"
"o You can supply multiple user/password pairs in a configuration file\n"
"o Cookies are not accepted by LinkChecker\n"
"o When checking 'news:' links the given NNTP host doesn't need to be the\n"
" same as the host of the user browsing your pages!\n"
msgstr ""
"KOMMENTARE\n"
"o LinkChecker verwendet eine http:// bzw. ftp:// URL wenn eine URL auf der\n"
" Kommandozeile mit 'www.' bzw. 'ftp.' beginnt\n"
" Sie können auch lokale Dateien als Argumente angeben\n"
"o Falls sich Ihr System automatisch mit dem Internet verbindet\n"
" (z.B. mit diald), wird es dies tun wenn Sie Links prüfen, die nicht\n"
" auf Ihren lokalen Rechner verweisen\n"
" Benutzen Sie die Optionen -s und -i um dies zu verhindern (siehe BEISPIELE)\n"
"o Javascript Links werden zur Zeit ignoriert\n"
"o Wenn Ihr System keine Threads unterstützt verwendet LinkChecker die\n"
" Option -t0\n"
"o Sie können mehrere user/password Paare in einer Konfigurationsdatei\n"
" angeben\n"
"o Cookies werden von LinkChecker nicht akzeptiert\n"
"o Beim Prüfen von 'news:' Links muß der angegebene NNTP Rechner nicht\n"
" unbedingt derselbe wie der des Benutzers sein!\n"
msgid "No NNTP server specified, checked only syntax"
msgstr "Kein NNTP Server angegeben; prüfe lediglich Syntax"
msgid "No adresses found"
msgstr "Keine Adressen gefunden"
msgid "None of the mail hosts for %s accepts an SMTP connection: %s"
msgstr "Keiner der Mail Hosts für %s akzeptiert eine SMTP Verbindung: %s"
msgid "Parent URL"
msgstr "Vater URL"
msgid "Real URL"
msgstr "Tats. URL"
msgid "Result"
msgstr "Ergebnis"
msgid "Start checking at %s\n"
msgstr "Beginne Prüfen am %s\n"
msgid "Stopped checking at %s (%.3f seconds)\n"
msgstr "Beende Prüfen am %s (%.3f Sekunden)\n"
msgid "Thats it. "
msgstr "Das wars. "
msgid "URL"
msgstr "URL"
#: linkcheck/UrlData.py:125
msgid "URL is null or empty"
msgstr "URL ist Null oder leer"
#: linkchecker:16
msgid "USAGE\tlinkchecker [options] file_or_url..."
msgid ""
"USAGE\tlinkchecker [options] file_or_url...\n"
"\n"
"OPTIONS\n"
"-a, --anchors\n"
" Check anchor references. Default is don't check anchors.\n"
"-D, --debug\n"
" Print additional debugging information.\n"
"-e regex, --extern=regex\n"
" Assume urls that match the given expression as extern.\n"
" Only intern HTTP links are checked recursively.\n"
"-f file, --config=file\n"
" Use file as configuration file. LinkChecker first searches\n"
" ~/.linkcheckerrc and then /etc/linkcheckerrc\n"
" (under Windows <path-to-program>\\linkcheckerrc).\n"
"-F name, --file-output=name\n"
" Same as output, but write to a file linkchecker-out.<name>.\n"
" If the file already exists, it is overwritten. You can specify\n"
" this option more than once. There is no file output for the\n"
" blacklist logger. Default is no file output.\n"
"-i regex, --intern=regex\n"
" Assume urls that match the given expression as intern.\n"
"-h, --help\n"
" Help me! Print usage information for this program.\n"
"-l, --allowdeny\n"
" Swap checking order to intern/extern. Default checking order\n"
" is extern/intern.\n"
"-N, --nntp-server\n"
" Specify an NNTP server for 'news:...' links. Default is the\n"
" environment variable NNTP_SERVER. If no host is given,\n"
" only the syntax of the link is checked.\n"
"-o name, --output=name\n"
" Specify output as %s.\n"
" Default is text.\n"
"-p pwd, --password=pwd\n"
" Try given password for HTML and FTP authorization.\n"
" Default is 'guest@'. See -u.\n"
"-P host[:port], --proxy=host[:port]\n"
" Use specified proxy for HTTP requests.\n"
" Standard port is 8080. Default is to use no proxy.\n"
"-q, --quiet\n"
" Quiet operation. This is only useful with -F.\n"
"-r depth, --recursion-level=depth\n"
" Check recursively all links up to given depth (depth >= 0).\n"
" Default depth is 1.\n"
"-R, --robots-txt\n"
" Obey the robots exclusion standard.\n"
"-s, --strict\n"
" Check only syntax of extern links, do not try to connect to them.\n"
"-t num, --threads=num\n"
" Generate no more than num threads. Default number of threads is 5.\n"
" To disable threading specify a non-positive number.\n"
"-u name, --user=name\n"
" Try given username for HTML and FTP authorization.\n"
" Default is 'anonymous'. See -p.\n"
"-V, --version\n"
" Print version and exit.\n"
"-v, --verbose\n"
" Log all checked URLs (implies -w). Default is to log only invalid\n"
" URLs.\n"
"-w, --warnings\n"
" Log warnings.\n"
"-W regex, --warning-regex=regex\n"
" Define a regular expression which prints a warning if it matches\n"
" any content of the checked link.\n"
" This applies of course only to pages which are valid, so we can\n"
" get their content.\n"
" You can use this to check for pages that contain some form of\n"
" error message, for example 'This page has moved' or\n"
" 'Oracle Application Server error'.\n"
" This option implies -w.\n"
msgstr ""
"BENUTZUNG\tlinkchecker [options] datei_oder_url...\n"
"\n"
"OPTIONEN\n"
"-a, --anchors\n"
" Prüfe interne Verweise. Standard ist keine Prüfung.\n"
"-D, --debug\n"
" Drucke zusätzlich Debug Information.\n"
"-e regex, --extern=regex\n"
" Behandle URLs welche diesen Ausdruck matchen als extern.\n"
" Nur interne HTTP Links werden rekursiv geprüft.\n"
"-f file, --config=file\n"
" Benutze file als Konfigurationsdatei. LinkChecker sucht zuerst\n"
" ~/.linkcheckerrc und dann /etc/linkcheckerrc\n"
" (unter Windows <Pfad-zum-Programm>\\linkcheckerrc).\n"
"-F name, --file-output=name\n"
" Wie --output, aber schreibe in eine Datei (Standard ist\n"
" linkchecker-out.<name>)\n"
" Falls die Datei bereits existiert wird sie überschrieben.\n"
" Sie können diese Option mehr als einmal verwenden. Es gibt keine\n"
" Ausgabedatei für den blacklist Logger. Standard ist keine Ausgabe\n"
" in eine Datei.\n"
"-i regex, --intern=regex\n"
" Behandle URLs welche diese Ausdruck matchen als intern.\n"
"-h, --help\n"
" Hilf mir! Druche Nutzungsinformation für dieses Programm.\n"
"-l, --allowdeny\n"
" Vertausche die Prüfreihenfolge zu intern/extern. Standardmäßige\n"
" Reihenfolge ist extern/intern.\n"
"-N, --nntp-server\n"
" Gibt ein NNTP Rechner für 'news:...' Links. Standard ist die\n"
" Umgebungsvariable NNTP_SERVER. Falls kein Rechner angegeben ist,\n"
" wird lediglich auf korrekte Syntax des Links geprüft.\n"
"-o name, --output=name\n"
" Verwende die Ausgabe %s.\n"
" Standard ist text.\n"
"-p pwd, --password=pwd\n"
" Verwende das angegebene Passwort für HTML und FTP Authorisation.\n"
" Standard ist 'guest@'. Siehe -u.\n"
"-P host[:port], --proxy=host[:port]\n"
" Verwende den angegebenen Proxy für HTTP Anfragen.\n"
" Standard Port ist 8080. Standard ist keine Verwendung eines Proxy.\n"
"-q, --quiet\n"
" Keine Ausgabe. Dies ist nur in Verbindung mit -F nützlich.\n"
"-r depth, --recursion-level=depth\n"
" Prüfe rekursiv alle Verweise bis zu der angegebenen Tiefe\n"
" (depth >= 0). Standard Tiefe ist 1.\n"
"-R, --robots-txt\n"
" Befolge den Robots Exclusion Standard.\n"
"-s, --strict\n"
" Prüfe lediglich die Syntax von externen Verweisen. Es wird keine.\n"
" Verbindung zu diesen Rechner aufgebaut.\n"
"-t num, --threads=num\n"
" Generiere nicht mehr als num Threads. Standard Anzahl von Threads\n"
" ist 5. Um Threading auszuschalten geben Sie eine nichtpositive\n"
" Anzahl an.\n"
"-u name, --user=name\n"
" Verwende den angegebenen Benutzernamen für HTML und FTP\n"
" Authorisation. Standard ist 'anonymous'. Siehe -p.\n"
"-V, --version\n"
" Drucke die Version und beende das Programm.\n"
"-v, --verbose\n"
" Logge alle geprüften URLs (nimmt -w an). Standard ist es, nur\n"
" fehlerhafte URLs zu loggen.\n"
"-w, --warnings\n"
" Logge Warnungen.\n"
"-W regex, --warning-regex=regex\n"
" Definieren Sie einen regulären Ausdruck, der eine Warnung ausdruckt\n"
" falls er den Inhalt eines geprüften Verweises matcht.\n"
" This applies of course only to pages which are valid, so we can\n"
" get their content.\n"
" You can use this to check for pages that contain some form of\n"
" error message, for example 'This page has moved' or\n"
" 'Oracle Application Server error'.\n"
" This option implies -w.\n"
#: linkcheck/UrlData.py:51 linkcheck/UrlData.py:72
msgid "Valid"
msgstr "Gültig"
#: Logging.py:115 linkcheck/Logging.py:133 linkcheck/Logging.py:223
#: linkcheck/Logging.py:334
msgid "Warning"
msgstr "Warnung"
#: Logging.py:83 linkcheck/Logging.py:101 linkcheck/Logging.py:255
#: linkcheck/Logging.py:366 linkcheck/Logging.py:416
msgid "Write comments and bugs to "
msgstr "Kommentare und Fehler mailen Sie bitte an "
#: linkcheck/MailtoUrlData.py:107
msgid "Write comments and bugs to %s\n"
msgstr "Kommentare und Fehler mailen Sie bitte an %s\n"
msgid "could not split the mail adress"
msgstr "konnte Mail Adresse nicht splitten"
#: linkcheck/HostCheckingUrlData.py:44
msgid "found"
msgstr "gefunden"
#: linkcheck/MailtoUrlData.py:96
#, c-format
msgid "found mail host %s"
msgstr "Mail host %s gefunden"
#: linkcheck/UrlData.py:148
msgid "outside of domain filter, checked only syntax"
msgstr "außerhalb des Domain Filters; prüfe lediglich Syntax"
#: linkcheck/RobotsTxt.py:94
#, c-format
msgid "robots.txt:%d: allow without user agents"
msgstr "robots.txt:%d: allow ohne user agents"
#: linkcheck/RobotsTxt.py:85
#, c-format
msgid "robots.txt:%d: disallow without user agents"
msgstr "robots.txt:%d: disallow ohne user agents"
#: linkcheck/RobotsTxt.py:77
#, c-format
msgid "robots.txt:%d: user-agent in the middle of rules"
msgstr "robots.txt:%d: user-agent zwischen Regeln"
#: linkchecker:292
msgid "warning: no files or urls given"
msgstr "Warnung: keine Dateien oder URLs angegeben"

View file

@ -18,6 +18,7 @@
from distutils.core import setup
from distutils.dist import Distribution
from distutils.extension import Extension
from Template import Template
import sys,os,string
@ -35,10 +36,10 @@ class LCDistribution(Distribution):
if incldir:
self.announce("SSL header file ssl.h found, "
"enabling SSL compilation.")
self.ext_modules = [('ssl', {'sources': ['ssl.c'],
'include_dirs': [incldir],
'library_dirs': ['/usr/lib'],
'libs': ['ssl']})]
self.ext_modules = [Extension('ssl', ['ssl.c'],
include_dirs=[incldir],
library_dirs=['/usr/lib'],
libraries=['ssl'])]
else:
self.announce("SSL header file ssl.h missing, "
"disabling SSL compilation.\n"