bitmap file for Distutils 1.0.2 installer

git-svn-id: https://linkchecker.svn.sourceforge.net/svnroot/linkchecker/trunk/linkchecker@269 e7d03fd6-7b0d-0410-9947-9c21f3af8025
This commit is contained in:
calvin 2001-05-25 20:13:08 +00:00
parent 74b1d438da
commit 098a279696
9 changed files with 368 additions and 332 deletions

View file

@ -1,38 +1,71 @@
# $Id$
import sys, re, getopt, socket
import DNS,DNS.Lib,DNS.Type,DNS.Class,DNS.Opcode
#import asyncore

# Module-wide default settings for DNS requests.  The 'server' and
# 'search_domains' lists are filled in by the init_dns_resolver*()
# functions below; everything else is a fixed protocol default.
defaults = {
    'protocol': 'udp',
    'port': 53,
    'opcode': DNS.Opcode.QUERY,
    'qtype': DNS.Type.A,
    'rd': 1,
    'timing': 1,
    'server': [],
    'search_domains': [],
}
def init_dns_resolver():
    """Populate the module-level ``defaults`` dict with nameservers and
    search domains, using the platform-specific helper for the host OS.

    After platform detection, guarantee sane fallbacks: an empty search
    domain and 127.0.0.1 as nameserver when nothing was found.
    """
    global defaults
    import os
    if os.name == "posix":
        init_dns_resolver_posix()
    elif os.name == "nt":
        init_dns_resolver_nt()
    # fallbacks so later lookups always have at least one entry
    if not defaults['search_domains']:
        defaults['search_domains'].append('')
    if not defaults['server']:
        defaults['server'].append('127.0.0.1')
def init_dns_resolver_posix():
    """Parse /etc/resolv.conf and record nameservers, search domains and
    the local domain in the module-level ``defaults`` dict.

    Recognized directives: ``search`` (appends '.'-prefixed, lowercased
    domains to defaults['search_domains']), ``nameserver`` (appends to
    defaults['server']) and ``domain`` (sets defaults['domain']).
    """
    global defaults
    for line in open('/etc/resolv.conf', 'r').readlines():
        line = line.strip()
        # skip blank lines and comments (';' or '#')
        if (not line) or line[0] == ';' or line[0] == '#':
            continue
        m = re.match(r'^search\s+\.?(.*)$', line)
        if m:
            for domain in m.group(1).split():
                # bug fix: lower(domain) was a NameError; use the
                # string method instead
                defaults['search_domains'].append('.' + domain.lower())
        m = re.match(r'^nameserver\s+(\S+)\s*$', line)
        if m:
            defaults['server'].append(m.group(1))
        m = re.match(r'^domain\s+(\S+)\s*$', line)
        if m:
            defaults['domain'] = m.group(1)
def init_dns_resolver_nt():
    """Read Windows TCP/IP network configuration from the registry and
    record nameserver and search domains in the module-level
    ``defaults`` dict.

    Silently returns when the Tcpip Parameters registry key is missing.
    NOTE(review): ``winreg.key_handle`` and ``key.get`` are not the
    stdlib ``winreg`` API — presumably a project wrapper module; confirm.
    """
    import winreg
    global defaults
    try:
        key = winreg.key_handle(winreg.HKEY_LOCAL_MACHINE,
                  r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters")
    except WindowsError:
        # key not found :(
        return
    # DHCP-assigned nameserver takes precedence over a static one
    if key.get("EnableDhcp"):
        nameserver = key.get("DhcpNameServer")
    else:
        nameserver = key.get("NameServer")
    if nameserver:
        defaults['server'].append(nameserver)
    # assumes the wrapper returns SearchList as an iterable of domain
    # strings (raw registry value is comma-separated) — TODO confirm
    searchlist = key.get("SearchList", [])
    for domain in searchlist:
        # bug fix: lower(domain) was a NameError; use the string method
        defaults['search_domains'].append('.' + domain.lower())
class DnsRequest:
def __init__(self,*name,**args):
@ -125,10 +158,10 @@ class DnsRequest:
self.port = self.args['port']
opcode = self.args['opcode']
rd = self.args['rd']
server=self.args['server']
server = self.args['server']
if type(self.args['qtype']) == type('foo'):
try:
qtype = eval(string.upper(self.args['qtype']), DNS.Type.__dict__)
qtype = eval(self.args['qtype'].upper(), DNS.Type.__dict__)
except (NameError,SyntaxError):
raise DNS.Error,'unknown query type'
else:

View file

@ -1,4 +1,3 @@
# -*- Mode: Python; tab-width: 4 -*-
# $Id$
# Author: Sam Rushing <rushing@nightmare.com>

View file

@ -16,7 +16,6 @@ def mxlookup(name):
convenience routine for doing an MX lookup of a name. returns a
sorted list of (preference, mail exchanger) records
"""
a = Base.DnsRequest(name, qtype = 'mx').req().answers
l = map(lambda x:x['data'], a)
l.sort()

View file

@ -5,7 +5,7 @@ include lc.cgi lc.fcgi lc.sz_fcgi
include Makefile
include create.sql
include debian/rules debian/changelog debian/copyright debian/control
include debian/linkchecker.* debian/linkchecker-ssl.* debian/*-ssl
include debian/linkchecker.*
include DNS/README
include test/*.py test/*.txt
include test/output/test_* test/html/*.html

View file

@ -42,7 +42,7 @@ config:
dist: locale config
./setup.py sdist --formats=gztar,zip # bdist_rpm
# extra run without SSL compilation
./setup.py bdist_wininst
./setup.py bdist_wininst --bitmap="guruguru.bmp"
deb:
# cleandeb because distutils choke on dangling symlinks

BIN
guruguru.bmp Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 47 KiB

302
po/de.po
View file

@ -175,6 +175,157 @@ msgstr "illegale recursionlevel Nummer %d"
msgid "%.3f seconds"
msgstr "%.3f Sekunden"
#, fuzzy
msgid ""
"USAGE\tlinkchecker [options] file-or-url...\n"
"\n"
"OPTIONS\n"
"For single-letter option arguments the space is not a necessity. So\n"
"'-o colored' is the same as '-ocolored'.\n"
"-a, --anchors\n"
" Check anchor references. Default is don't check anchors.\n"
"-d, --denyallow\n"
" Swap checking order to extern/intern. Default checking order\n"
" is intern/extern.\n"
"-D, --debug\n"
" Print additional debugging information.\n"
"-e regex, --extern=regex\n"
" Assume urls that match the given expression as extern.\n"
" Only intern HTML links are checked recursively.\n"
"-f file, --config=file\n"
" Use file as configuration file. LinkChecker first searches\n"
" ~/.linkcheckerrc and then /etc/linkcheckerrc\n"
" (under Windows <path-to-program>\\linkcheckerrc).\n"
"-F type[/filename], --file-output=type[/filename]\n"
" Same as -o, but write to a file linkchecker-out.<type>\n"
" or <filename> if specified. If the file already exists, it\n"
" is overwritten. You can specify this option more than once.\n"
" There is no file output for the blacklist logger. Default is\n"
" no file output.\n"
"-i regex, --intern=regex\n"
" Assume URLs that match the given expression as intern.\n"
" LinkChecker descends recursively only to intern URLs, not to "
"extern.\n"
"-h, --help\n"
" Help me! Print usage information for this program.\n"
"-N server, --nntp-server=server\n"
" Specify an NNTP server for 'news:...' links. Default is the\n"
" environment variable NNTP_SERVER. If no host is given,\n"
" only the syntax of the link is checked.\n"
"-o type, --output=type\n"
" Specify output type as %s.\n"
" Default type is text.\n"
"-p pwd, --password=pwd\n"
" Try password pwd for HTML and FTP authorization.\n"
" Default password is 'joe@'. See also -u.\n"
"-P secs, --pause=secs\n"
" Pause <secs> seconds between each url check. This option\n"
"\timplies -t0.\n"
" Default is no pause between requests.\n"
"-q, --quiet\n"
" Quiet operation. This is only useful with -F.\n"
"-r depth, --recursion-level=depth\n"
" Check recursively all links up to given depth (depth >= 0).\n"
" Default depth is 1.\n"
"-R, --robots-txt\n"
" Obey the robots exclusion standard.\n"
"-s, --strict\n"
" Check only syntax of extern links, do not try to connect to them.\n"
"-t num, --threads=num\n"
" Generate no more than num threads. Default number of threads is 5.\n"
" To disable threading specify a non-positive number.\n"
"-u name, --user=name\n"
" Try username name for HTML and FTP authorization.\n"
" Default is 'anonymous'. See also -p.\n"
"-V, --version\n"
" Print version and exit.\n"
"-v, --verbose\n"
" Log all checked URLs (implies -w). Default is to log only invalid\n"
" URLs.\n"
"-w, --warnings\n"
" Log warnings.\n"
"-W regex, --warning-regex=regex\n"
" Define a regular expression which prints a warning if it matches\n"
" any content of the checked link.\n"
" This applies of course only to pages which are valid, so we can\n"
" get their content.\n"
" Use this to check for pages that contain some form of error\n"
" message, for example 'This page has moved' or 'Oracle\n"
" Application Server error'.\n"
" This option implies -w.\n"
"\") % linkcheck.Config.LoggerKeys\n"
msgstr ""
"BENUTZUNG\tlinkchecker [options] datei_oder_url...\n"
"\n"
"OPTIONEN\n"
"-a, --anchors\n"
" Prüfe interne URLs. Standard ist keine Prüfung.\n"
"-d, --denyallow\n"
" Tausche die Prüfreihenfolge zu extern/intern. Standardmäßige\n"
" Reihenfolge ist intern/extern.\n"
"-D, --debug\n"
" Drucke zusätzlich Debug Information.\n"
"-e regex, --extern=regex\n"
" Behandle URLs welche diesen Ausdruck matchen als extern.\n"
" Nur interne HTTP Links werden rekursiv geprüft.\n"
"-f file, --config=file\n"
" Benutze file als Konfigurationsdatei. LinkChecker sucht zuerst\n"
" ~/.linkcheckerrc und dann /etc/linkcheckerrc\n"
" (unter Windows <Pfad-zum-Programm>\\linkcheckerrc).\n"
"-F type, --file-output=type\n"
" Wie --output, aber schreibe in eine Datei (Standard ist\n"
" linkchecker-out.<type>)\n"
" Falls die Datei bereits existiert wird sie überschrieben.\n"
" Sie können diese Option mehr als einmal verwenden. Es gibt keine\n"
" Ausgabedatei für den blacklist Logger. Standard ist keine Ausgabe\n"
" in eine Datei.\n"
"-i regex, --intern=regex\n"
" Behandle URLs welche diese Ausdruck matchen als intern.\n"
"-h, --help\n"
" Hilf mir! Drucke Nutzungsinformation für dieses Programm.\n"
"-N, --nntp-server\n"
" Gibt ein NNTP Rechner für 'news:...' Links. Standard ist die\n"
" Umgebungsvariable NNTP_SERVER. Falls kein Rechner angegeben ist,\n"
" wird lediglich auf korrekte Syntax des Links geprüft.\n"
"-o type, --output=type\n"
" Verwende die Ausgabe als %s.\n"
" Standard Ausgabe ist text.\n"
"-p pwd, --password=pwd\n"
" Verwende das angegebene Passwort für HTML und FTP Authorisation.\n"
" Standard ist 'guest@'. Siehe -u.\n"
"-q, --quiet\n"
" Keine Ausgabe. Dies ist nur in Verbindung mit -F nützlich.\n"
"-r depth, --recursion-level=depth\n"
" Prüfe rekursiv alle URLs bis zu der angegebenen Tiefe\n"
" (depth >= 0). Standard Tiefe ist 1.\n"
"-R, --robots-txt\n"
" Befolge den Robots Exclusion Standard.\n"
"-s, --strict\n"
" Prüfe lediglich die Syntax von externen URLs. Es wird keine.\n"
" Verbindung zu diesen Rechner aufgebaut.\n"
"-t num, --threads=num\n"
" Generiere nicht mehr als num Threads. Standard Anzahl von Threads\n"
" ist 5. Um Threading auszuschalten geben Sie eine nichtpositive\n"
" Anzahl an.\n"
"-u name, --user=name\n"
" Verwende den angegebenen Benutzernamen für HTML und FTP\n"
" Authorisation. Standard ist 'anonymous'. Siehe -p.\n"
"-V, --version\n"
" Drucke die Version und beende das Programm.\n"
"-v, --verbose\n"
" Logge alle geprüften URLs (impliziert -w). Standard ist es, nur\n"
" fehlerhafte URLs zu loggen.\n"
"-w, --warnings\n"
" Logge Warnungen.\n"
"-W regex, --warning-regex=regex\n"
" Definieren Sie einen regulären Ausdruck, der eine Warnung ausdruckt\n"
" falls er den Inhalt einer geprüften URL matcht.\n"
" Dies gilt natürlich nur für gültige Seiten deren Inhalt wir\n"
" bekommen können.\n"
" Sie können dies verwenden, um Seiten mit Fehlermeldungen wie z.B.\n"
" 'Diese Seite ist umgezogen' oder 'Oracle Server Fehler'.\n"
" Diese Option impliziert -w.\n"
msgid "Execute 'linkchecker -h' for help\n"
msgstr "Führen Sie 'linkchecker -h' aus, um Hilfe zu erhalten\n"
@ -320,157 +471,6 @@ msgstr "Ergebnis"
msgid "No NNTP server specified, skipping this URL"
msgstr "Kein NNTP Server angegeben; prüfe lediglich Syntax"
#, fuzzy
msgid ""
"USAGE\tlinkchecker [options] file-or-url...\n"
"\n"
"OPTIONS\n"
"For single-letter option arguments the space is not a necessity. So\n"
"'-o colored' is the same as '-ocolored'.\n"
"-a, --anchors\n"
" Check anchor references. Default is don't check anchors.\n"
"-d, --denyallow\n"
" Swap checking order to extern/intern. Default checking order\n"
" is intern/extern.\n"
"-D, --debug\n"
" Print additional debugging information.\n"
"-e regex, --extern=regex\n"
" Assume urls that match the given expression as extern.\n"
" Only intern HTML links are checked recursively.\n"
"-f file, --config=file\n"
" Use file as configuration file. LinkChecker first searches\n"
" ~/.linkcheckerrc and then /etc/linkcheckerrc\n"
" (under Windows <path-to-program>\\linkcheckerrc).\n"
"-F type[/filename], --file-output=type[/filename]\n"
" Same as -o, but write to a file linkchecker-out.<type>\n"
" or <filename> if specified. If the file already exists, it\n"
" is overwritten. You can specify this option more than once.\n"
" There is no file output for the blacklist logger. Default is\n"
" no file output.\n"
"-i regex, --intern=regex\n"
" Assume URLs that match the given expression as intern.\n"
" LinkChecker descends recursively only to intern URLs, not to "
"extern.\n"
"-h, --help\n"
" Help me! Print usage information for this program.\n"
"-N server, --nntp-server=server\n"
" Specify an NNTP server for 'news:...' links. Default is the\n"
" environment variable NNTP_SERVER. If no host is given,\n"
" only the syntax of the link is checked.\n"
"-o type, --output=type\n"
" Specify output type as %s.\n"
" Default type is text.\n"
"-p pwd, --password=pwd\n"
" Try password pwd for HTML and FTP authorization.\n"
" Default password is 'joe@'. See also -u.\n"
"-P secs, --pause=secs\n"
" Pause <secs> seconds between each url check. Don't forget to\n"
"\tdisable threading with -t0 when you really want to wait.\n"
" Default is no pause between requests.\n"
"-q, --quiet\n"
" Quiet operation. This is only useful with -F.\n"
"-r depth, --recursion-level=depth\n"
" Check recursively all links up to given depth (depth >= 0).\n"
" Default depth is 1.\n"
"-R, --robots-txt\n"
" Obey the robots exclusion standard.\n"
"-s, --strict\n"
" Check only syntax of extern links, do not try to connect to them.\n"
"-t num, --threads=num\n"
" Generate no more than num threads. Default number of threads is 5.\n"
" To disable threading specify a non-positive number.\n"
"-u name, --user=name\n"
" Try username name for HTML and FTP authorization.\n"
" Default is 'anonymous'. See also -p.\n"
"-V, --version\n"
" Print version and exit.\n"
"-v, --verbose\n"
" Log all checked URLs (implies -w). Default is to log only invalid\n"
" URLs.\n"
"-w, --warnings\n"
" Log warnings.\n"
"-W regex, --warning-regex=regex\n"
" Define a regular expression which prints a warning if it matches\n"
" any content of the checked link.\n"
" This applies of course only to pages which are valid, so we can\n"
" get their content.\n"
" Use this to check for pages that contain some form of error\n"
" message, for example 'This page has moved' or 'Oracle\n"
" Application Server error'.\n"
" This option implies -w.\n"
"\") % linkcheck.Config.LoggerKeys\n"
msgstr ""
"BENUTZUNG\tlinkchecker [options] datei_oder_url...\n"
"\n"
"OPTIONEN\n"
"-a, --anchors\n"
" Prüfe interne URLs. Standard ist keine Prüfung.\n"
"-d, --denyallow\n"
" Tausche die Prüfreihenfolge zu extern/intern. Standardmäßige\n"
" Reihenfolge ist intern/extern.\n"
"-D, --debug\n"
" Drucke zusätzlich Debug Information.\n"
"-e regex, --extern=regex\n"
" Behandle URLs welche diesen Ausdruck matchen als extern.\n"
" Nur interne HTTP Links werden rekursiv geprüft.\n"
"-f file, --config=file\n"
" Benutze file als Konfigurationsdatei. LinkChecker sucht zuerst\n"
" ~/.linkcheckerrc und dann /etc/linkcheckerrc\n"
" (unter Windows <Pfad-zum-Programm>\\linkcheckerrc).\n"
"-F type, --file-output=type\n"
" Wie --output, aber schreibe in eine Datei (Standard ist\n"
" linkchecker-out.<type>)\n"
" Falls die Datei bereits existiert wird sie überschrieben.\n"
" Sie können diese Option mehr als einmal verwenden. Es gibt keine\n"
" Ausgabedatei für den blacklist Logger. Standard ist keine Ausgabe\n"
" in eine Datei.\n"
"-i regex, --intern=regex\n"
" Behandle URLs welche diese Ausdruck matchen als intern.\n"
"-h, --help\n"
" Hilf mir! Drucke Nutzungsinformation für dieses Programm.\n"
"-N, --nntp-server\n"
" Gibt ein NNTP Rechner für 'news:...' Links. Standard ist die\n"
" Umgebungsvariable NNTP_SERVER. Falls kein Rechner angegeben ist,\n"
" wird lediglich auf korrekte Syntax des Links geprüft.\n"
"-o type, --output=type\n"
" Verwende die Ausgabe als %s.\n"
" Standard Ausgabe ist text.\n"
"-p pwd, --password=pwd\n"
" Verwende das angegebene Passwort für HTML und FTP Authorisation.\n"
" Standard ist 'guest@'. Siehe -u.\n"
"-q, --quiet\n"
" Keine Ausgabe. Dies ist nur in Verbindung mit -F nützlich.\n"
"-r depth, --recursion-level=depth\n"
" Prüfe rekursiv alle URLs bis zu der angegebenen Tiefe\n"
" (depth >= 0). Standard Tiefe ist 1.\n"
"-R, --robots-txt\n"
" Befolge den Robots Exclusion Standard.\n"
"-s, --strict\n"
" Prüfe lediglich die Syntax von externen URLs. Es wird keine.\n"
" Verbindung zu diesen Rechner aufgebaut.\n"
"-t num, --threads=num\n"
" Generiere nicht mehr als num Threads. Standard Anzahl von Threads\n"
" ist 5. Um Threading auszuschalten geben Sie eine nichtpositive\n"
" Anzahl an.\n"
"-u name, --user=name\n"
" Verwende den angegebenen Benutzernamen für HTML und FTP\n"
" Authorisation. Standard ist 'anonymous'. Siehe -p.\n"
"-V, --version\n"
" Drucke die Version und beende das Programm.\n"
"-v, --verbose\n"
" Logge alle geprüften URLs (impliziert -w). Standard ist es, nur\n"
" fehlerhafte URLs zu loggen.\n"
"-w, --warnings\n"
" Logge Warnungen.\n"
"-W regex, --warning-regex=regex\n"
" Definieren Sie einen regulären Ausdruck, der eine Warnung ausdruckt\n"
" falls er den Inhalt einer geprüften URL matcht.\n"
" Dies gilt natürlich nur für gültige Seiten deren Inhalt wir\n"
" bekommen können.\n"
" Sie können dies verwenden, um Seiten mit Fehlermeldungen wie z.B.\n"
" 'Diese Seite ist umgezogen' oder 'Oracle Server Fehler'.\n"
" Diese Option impliziert -w.\n"
msgid "Parent URL"
msgstr "Vater URL"

302
po/fr.po
View file

@ -167,6 +167,157 @@ msgstr "valeur du niveau de r
msgid "%.3f seconds"
msgstr "%.3f secondes"
#, fuzzy
msgid ""
"USAGE\tlinkchecker [options] file-or-url...\n"
"\n"
"OPTIONS\n"
"For single-letter option arguments the space is not a necessity. So\n"
"'-o colored' is the same as '-ocolored'.\n"
"-a, --anchors\n"
" Check anchor references. Default is don't check anchors.\n"
"-d, --denyallow\n"
" Swap checking order to extern/intern. Default checking order\n"
" is intern/extern.\n"
"-D, --debug\n"
" Print additional debugging information.\n"
"-e regex, --extern=regex\n"
" Assume urls that match the given expression as extern.\n"
" Only intern HTML links are checked recursively.\n"
"-f file, --config=file\n"
" Use file as configuration file. LinkChecker first searches\n"
" ~/.linkcheckerrc and then /etc/linkcheckerrc\n"
" (under Windows <path-to-program>\\linkcheckerrc).\n"
"-F type[/filename], --file-output=type[/filename]\n"
" Same as -o, but write to a file linkchecker-out.<type>\n"
" or <filename> if specified. If the file already exists, it\n"
" is overwritten. You can specify this option more than once.\n"
" There is no file output for the blacklist logger. Default is\n"
" no file output.\n"
"-i regex, --intern=regex\n"
" Assume URLs that match the given expression as intern.\n"
" LinkChecker descends recursively only to intern URLs, not to "
"extern.\n"
"-h, --help\n"
" Help me! Print usage information for this program.\n"
"-N server, --nntp-server=server\n"
" Specify an NNTP server for 'news:...' links. Default is the\n"
" environment variable NNTP_SERVER. If no host is given,\n"
" only the syntax of the link is checked.\n"
"-o type, --output=type\n"
" Specify output type as %s.\n"
" Default type is text.\n"
"-p pwd, --password=pwd\n"
" Try password pwd for HTML and FTP authorization.\n"
" Default password is 'joe@'. See also -u.\n"
"-P secs, --pause=secs\n"
" Pause <secs> seconds between each url check. This option\n"
"\timplies -t0.\n"
" Default is no pause between requests.\n"
"-q, --quiet\n"
" Quiet operation. This is only useful with -F.\n"
"-r depth, --recursion-level=depth\n"
" Check recursively all links up to given depth (depth >= 0).\n"
" Default depth is 1.\n"
"-R, --robots-txt\n"
" Obey the robots exclusion standard.\n"
"-s, --strict\n"
" Check only syntax of extern links, do not try to connect to them.\n"
"-t num, --threads=num\n"
" Generate no more than num threads. Default number of threads is 5.\n"
" To disable threading specify a non-positive number.\n"
"-u name, --user=name\n"
" Try username name for HTML and FTP authorization.\n"
" Default is 'anonymous'. See also -p.\n"
"-V, --version\n"
" Print version and exit.\n"
"-v, --verbose\n"
" Log all checked URLs (implies -w). Default is to log only invalid\n"
" URLs.\n"
"-w, --warnings\n"
" Log warnings.\n"
"-W regex, --warning-regex=regex\n"
" Define a regular expression which prints a warning if it matches\n"
" any content of the checked link.\n"
" This applies of course only to pages which are valid, so we can\n"
" get their content.\n"
" Use this to check for pages that contain some form of error\n"
" message, for example 'This page has moved' or 'Oracle\n"
" Application Server error'.\n"
" This option implies -w.\n"
"\") % linkcheck.Config.LoggerKeys\n"
msgstr ""
"USAGE\tlinkchecker [options] fichier_ou_url...\n"
"\n"
"OPTIONS\n"
"-a, --anchors\n"
" Contrôle les références ancrées. Par défaut, il ne les contrôle pas.\n"
"-d, --denyallow\n"
" Swap checking order to extern/intern. Default checking order\n"
" is intern/extern.\n"
"-D, --debug\n"
" Affiche des informations de débugage supplémentaires.\n"
"-e regex, --extern=regex\n"
" Assume urls that match the given expression as extern.\n"
" Only intern HTTP links are checked recursively.\n"
"-f file, --config=file\n"
" Utilise le fichier comme fichier de configuration. LinkChecker "
"recherche d'abord\n"
" ~/.linkcheckerrc puis /etc/linkcheckerrc\n"
" (sous Windows <chemin-vers-le-programe>\\linkcheckerrc).\n"
"-F name, --file-output=name\n"
" Identique à output, mais écrit dans un fichier linkchecker-out.<nom>.\n"
" Si le fichier existe, il sera écrasé. Vous pouvez spécifier\n"
" cette option plus d'une fois. Il n'y a pas de fichier de sortie pour "
"les\n"
" logs de la liste noire. Par défaut, il n'y a pas de fichier de sortie.\n"
"-i regex, --intern=regex\n"
" Assume urls that match the given expression as intern.\n"
"-h, --help\n"
" Aide moi! Affiche les informations d'utilisation pour ce programme.\n"
"-N, --nntp-server\n"
" Specify an NNTP server for 'news:...' links. Default is the\n"
" environment variable NNTP_SERVER. If no host is given,\n"
" only the syntax of the link is checked.\n"
"-o name, --output=name\n"
" Specify output as %s.\n"
" Default is text.\n"
"-p pwd, --password=pwd\n"
" Try given password for HTML and FTP authorization.\n"
" Default is 'guest@'. See -u.\n"
"-q, --quiet\n"
" Quiet operation. This is only useful with -F.\n"
"-r depth, --recursion-level=depth\n"
" Check recursively all links up to given depth (depth >= 0).\n"
" Default depth is 1.\n"
"-R, --robots-txt\n"
" Obey the robots exclusion standard.\n"
"-s, --strict\n"
" Contrôle seulement la syntaxe des liens externes, et ne pas essayer\n"
" de s'y connecter.\n"
"-t num, --threads=num\n"
" Generate no more than num threads. Default number of threads is 5.\n"
" To disable threading specify a non-positive number.\n"
"-u name, --user=name\n"
" Essayer le nom d'utilisateur donné pour l'autorisation HTTP et FTP.\n"
" La valeur par défaut est 'anonymous'. Regarder à -p.\n"
"-V, --version\n"
" Affiche la version et quitte.\n"
"-v, --verbose\n"
" Logger toutes les URLs contôlées (suppose -w). Par défaut, seulement les URLS\n"
" invalides sont logguées.\n"
"-w, --warnings\n"
" Logger les avertissements.\n"
"-W regex, --warning-regex=regex\n"
" Define a regular expression which prints a warning if it matches\n"
" any content of the checked link.\n"
" This applies of course only to pages which are valid, so we can\n"
" get their content.\n"
" You can use this to check for pages that contain some form of\n"
" error message, for example 'This page has moved' or\n"
" 'Oracle Application Server error'.\n"
" This option implies -w.\n"
msgid "Execute 'linkchecker -h' for help\n"
msgstr "Exécuter 'linkchecker -h' pour obtenir l'aide\n"
@ -310,157 +461,6 @@ msgstr "R
msgid "No NNTP server specified, skipping this URL"
msgstr "Aucun serveur NNTP spécifié, analyse de la syntaxe seulement"
#, fuzzy
msgid ""
"USAGE\tlinkchecker [options] file-or-url...\n"
"\n"
"OPTIONS\n"
"For single-letter option arguments the space is not a necessity. So\n"
"'-o colored' is the same as '-ocolored'.\n"
"-a, --anchors\n"
" Check anchor references. Default is don't check anchors.\n"
"-d, --denyallow\n"
" Swap checking order to extern/intern. Default checking order\n"
" is intern/extern.\n"
"-D, --debug\n"
" Print additional debugging information.\n"
"-e regex, --extern=regex\n"
" Assume urls that match the given expression as extern.\n"
" Only intern HTML links are checked recursively.\n"
"-f file, --config=file\n"
" Use file as configuration file. LinkChecker first searches\n"
" ~/.linkcheckerrc and then /etc/linkcheckerrc\n"
" (under Windows <path-to-program>\\linkcheckerrc).\n"
"-F type[/filename], --file-output=type[/filename]\n"
" Same as -o, but write to a file linkchecker-out.<type>\n"
" or <filename> if specified. If the file already exists, it\n"
" is overwritten. You can specify this option more than once.\n"
" There is no file output for the blacklist logger. Default is\n"
" no file output.\n"
"-i regex, --intern=regex\n"
" Assume URLs that match the given expression as intern.\n"
" LinkChecker descends recursively only to intern URLs, not to "
"extern.\n"
"-h, --help\n"
" Help me! Print usage information for this program.\n"
"-N server, --nntp-server=server\n"
" Specify an NNTP server for 'news:...' links. Default is the\n"
" environment variable NNTP_SERVER. If no host is given,\n"
" only the syntax of the link is checked.\n"
"-o type, --output=type\n"
" Specify output type as %s.\n"
" Default type is text.\n"
"-p pwd, --password=pwd\n"
" Try password pwd for HTML and FTP authorization.\n"
" Default password is 'joe@'. See also -u.\n"
"-P secs, --pause=secs\n"
" Pause <secs> seconds between each url check. Don't forget to\n"
"\tdisable threading with -t0 when you really want to wait.\n"
" Default is no pause between requests.\n"
"-q, --quiet\n"
" Quiet operation. This is only useful with -F.\n"
"-r depth, --recursion-level=depth\n"
" Check recursively all links up to given depth (depth >= 0).\n"
" Default depth is 1.\n"
"-R, --robots-txt\n"
" Obey the robots exclusion standard.\n"
"-s, --strict\n"
" Check only syntax of extern links, do not try to connect to them.\n"
"-t num, --threads=num\n"
" Generate no more than num threads. Default number of threads is 5.\n"
" To disable threading specify a non-positive number.\n"
"-u name, --user=name\n"
" Try username name for HTML and FTP authorization.\n"
" Default is 'anonymous'. See also -p.\n"
"-V, --version\n"
" Print version and exit.\n"
"-v, --verbose\n"
" Log all checked URLs (implies -w). Default is to log only invalid\n"
" URLs.\n"
"-w, --warnings\n"
" Log warnings.\n"
"-W regex, --warning-regex=regex\n"
" Define a regular expression which prints a warning if it matches\n"
" any content of the checked link.\n"
" This applies of course only to pages which are valid, so we can\n"
" get their content.\n"
" Use this to check for pages that contain some form of error\n"
" message, for example 'This page has moved' or 'Oracle\n"
" Application Server error'.\n"
" This option implies -w.\n"
"\") % linkcheck.Config.LoggerKeys\n"
msgstr ""
"USAGE\tlinkchecker [options] fichier_ou_url...\n"
"\n"
"OPTIONS\n"
"-a, --anchors\n"
" Contrôle les références ancrées. Par défaut, il ne les contrôle pas.\n"
"-d, --denyallow\n"
" Swap checking order to extern/intern. Default checking order\n"
" is intern/extern.\n"
"-D, --debug\n"
" Affiche des informations de débugage supplémentaires.\n"
"-e regex, --extern=regex\n"
" Assume urls that match the given expression as extern.\n"
" Only intern HTTP links are checked recursively.\n"
"-f file, --config=file\n"
" Utilise le fichier comme fichier de configuration. LinkChecker "
"recherche d'abord\n"
" ~/.linkcheckerrc puis /etc/linkcheckerrc\n"
" (sous Windows <chemin-vers-le-programe>\\linkcheckerrc).\n"
"-F name, --file-output=name\n"
" Identique à output, mais écrit dans un fichier linkchecker-out.<nom>.\n"
" Si le fichier existe, il sera écrasé. Vous pouvez spécifier\n"
" cette option plus d'une fois. Il n'y a pas de fichier de sortie pour "
"les\n"
" logs de la liste noire. Par défaut, il n'y a pas de fichier de sortie.\n"
"-i regex, --intern=regex\n"
" Assume urls that match the given expression as intern.\n"
"-h, --help\n"
" Aide moi! Affiche les informations d'utilisation pour ce programme.\n"
"-N, --nntp-server\n"
" Specify an NNTP server for 'news:...' links. Default is the\n"
" environment variable NNTP_SERVER. If no host is given,\n"
" only the syntax of the link is checked.\n"
"-o name, --output=name\n"
" Specify output as %s.\n"
" Default is text.\n"
"-p pwd, --password=pwd\n"
" Try given password for HTML and FTP authorization.\n"
" Default is 'guest@'. See -u.\n"
"-q, --quiet\n"
" Quiet operation. This is only useful with -F.\n"
"-r depth, --recursion-level=depth\n"
" Check recursively all links up to given depth (depth >= 0).\n"
" Default depth is 1.\n"
"-R, --robots-txt\n"
" Obey the robots exclusion standard.\n"
"-s, --strict\n"
" Contrôle seulement la syntaxe des liens externes, et ne pas essayer\n"
" de s'y connecter.\n"
"-t num, --threads=num\n"
" Generate no more than num threads. Default number of threads is 5.\n"
" To disable threading specify a non-positive number.\n"
"-u name, --user=name\n"
" Essayer le nom d'utilisateur donné pour l'autorisation HTTP et FTP.\n"
" La valeur par défaut est 'anonymous'. Regarder à -p.\n"
"-V, --version\n"
" Affiche la version et quitte.\n"
"-v, --verbose\n"
" Logger toutes les URLs contôlées (suppose -w). Par défaut, seulement les URLS\n"
" invalides sont logguées.\n"
"-w, --warnings\n"
" Logger les avertissements.\n"
"-W regex, --warning-regex=regex\n"
" Define a regular expression which prints a warning if it matches\n"
" any content of the checked link.\n"
" This applies of course only to pages which are valid, so we can\n"
" get their content.\n"
" You can use this to check for pages that contain some form of\n"
" error message, for example 'This page has moved' or\n"
" 'Oracle Application Server error'.\n"
" This option implies -w.\n"
msgid "Parent URL"
msgstr "URL Parente"

View file

@ -130,6 +130,12 @@ class MyDistribution(Distribution):
def run_commands(self):
if "config" not in self.commands:
self.check_ssl()
if "bdist_wininst" in self.commands:
# enable .bat file as a script
self.scripts.append('linkchecker.bat')
else:
# man page for POSIX systems
self.data_files.append(('man/man1', ['linkchecker.1']))
Distribution.run_commands(self)
@ -269,6 +275,5 @@ o a (Fast)CGI web interface (requires HTTP server)
['linkchecker.bat', 'lconline/leer.html',
'lconline/index.html', 'lconline/lc_cgi.html',
'lc.cgi','lc.fcgi','lc.sz_fcgi']),
('man/man1', ['linkchecker.1']),
],
)