Add function documentation.

This commit is contained in:
Bastian Kleineidam 2011-02-14 21:06:34 +01:00
parent d0c1c5dd27
commit c5884b8d87
25 changed files with 131 additions and 129 deletions

View file

@ -31,20 +31,24 @@ import _LinkChecker_configdata as configdata
def main_is_frozen ():
    """Return True iff running inside a py2exe-generated executable."""
    # py2exe marks frozen executables by setting the sys.frozen attribute.
    try:
        sys.frozen
    except AttributeError:
        return False
    return True
def module_path ():
    """Return absolute directory of system executable."""
    executable = os.path.abspath(sys.executable)
    return os.path.dirname(executable)
def get_install_data ():
    """Return absolute path of LinkChecker data installation directory."""
    # Frozen (py2exe) builds keep data next to the executable; a normal
    # install records the location in the generated configdata module.
    if not main_is_frozen():
        return configdata.install_data
    return module_path()
def get_config_dir ():
    """Return absolute path of LinkChecker configuration directory."""
    # A normal install records the location in the generated configdata
    # module; frozen (py2exe) builds use share/linkchecker beside the exe.
    if not main_is_frozen():
        return configdata.config_dir
    return os.path.join(module_path(), "share", "linkchecker")

View file

@ -28,6 +28,9 @@ addrinfos = LFUCache(size=10000)
@synchronized(_lock)
def getaddrinfo (host, port):
"""Determine address information for given host and port for
streaming sockets (SOCK_STREAM).
Already cached information is used."""
key = u"%s:%s" % (unicode(host), unicode(port))
if key in addrinfos:
value = addrinfos[key]

View file

@ -31,6 +31,9 @@ class CookieJar (object):
"""
def __init__ (self):
"""Initialize empty per-host cookie cache."""
# mapping { hostname -> Jar implementation }
# with Jar implementations coming from the cookies module
self.cache = {}
@synchronized(_lock)
@ -66,4 +69,5 @@ class CookieJar (object):
@synchronized(_lock)
def __str__ (self):
"""Return stored cookies as string."""
return "<CookieJar with %s>" % self.cache

View file

@ -34,13 +34,13 @@ class RobotsTxt (object):
"""
def __init__ (self):
"""Initialize per-URL robots.txt cache."""
# mapping {URL -> parsed robots.txt}
self.cache = LFUCache(size=100)
@synchronized(_lock)
def allows_url (self, roboturl, url, proxy, user, password, callback=None):
"""
Ask robots.txt allowance.
"""
"""Ask robots.txt allowance."""
useragent = str(configuration.UserAgent)
if roboturl not in self.cache:
rp = robotparser2.RobotFileParser(proxy=proxy, user=user,

View file

@ -64,11 +64,15 @@ class UrlQueue (object):
return len(self.queue)
def empty (self):
"""Return True if the queue is empty, False otherwise (not reliable!)."""
"""Return True if the queue is empty, False otherwise.
Result is thread-safe, but not reliable since the queue could have
been changed before the result is returned!"""
with self.mutex:
return self._empty()
def _empty (self):
"""Return True if the queue is empty, False otherwise.
Not thread-safe!"""
return not self.queue
def get (self, timeout=None):
@ -80,6 +84,8 @@ class UrlQueue (object):
return self._get(timeout)
def _get (self, timeout):
"""Non thread-safe utility function of self.get() doing the real
work."""
if timeout is None:
while self._empty():
self.not_empty.wait()
@ -247,9 +253,7 @@ class UrlQueue (object):
self.shutdown = True
def status (self):
"""
Get tuple (finished tasks, in progress, queue size).
"""
"""Get tuple (finished tasks, in progress, queue size)."""
with self.mutex:
return (self.finished_tasks,
len(self.in_progress), len(self.queue))

View file

@ -136,11 +136,14 @@ class StoringHandler (logging.Handler):
Used by the CSS syntax checker."""
def __init__ (self, maxrecords=100):
"""Initialize site-limited list."""
logging.Handler.__init__(self)
self.storage = []
self.maxrecords = maxrecords
def emit (self, record):
    """Save message record. If storage size is exceeded, remove
    oldest message record."""
    if len(self.storage) >= self.maxrecords:
        # Drop the oldest record so the newest ones are kept.
        # (pop() without an index removed the newest record, which
        # contradicted the documented bounded-buffer behavior.)
        self.storage.pop(0)
    self.storage.append(record)

View file

@ -235,9 +235,7 @@ class FileUrl (urlbase.UrlBase):
return self.get_content_type() in self.ContentMimetypes
def parse_url (self):
"""
Parse file contents for new links to check.
"""
"""Parse file contents for new links to check."""
if self.is_directory():
self.parse_html()
elif firefox.has_sqlite and firefox.extension.search(self.url):
@ -257,6 +255,8 @@ class FileUrl (urlbase.UrlBase):
self.aggregate.urlqueue.put(url_data)
def get_content_type (self):
"""Return URL content type, or an empty string if content
type could not be found."""
if self.content_type is None:
if self.url:
self.content_type = fileutil.guess_mimetype(self.url, read=self.get_content)
@ -265,8 +265,7 @@ class FileUrl (urlbase.UrlBase):
return self.content_type
def get_intern_pattern (self):
"""
Get pattern for intern URL matching.
"""Get pattern for intern URL matching.
@return non-empty regex pattern or None
@rtype String or None

View file

@ -208,6 +208,8 @@ class FtpUrl (internpaturl.InternPatternUrl, proxysupport.ProxySupport):
getattr(self, "parse_"+key)()
def get_content_type (self, read=None):
"""Return URL content type, or an empty string if content
type could not be found."""
if self.content_type is None:
self.content_type = fileutil.guess_mimetype(self.url, read=read)
return self.content_type

View file

@ -515,6 +515,8 @@ class HttpUrl (internpaturl.InternPatternUrl, proxysupport.ProxySupport):
# the connection after HEAD.
# Example: http://www.empleo.gob.mx (Apache/1.3.33 (Unix) mod_jk)
self.persistent = False
# Note that for POST method the connection should also be closed,
# but this method is never used.
if self.persistent and (self.method == "GET" or
self.headers.getheader("Content-Length") != "0"):
# always read content from persistent connections

View file

@ -48,7 +48,7 @@ def getaddresses (addr):
def is_quoted (addr):
    """Return True iff mail address string is quoted."""
    # A quoted local part is wrapped in double quotes per RFC 5321.
    return addr.startswith(u'"') and addr.endswith(u'"')
@ -60,6 +60,7 @@ def is_literal (domain):
_remove_quoted = re.compile(ur'\\.').sub
_quotes = re.compile(ur'["\\]')
def is_missing_quote (addr):
"""Return True iff mail address is not correctly quoted."""
return _quotes.match(_remove_quoted(u"", addr[1:-1]))

View file

@ -68,14 +68,10 @@ is_unknown_url = ignored_schemes_re.search
class UnknownUrl (urlbase.UrlBase):
"""
Handle unknown or just plain broken URLs.
"""
"""Handle unknown or just plain broken URLs."""
def local_check (self):
"""
Only logs that this URL is unknown.
"""
"""Only logs that this URL is unknown."""
self.set_extern(self.url)
if self.extern[0] and self.extern[1]:
self.add_info(_("Outside of domain filter, checked only syntax."))
@ -88,11 +84,11 @@ class UnknownUrl (urlbase.UrlBase):
valid=False)
def ignored (self):
"""Return True if this URL scheme is ignored."""
return ignored_schemes_re.search(self.url)
def can_get_content (self):
"""
Unknown URLs have no content.
"""Unknown URLs have no content.
@return: False
@rtype: bool

View file

@ -712,6 +712,7 @@ class UrlBase (object):
self.scan_virus()
def check_warningregex (self):
"""Check if content matches a given regular expression."""
warningregex = self.aggregate.config["warningregex"]
if warningregex:
log.debug(LOG_CHECK, "checking content")
@ -1132,6 +1133,8 @@ class UrlBase (object):
)
def to_wire (self):
"""Return compact UrlData object with information from to_wire_dict().
"""
return CompactUrlData(self.to_wire_dict())
@ -1165,6 +1168,7 @@ urlDataAttr = [
]
class CompactUrlData (object):
"""Store selected UrlData attributes in slots to minimize memory usage."""
__slots__ = urlDataAttr
def __init__(self, wired_url_data):

View file

@ -252,6 +252,7 @@ class Configuration (dict):
self.sanitize()
def add_auth (self, user=None, password=None, pattern=None):
"""Add given authentication data."""
if not user or not pattern:
log.warn(LOG_CHECK,
_("warning: missing user or URL pattern in authentication data."))
@ -292,6 +293,7 @@ class Configuration (dict):
self.sanitize_loginurl()
def sanitize_anchors (self):
"""Make anchor configuration consistent."""
if not self["warnings"]:
self["warnings"] = True
from ..checker import Warnings
@ -300,12 +302,14 @@ class Configuration (dict):
self["ignorewarnings"].remove('url-anchor-not-found')
def sanitize_logger (self):
"""Make logger configuration consistent."""
if not self['output']:
log.warn(LOG_CHECK, _("warning: activating text logger output."))
self['output'] = 'text'
self['logger'] = self.logger_new(self['output'])
def sanitize_checkhtml (self):
"""Ensure HTML tidy is installed for checking HTML."""
try:
import tidy
except ImportError:
@ -315,6 +319,7 @@ class Configuration (dict):
self['checkhtml'] = False
def sanitize_checkcss (self):
"""Ensure cssutils is installed for checking CSS."""
try:
import cssutils
except ImportError:
@ -324,6 +329,7 @@ class Configuration (dict):
self['checkcss'] = False
def sanitize_scanvirus (self):
"""Ensure clamav is installed for virus checking."""
try:
clamav.init_clamav_conf(self['clamavconf'])
except clamav.ClamavError:
@ -332,12 +338,14 @@ class Configuration (dict):
self['scanvirus'] = False
def sanitize_cookies (self):
"""Make cookie configuration consistent."""
if not self['sendcookies']:
log.warn(LOG_CHECK, _("warning: activating sendcookies " \
"because storecookies is active."))
self['sendcookies'] = True
def sanitize_loginurl (self):
"""Make login configuration consistent."""
url = self["loginurl"]
disable = False
if not self["loginpasswordfield"]:

View file

@ -36,6 +36,7 @@ class LCConfigParser (ConfigParser.RawConfigParser, object):
"""
def __init__ (self, config):
"""Initialize configuration."""
super(LCConfigParser, self).__init__()
self.config = config

View file

@ -31,6 +31,7 @@ class Aggregate (object):
"""Store thread-safe data collections for checker threads."""
def __init__ (self, config, urlqueue, connections, cookies, robots_txt):
"""Store given link checking objects."""
self.config = config
self.urlqueue = urlqueue
self.connections = connections
@ -61,6 +62,7 @@ class Aggregate (object):
checker.check_url(self.urlqueue, self.logger)
def print_active_threads (self):
"""Log all currently active threads."""
first = True
for t in self.threads:
name = t.getName()

View file

@ -39,9 +39,11 @@ class StatusLogger (object):
"""Standard status logger. Default output is stderr."""
def __init__ (self, fd=stderr):
"""Save file descriptor for logging."""
self.fd = fd
def log_status (self, checked, in_progress, queue, duration):
"""Write status message to file descriptor."""
msg = _n("%2d URL active", "%2d URLs active", in_progress) % \
in_progress
self.write(u"%s, " % msg)
@ -54,12 +56,15 @@ class StatusLogger (object):
self.flush()
def write (self, msg):
"""Write message to file descriptor."""
self.fd.write(msg)
def writeln (self, msg):
"""Write status message and line break to file descriptor."""
self.fd.write(u"%s%s" % (msg, unicode(os.linesep)))
def flush (self):
"""Flush file descriptor."""
self.fd.flush()

View file

@ -22,11 +22,10 @@ _lock = threading.Lock()
class Logger (object):
"""
Thread safe multi-logger class used by aggregator instances.
"""
"""Thread safe multi-logger class used by aggregator instances."""
def __init__ (self, config):
"""Initialize basic logging variables."""
self.logs = [config['logger']]
self.logs.extend(config['fileoutput'])
self.ignorewarnings = config["ignorewarnings"]

View file

@ -14,6 +14,7 @@
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Parser for FireFox bookmark file."""
import os
import glob
import re
@ -44,6 +45,8 @@ def get_profile_dir ():
def find_bookmark_file ():
"""Return the first found places.sqlite file of the profile directories
ending with '.default'.
Returns absolute filename if found, or empty string if no bookmark file
could be found.
"""
for dirname in glob.glob(u"%s/*.default" % get_profile_dir()):
if os.path.isdir(dirname):
@ -54,6 +57,10 @@ def find_bookmark_file ():
def parse_bookmark_file (filename):
"""Return iterator for bookmarks of the form (url, name).
Bookmarks are not sorted.
Returns None if sqlite3 module is not installed.
"""
if not has_sqlite:
return
conn = sqlite3.connect(filename, timeout=0.5)

View file

@ -63,9 +63,11 @@ class LogStatistics (object):
"""
def __init__ (self):
"""Initialize log statistics."""
self.reset()
def reset (self):
"""Reset all log statistics to default values."""
# number of logged urls
self.number = 0
# number of encountered errors
@ -84,6 +86,7 @@ class LogStatistics (object):
self.avg_number = 0
def log_url (self, url_data, do_print):
"""Log URL statistics."""
self.number += 1
if not url_data.valid:
self.errors += 1
@ -191,6 +194,7 @@ class Logger (object):
self.fd = Writer(sys.stdout, self.codec_errors)
def start_fileoutput (self):
"""Start output to configured file."""
path = os.path.dirname(self.filename)
try:
if path and not os.path.isdir(path):

View file

@ -30,9 +30,7 @@ class CSVLogger (Logger):
"""
def __init__ (self, **args):
"""
Store default separator and (os dependent) line terminator.
"""
"""Store default separator and (os dependent) line terminator."""
super(CSVLogger, self).__init__(**args)
# due to a limitation of the csv module, all output has to be
# utf-8 encoded
@ -47,16 +45,12 @@ class CSVLogger (Logger):
return open(self.filename, "wb")
def comment (self, s, **args):
"""
Write CSV comment.
"""
"""Write CSV comment."""
self.write(u"# ")
self.writeln(s=s, **args)
def start_output (self):
"""
Write checking start info as csv comment.
"""
"""Write checking start info as csv comment."""
super(CSVLogger, self).start_output()
row = []
if self.has_part("intro"):
@ -89,9 +83,7 @@ class CSVLogger (Logger):
self.writerow(row)
def log_url (self, url_data):
"""
Write csv formatted url check info.
"""
"""Write csv formatted url check info."""
row = []
for s in (url_data.base_url,
url_data.parent_url, url_data.base_ref,
@ -108,12 +100,11 @@ class CSVLogger (Logger):
self.flush()
def writerow (self, row):
"""Write one row in CSV format."""
self.writer.writerow([self.encode(s) for s in row])
def end_output (self):
"""
Write end of checking info as csv comment.
"""
"""Write end of checking info as csv comment."""
if self.has_part("outro"):
self.write_outro()
self.close_fileoutput()

View file

@ -76,4 +76,5 @@ class DOTLogger (GraphLogger):
def dotquote (s):
    """Quote string for usage in DOT output format."""
    # Escape every double quote with a backslash so the string can be
    # embedded in a DOT quoted identifier.
    return '\\"'.join(s.split('"'))

View file

@ -51,14 +51,10 @@ HTML_HEADER = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
class HtmlLogger (Logger):
"""
Logger with HTML output.
"""
"""Logger with HTML output."""
def __init__ (self, **args):
"""
Initialize default HTML color values.
"""
"""Initialize default HTML color values."""
super(HtmlLogger, self).__init__(**args)
self.init_fileoutput(args)
self.colorbackground = args['colorbackground']
@ -70,15 +66,11 @@ class HtmlLogger (Logger):
self.colorok = args['colorok']
def part (self, name):
"""
Return non-space-breakable part name.
"""
"""Return non-space-breakable part name."""
return super(HtmlLogger, self).part(name).replace(" ", "&nbsp;")
def comment (self, s, **args):
"""
Write HTML comment.
"""
"""Write HTML comment."""
self.write(u"<!-- ")
self.write(s, **args)
self.write(u" -->")
@ -107,9 +99,7 @@ class HtmlLogger (Logger):
self.flush()
def log_url (self, url_data):
"""
Write url checking info as HTML.
"""
"""Write url checking info as HTML."""
self.write_table_start()
if self.has_part("url"):
self.write_url(url_data)
@ -137,9 +127,7 @@ class HtmlLogger (Logger):
self.flush()
def write_table_start (self):
"""
Start html table.
"""
"""Start html table."""
self.writeln(u'<br clear="all"><br>')
self.writeln(u'<table align="left" border="0" cellspacing="0"'
u' cellpadding="1"')
@ -173,16 +161,12 @@ class HtmlLogger (Logger):
self.writeln(u"</td></tr>")
def write_name (self, url_data):
"""
Write url_data.name.
"""
"""Write url_data.name."""
args = (self.part("name"), cgi.escape(url_data.name))
self.writeln(u"<tr><td>%s</td><td>`%s'</td></tr>" % args)
def write_parent (self, url_data):
"""
Write url_data.parent_url.
"""
"""Write url_data.parent_url."""
self.write(u"<tr><td>"+self.part("parenturl")+
u'</td><td><a target="top" href="'+
url_data.parent_url+u'">'+
@ -199,56 +183,42 @@ class HtmlLogger (Logger):
self.writeln(u"</td></tr>")
def write_base (self, url_data):
"""
Write url_data.base_ref.
"""
"""Write url_data.base_ref."""
self.writeln(u"<tr><td>"+self.part("base")+u"</td><td>"+
cgi.escape(url_data.base_ref)+u"</td></tr>")
def write_real (self, url_data):
"""
Write url_data.url.
"""
"""Write url_data.url."""
self.writeln("<tr><td>"+self.part("realurl")+u"</td><td>"+
u'<a target="top" href="'+url_data.url+
u'">'+cgi.escape(url_data.url)+u"</a></td></tr>")
def write_dltime (self, url_data):
"""
Write url_data.dltime.
"""
"""Write url_data.dltime."""
self.writeln(u"<tr><td>"+self.part("dltime")+u"</td><td>"+
(_("%.3f seconds") % url_data.dltime)+
u"</td></tr>")
def write_dlsize (self, url_data):
"""
Write url_data.dlsize.
"""
"""Write url_data.dlsize."""
self.writeln(u"<tr><td>"+self.part("dlsize")+u"</td><td>"+
strformat.strsize(url_data.dlsize)+
u"</td></tr>")
def write_checktime (self, url_data):
"""
Write url_data.checktime.
"""
"""Write url_data.checktime."""
self.writeln(u"<tr><td>"+self.part("checktime")+u"</td><td>"+
(_("%.3f seconds") % url_data.checktime)+u"</td></tr>")
def write_info (self, url_data):
"""
Write url_data.info.
"""
"""Write url_data.info."""
sep = u"<br>"+os.linesep
text = sep.join(cgi.escape(x) for x in url_data.info)
self.writeln(u'<tr><td valign="top">' + self.part("info")+
u"</td><td>"+text+u"</td></tr>")
def write_warning (self, url_data):
"""
Write url_data.warnings.
"""
"""Write url_data.warnings."""
sep = u"<br>"+os.linesep
text = sep.join(cgi.escape(x) for x in url_data.warnings)
self.writeln(u'<tr><td bgcolor="' + self.colorwarning + u'" '+
@ -257,9 +227,7 @@ class HtmlLogger (Logger):
text + u"</td></tr>")
def write_result (self, url_data):
"""
Write url_data.result.
"""
"""Write url_data.result."""
if url_data.valid:
self.write(u'<tr><td bgcolor="%s">' % self.colorok)
self.write(self.part("result"))
@ -275,6 +243,7 @@ class HtmlLogger (Logger):
self.writeln(u"</td></tr>")
def write_stats (self):
"""Write check statistic infos."""
self.writeln(u'<br><i>%s</i><br>' % _("Statistics"))
if len(self.stats.domains) > 1:
self.writeln(_("Number of domains: %d") % len(self.stats.domains))
@ -294,6 +263,7 @@ class HtmlLogger (Logger):
self.writeln(u"<br>")
def write_outro (self):
"""Write end of check message."""
self.writeln(u"<br>")
self.write(_("That's it.")+" ")
if self.stats.number >= 0:
@ -329,9 +299,7 @@ class HtmlLogger (Logger):
self.writeln(u"</small></body></html>")
def end_output (self):
"""
Write end of checking info as HTML.
"""
"""Write end of checking info as HTML."""
if self.has_part("stats"):
self.write_stats()
if self.has_part("outro"):

View file

@ -80,6 +80,7 @@ class TextLogger (Logger):
self.colorreset = args.get('colorreset', 'default')
def init_fileoutput (self, args):
"""Colorize file output if possible."""
super(TextLogger, self).init_fileoutput(args)
if self.fd is not None:
self.fd = ansicolor.Colorizer(self.fd)
@ -90,15 +91,14 @@ class TextLogger (Logger):
self.fd = ansicolor.Colorizer(self.fd)
def start_output (self):
"""
Write generic start checking info.
"""
"""Write generic start checking info."""
super(TextLogger, self).start_output()
if self.has_part('intro'):
self.write_intro()
self.flush()
def write_intro (self):
"""Log introduction text."""
self.writeln(configuration.AppInfo)
self.writeln(configuration.Freeware)
self.writeln(_("Get the newest version at %(url)s") %
@ -111,9 +111,7 @@ class TextLogger (Logger):
strformat.strtime(self.starttime))
def log_url (self, url_data):
"""
Write url checking info.
"""
"""Write url checking info."""
if self.has_part('url'):
self.write_url(url_data)
if url_data.name and self.has_part('name'):
@ -153,16 +151,12 @@ class TextLogger (Logger):
self.writeln(txt, color=self.colorurl)
def write_name (self, url_data):
"""
Write url_data.name.
"""
"""Write url_data.name."""
self.write(self.part("name") + self.spaces("name"))
self.writeln(strformat.strline(url_data.name), color=self.colorname)
def write_parent (self, url_data):
"""
Write url_data.parent_url.
"""
"""Write url_data.parent_url."""
self.write(self.part('parenturl') + self.spaces("parenturl"))
txt = url_data.parent_url
txt += _(", line %d") % url_data.line
@ -170,61 +164,45 @@ class TextLogger (Logger):
self.writeln(txt, color=self.colorparent)
def write_base (self, url_data):
"""
Write url_data.base_ref.
"""
"""Write url_data.base_ref."""
self.write(self.part("base") + self.spaces("base"))
self.writeln(url_data.base_ref, color=self.colorbase)
def write_real (self, url_data):
"""
Write url_data.url.
"""
"""Write url_data.url."""
self.write(self.part("realurl") + self.spaces("realurl"))
self.writeln(unicode(url_data.url), color=self.colorreal)
def write_dltime (self, url_data):
"""
Write url_data.dltime.
"""
"""Write url_data.dltime."""
self.write(self.part("dltime") + self.spaces("dltime"))
self.writeln(_("%.3f seconds") % url_data.dltime,
color=self.colordltime)
def write_dlsize (self, url_data):
"""
Write url_data.dlsize.
"""
"""Write url_data.dlsize."""
self.write(self.part("dlsize") + self.spaces("dlsize"))
self.writeln(strformat.strsize(url_data.dlsize),
color=self.colordlsize)
def write_checktime (self, url_data):
"""
Write url_data.checktime.
"""
"""Write url_data.checktime."""
self.write(self.part("checktime") + self.spaces("checktime"))
self.writeln(_("%.3f seconds") % url_data.checktime,
color=self.colordltime)
def write_info (self, url_data):
"""
Write url_data.info.
"""
"""Write url_data.info."""
self.write(self.part("info") + self.spaces("info"))
self.writeln(self.wrap(url_data.info, 65), color=self.colorinfo)
def write_warning (self, url_data):
"""
Write url_data.warning.
"""
"""Write url_data.warning."""
self.write(self.part("warning") + self.spaces("warning"))
self.writeln(self.wrap(url_data.warnings, 65), color=self.colorwarning)
def write_result (self, url_data):
"""
Write url_data.result.
"""
"""Write url_data.result."""
self.write(self.part("result") + self.spaces("result"))
if url_data.valid:
color = self.colorvalid
@ -237,6 +215,7 @@ class TextLogger (Logger):
self.writeln()
def write_outro (self):
"""Write end of checking message."""
self.writeln()
self.write(_("That's it.") + " ")
self.write(_n("%d link checked.", "%d links checked.",
@ -261,6 +240,7 @@ class TextLogger (Logger):
"duration": strformat.strduration_long(duration)})
def write_stats (self):
"""Write check statistic info."""
self.writeln()
self.writeln(_("Statistics:"))
if len(self.stats.domains) > 1:
@ -278,9 +258,7 @@ class TextLogger (Logger):
self.writeln(_("No statistics available since zero URLs were checked."))
def end_output (self):
"""
Write end of output info, and flush all output buffers.
"""
"""Write end of output info, and flush all output buffers."""
if self.has_part('stats'):
self.write_stats()
if self.has_part('outro'):

View file

@ -37,6 +37,7 @@ class IfConfig (object):
IFF_AUTOMEDIA = 0x4000 # Auto media select active.
def __init__ (self):
"""Initialize a socket and determine ifreq structure size."""
# create a socket so we have a handle to query
self.sockfd = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Note that sizeof(struct ifreq) is not always 32
@ -44,6 +45,7 @@ class IfConfig (object):
self.ifr_size = ifreq_size()
def _ioctl (self, func, args):
"""Call ioctl() with given parameters."""
import fcntl
return fcntl.ioctl(self.sockfd.fileno(), func, args)
@ -52,6 +54,7 @@ class IfConfig (object):
return struct.pack("%ds" % self.ifr_size, ifname)
def _getaddr (self, ifname, func):
"""Get interface address."""
try:
result = self._ioctl(func, self._getifreq(ifname))
except IOError, msg:

View file

@ -93,6 +93,7 @@ def cnormpath (path):
class MyInstallLib (install_lib, object):
"""Custom library installation."""
def install (self):
"""Install the generated config file."""
@ -104,6 +105,7 @@ class MyInstallLib (install_lib, object):
return outs
def create_conf_file (self):
"""Create configuration file."""
cmd_obj = self.distribution.get_command_obj("install")
cmd_obj.ensure_finalized()
# we have to write a configuration file because we need the
@ -176,6 +178,7 @@ class MyDistribution (Distribution, object):
"""Custom distribution class generating config file."""
def __init__ (self, attrs):
"""Set console and windows scripts."""
super(MyDistribution, self).__init__(attrs)
self.console = ['linkchecker']
self.windows = [{
@ -313,6 +316,7 @@ class MyBuild (build, object):
msgfmt.make(src, build_dst)
def run (self):
"""Check MANIFEST and build message files before building."""
check_manifest()
self.build_message_files()
build.run(self)
@ -322,6 +326,7 @@ class MyClean (clean, object):
"""Custom clean command."""
def run (self):
"""Remove share directory on clean."""
if self.all:
# remove share directory
directory = os.path.join("build", "share")
@ -393,10 +398,12 @@ if os.name == 'posix':
class InnoScript:
"""Class to generate INNO script."""
def __init__(self, lib_dir, dist_dir, windows_exe_files=[],
console_exe_files=[], service_exe_files=[],
comserver_files=[], lib_files=[]):
"""Store INNO script infos."""
self.lib_dir = lib_dir
self.dist_dir = dist_dir
if not self.dist_dir[-1] in "\\/":
@ -410,10 +417,12 @@ class InnoScript:
self.lib_files = [self.chop(p) for p in lib_files]
def chop(self, pathname):
"""Remove distribution directory from path name."""
assert pathname.startswith(self.dist_dir)
return pathname[len(self.dist_dir):]
def create(self, pathname="dist\\omt.iss"):
"""Create Inno script."""
self.pathname = pathname
ofi = self.file = open(pathname, "w")
print >> ofi, "; WARNING: This script has been created by py2exe. Changes to this script"
@ -455,6 +464,7 @@ class InnoScript:
print >> ofi, r'Filename: "{app}\vcredist_x86.exe"; StatusMsg: "Installing Microsoft dependencies"; Parameters: "/q:a"; Flags: waituntilterminated shellexec'
def compile(self):
"""Compile Inno script."""
import ctypes
res = ctypes.windll.shell32.ShellExecuteA(0, "compile",
self.pathname, None, None, 0)
@ -469,6 +479,7 @@ try:
You need InnoSetup for it."""
def run (self):
"""Generate py2exe installer."""
# First, let py2exe do it's work.
py2exe_build.run(self)
lib_dir = self.lib_dir
@ -492,10 +503,12 @@ try:
script.compile()
except ImportError:
class MyPy2exe:
"""Dummy py2exe class."""
pass
class MyRegister (register, object):
"""Custom register command."""
def build_post_data(self, action):
"""Force application name to lower case."""