Use correct value for status output of checked links

git-svn-id: https://linkchecker.svn.sourceforge.net/svnroot/linkchecker/trunk/linkchecker@2773 e7d03fd6-7b0d-0410-9947-9c21f3af8025
This commit is contained in:
calvin 2005-07-17 23:26:13 +00:00
parent 9fa3a1216c
commit b94eec79ef
2 changed files with 10 additions and 6 deletions

View file

@ -44,6 +44,11 @@
Type: feature
Changed: linkchecker, linkcheck/logger/*xml*.py
* Use correct number of checked URLs in status output.
Type: bugfix
Closes: SF bug #1239943
Changed: linkcheck/checker/consumer.py
3.0 "The Jacket" (released 8.7.2005)
* Catch all check errors, not just the ones inside of URL checking.

View file

@ -67,6 +67,8 @@ class Consumer (object):
Initialize consumer data and threads.
"""
super(Consumer, self).__init__()
# number of consumed URLs
self._number = 0
self._config = config
self._cache = cache
self._threader = linkcheck.threader.Threader(num=config['threads'])
@ -114,7 +116,7 @@ class Consumer (object):
# log before putting it in the cache (otherwise we would see
# a "(cached)" after every url
self._log_url(url_data)
if not url_data.cached and url_data.caching:
if url_data.caching and not url_data.cached:
self._cache.checked_add(url_data)
else:
self._cache.in_progress_remove(url_data)
@ -178,7 +180,7 @@ class Consumer (object):
# avoid deadlock by requesting cache data before locking
print >> stderr, _("Status:"),
print_active(self._threader.active_threads())
print_links(self._config['logger'].number)
print_links(self._number)
print_tocheck(self._cache.incoming_len())
print_duration(curtime - start_time)
print >> stderr
@ -200,6 +202,7 @@ class Consumer (object):
"""
Send new url to all configured loggers.
"""
self._number += 1
has_warnings = False
for tag, content in url_data.warnings:
if tag not in self._config["ignorewarnings"]:
@ -210,10 +213,6 @@ class Consumer (object):
self._config['logger'].log_filter_url(url_data, do_print)
for log in self._config['fileoutput']:
log.log_filter_url(url_data, do_print)
# do_filter = (self.linknumber % 1000) == 0
# XXX deadlock!
#if do_filter:
# self.filter_queue(self)
@synchronized(_lock)
def end_log_output (self):