deadlocks fixed

git-svn-id: https://linkchecker.svn.sourceforge.net/svnroot/linkchecker/trunk/linkchecker@117 e7d03fd6-7b0d-0410-9947-9c21f3af8025
This commit is contained in:
calvin 2000-06-18 09:57:25 +00:00
parent cd89f5e6a9
commit ae8fec2d78
5 changed files with 72 additions and 59 deletions

6
debian/changelog vendored
View file

@ -4,7 +4,7 @@ linkchecker (1.2.3) unstable; urgency=low
* added some source code documentation
* improved error messages for wrong options
* configuration file options for logger output
* linkchecker.bat installation support for windows
* linkchecker.bat installation support for Windows
* included test suite in distribution
* blacklist output support
* CSV output support
@ -19,11 +19,11 @@ linkchecker (1.2.3) unstable; urgency=low
* Debian package is now lintian clean
* Only catch some exceptions in main check loop so the KeyboardInterrupt
exception propagates through
* Disable threading on non-POSIX systems
* Renice the main thread loop by sleep()ing some time
* New function config.reset()
* fix deadlock with news: URLs
-- Bastian Kleineidam <calvin@users.sourceforge.net> Mon, 12 Jun 2000 14:50:44 +0200
-- Bastian Kleineidam <calvin@users.sourceforge.net> Sun, 18 Jun 2000 00:23:44 +0200
linkchecker (1.2.2) unstable; urgency=low

View file

@ -152,14 +152,11 @@ class Configuration(UserDict.UserDict):
self.data["nntpserver"] = os.environ.get("NNTP_SERVER",None)
self.urlCache = {}
self.robotsTxtCache = {}
if os.name=='posix':
try:
import threading
self.enableThreading(10)
except ImportError:
type, value = sys.exc_info()[:2]
self.disableThreading()
else:
try:
import threading
self.enableThreading(10)
except ImportError:
type, value = sys.exc_info()[:2]
self.disableThreading()
def disableThreading(self):
@ -276,9 +273,11 @@ class Configuration(UserDict.UserDict):
def connectNntp_Threads(self):
if not self.data.has_key("nntp"):
self.dataLock.acquire()
self._do_connectNntp()
self.dataLock.release()
try:
self.dataLock.acquire()
self._do_connectNntp()
finally:
self.dataLock.release()
def _do_connectNntp(self):
"""This is done only once per checking task."""
@ -286,7 +285,7 @@ class Configuration(UserDict.UserDict):
timeout = 1
while timeout:
try:
self.data["nntp"] = nntplib.NNTP(self.data["nntpserver"] or "")
self.data["nntp"]=nntplib.NNTP(self.data["nntpserver"] or "")
timeout = 0
except nntplib.error_perm:
value = sys.exc_info()[1]
@ -303,7 +302,8 @@ class Configuration(UserDict.UserDict):
def finished_Threads(self):
time.sleep(0.1)
self.threader.reduceThreads()
return not self.hasMoreUrls() and self.threader.finished()
debug("finished?\n")
return self.threader.finished() and self.urls.empty()
def finish_Threads(self):
self.threader.finish()
@ -318,45 +318,55 @@ class Configuration(UserDict.UserDict):
self.threader.startThread(url.check, (self,))
def urlCache_has_key_Threads(self, key):
self.urlCacheLock.acquire()
ret = self.urlCache.has_key(key)
self.urlCacheLock.release()
return ret
try:
self.urlCacheLock.acquire()
return self.urlCache.has_key(key)
finally:
self.urlCacheLock.release()
def urlCache_get_Threads(self, key):
self.urlCacheLock.acquire()
ret = self.urlCache[key]
self.urlCacheLock.release()
return ret
try:
self.urlCacheLock.acquire()
return self.urlCache[key]
finally:
self.urlCacheLock.release()
def urlCache_set_Threads(self, key, val):
self.urlCacheLock.acquire()
self.urlCache[key] = val
self.urlCacheLock.release()
try:
self.urlCacheLock.acquire()
self.urlCache[key] = val
finally:
self.urlCacheLock.release()
def robotsTxtCache_has_key_Threads(self, key):
self.robotsTxtCacheLock.acquire()
ret = self.robotsTxtCache.has_key(key)
self.robotsTxtCacheLock.release()
return ret
try:
self.robotsTxtCacheLock.acquire()
return self.robotsTxtCache.has_key(key)
finally:
self.robotsTxtCacheLock.release()
def robotsTxtCache_get_Threads(self, key):
self.robotsTxtCacheLock.acquire()
ret = self.robotsTxtCache[key]
self.robotsTxtCacheLock.release()
return ret
try:
self.robotsTxtCacheLock.acquire()
return self.robotsTxtCache[key]
finally:
self.robotsTxtCacheLock.release()
def robotsTxtCache_set_Threads(self, key, val):
self.robotsTxtCacheLock.acquire()
self.robotsTxtCache[key] = val
self.robotsTxtCacheLock.release()
try:
self.robotsTxtCacheLock.acquire()
self.robotsTxtCache[key] = val
finally:
self.robotsTxtCacheLock.release()
def log_newUrl_Threads(self, url):
self.logLock.acquire()
if not self.data["quiet"]: self.data["log"].newUrl(url)
for log in self.data["fileoutput"]:
log.newUrl(url)
self.logLock.release()
try:
self.logLock.acquire()
if not self.data["quiet"]: self.data["log"].newUrl(url)
for log in self.data["fileoutput"]:
log.newUrl(url)
finally:
self.logLock.release()
def read(self, files = []):
if not files:

View file

@ -74,8 +74,9 @@ class StandardLogger:
"""
def __init__(self, **args):
self.errors=0
self.warnings=0
self.errors = 0
self.warnings = 0
self.linknumber = 0
if args.has_key('fileoutput'):
self.fd = open(args['filename'], "w")
elif args.has_key('fd'):
@ -159,7 +160,7 @@ class HtmlLogger(StandardLogger):
"""Logger with HTML output"""
def __init__(self, **args):
StandardLogger.__init__(self, args)
apply(StandardLogger.__init__, (self,), args)
self.colorbackground = args['colorbackground']
self.colorurl = args['colorurl']
self.colorborder = args['colorborder']
@ -268,7 +269,7 @@ class ColoredLogger(StandardLogger):
"""ANSI colorized output"""
def __init__(self, **args):
StandardLogger.__init__(self, args)
apply(StandardLogger.__init__, (self,), args)
self.colorparent = args['colorparent']
self.colorurl = args['colorurl']
self.colorreal = args['colorreal']
@ -375,7 +376,7 @@ class GMLLogger(StandardLogger):
your sitemap graph.
"""
def __init__(self, **args):
StandardLogger.__init__(self, args)
apply(StandardLogger.__init__, (self,), args)
self.nodes = []
def init(self):
@ -433,7 +434,7 @@ class GMLLogger(StandardLogger):
class SQLLogger(StandardLogger):
""" SQL output for PostgreSQL, not tested"""
def __init__(self, **args):
StandardLogger.__init__(self, args)
apply(StandardLogger.__init__, (self,), args)
self.dbname = args['dbname']
self.separator = args['separator']
@ -510,7 +511,7 @@ class CSVLogger(StandardLogger):
separated by a semicolon.
"""
def __init__(self, **args):
StandardLogger.__init__(self, args)
apply(StandardLogger.__init__, (self,), args)
self.separator = args['separator']
def init(self):

View file

@ -243,7 +243,8 @@ class UrlData:
if not (anchor!="" and self.isHtml() and self.valid):
return
self.getContent()
for cur_anchor,line in self.searchInForTag("a", "name"):
for cur_anchor,line in self.searchInForTag(
re.compile(_linkMatcher % ("a", "name"), re.VERBOSE)):
if cur_anchor == anchor:
return
self.setWarning("anchor #"+anchor+" not found")

View file

@ -11,7 +11,6 @@ except ImportError:
except ImportError:
pass
_bs = [
['\023\335\233\203\2323\016',
'\023\335\233\215\324\244\016',
@ -57,7 +56,7 @@ def abbuzze():
config_curses()
my,mx = w.getmaxyx()
b = w.subwin(my-2, mx, 0, 0)
s = w.subwin(my-2, 0)
s = w.subwin(2, mx, my-2, 0)
bs = nassmache(_bs)
ss = nassmache(_ss)
allahopp(s, nassmache(_1))
@ -74,10 +73,11 @@ def abbuzze():
_curses.endwin()
def config_curses():
_curses.nonl() # tell curses not to do NL->CR/NL on output
_curses.noecho() # don't echo input
_curses.cbreak() # take input chars one at a time, no wait for \n
_curses.meta(1) # allow 8-bit chars
_curses.nonl() # tell curses not to do NL->CR/NL on output
_curses.noecho() # don't echo input
_curses.cbreak() # take input chars one at a time, no wait for \n
if hasattr(_curses, "meta"):
_curses.meta(1) # allow 8-bit chars
if hasattr(_curses, "start_color"):
_curses.start_color() # start the colour system
if _curses.has_colors():
@ -120,7 +120,8 @@ def tadaaa(w, l):
def hotzenplotz(w,y,x,l):
for li in l:
w.addstr(y, x, li)
w.move(y,x)
w.addstr(li)
y = y+1
def wischi(w, ls):
@ -144,7 +145,7 @@ def waschi(w, l):
def abspann(w):
w.erase()
w.border()
w.border(0, 0, 0, 0, 0, 0, 0, 0)
w.refresh()
w1 = w.subwin(1, 20, 3, 4)
w2 = w.subwin(1, 20, 5, 4)