2003-07-04 14:24:44 +00:00
|
|
|
# -*- coding: iso-8859-1 -*-
|
2001-03-15 01:19:35 +00:00
|
|
|
"""Handle local file: links"""
|
2004-01-03 14:59:33 +00:00
|
|
|
# Copyright (C) 2000-2004 Bastian Kleineidam
|
2001-03-15 01:19:35 +00:00
|
|
|
#
|
2001-05-23 21:20:44 +00:00
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
|
# the Free Software Foundation; either version 2 of the License, or
|
|
|
|
|
# (at your option) any later version.
|
2001-03-15 01:19:35 +00:00
|
|
|
#
|
2001-05-23 21:20:44 +00:00
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
|
# GNU General Public License for more details.
|
2001-03-15 01:19:35 +00:00
|
|
|
#
|
2001-05-23 21:20:44 +00:00
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
|
# along with this program; if not, write to the Free Software
|
|
|
|
|
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
|
2000-11-11 00:38:04 +00:00
|
|
|
|
2003-06-24 20:57:57 +00:00
|
|
|
import re, os, urlparse
|
2003-01-05 21:00:32 +00:00
|
|
|
from linkcheck import extensions
|
2001-11-20 20:27:25 +00:00
|
|
|
from UrlData import UrlData, ExcList
|
2000-02-26 10:24:46 +00:00
|
|
|
|
2001-11-20 20:27:25 +00:00
|
|
|
# OSError is thrown on Windows when a file is not found
# (register it so the generic url-check error handling catches it;
# ExcList is the shared list of catchable exceptions from UrlData)
ExcList.append(OSError)
|
|
|
|
|
|
2002-03-14 21:18:52 +00:00
|
|
|
# if file extension was fruitless, look at the content:
# map of parser key -> compiled regex matched against the first few
# bytes of a file's content.
contents = {
    # all call sites only inspect getContent()[:20], so requiring the
    # closing </html> inside that 20-byte prefix (as the old pattern
    # r'(?i)<html>.*</html>' did) could never match a real HTML file;
    # detecting the opening tag is sufficient here
    "html": re.compile(r'(?i)<html'),
    "opera": re.compile(r'Opera Hotlist'),
    # "text" : re.compile(r'[\w\s]+'),
}
|
|
|
|
|
|
|
|
|
|
_schemes = r"""(
|
|
|
|
|
acap # application configuration access protocol
|
|
|
|
|
|afs # Andrew File System global file names
|
|
|
|
|
|cid # content identifier
|
|
|
|
|
|data # data
|
|
|
|
|
|dav # dav
|
|
|
|
|
|fax # fax
|
|
|
|
|
|imap # internet message access protocol
|
|
|
|
|
|ldap # Lightweight Directory Access Protocol
|
|
|
|
|
|mailserver # Access to data available from mail servers
|
|
|
|
|
|mid # message identifier
|
|
|
|
|
|modem # modem
|
|
|
|
|
|nfs # network file system protocol
|
|
|
|
|
|opaquelocktoken # opaquelocktoken
|
|
|
|
|
|pop # Post Office Protocol v3
|
|
|
|
|
|prospero # Prospero Directory Service
|
|
|
|
|
|rtsp # real time streaming protocol
|
|
|
|
|
|service # service location
|
|
|
|
|
|sip # session initiation protocol
|
|
|
|
|
|tel # telephone
|
|
|
|
|
|tip # Transaction Internet Protocol
|
|
|
|
|
|tn3270 # Interactive 3270 emulation sessions
|
|
|
|
|
|vemmi # versatile multimedia interface
|
|
|
|
|
|wais # Wide Area Information Servers
|
|
|
|
|
|z39\.50r # Z39.50 Retrieval
|
|
|
|
|
|z39\.50s # Z39.50 Session
|
|
|
|
|
|chrome # Mozilla specific
|
|
|
|
|
|find # Mozilla specific
|
|
|
|
|
|clsid # Microsoft specific
|
|
|
|
|
|javascript # JavaScript
|
|
|
|
|
|isbn # ISBN (int. book numbers)
|
|
|
|
|
|https? # HTTP/HTTPS
|
|
|
|
|
|ftp # FTP
|
|
|
|
|
|file # local file
|
|
|
|
|
|telnet # telnet
|
|
|
|
|
|mailto # mailto
|
|
|
|
|
|gopher # gopher
|
|
|
|
|
|s?news # news
|
|
|
|
|
|nntp # news
|
|
|
|
|
)"""
|
|
|
|
|
|
|
|
|
|
class FileUrlData (UrlData):
    """Url link with file scheme"""
|
|
|
|
|
|
2002-05-04 13:27:02 +00:00
|
|
|
def __init__ (self,
|
|
|
|
|
urlName,
|
|
|
|
|
config,
|
|
|
|
|
recursionLevel,
|
|
|
|
|
parentName = None,
|
2002-11-25 22:29:07 +00:00
|
|
|
baseRef = None, line=0, column=0, name=""):
|
2003-08-11 13:19:39 +00:00
|
|
|
super(FileUrlData, self).__init__(urlName, config, recursionLevel,
|
|
|
|
|
parentName=parentName, baseRef=baseRef,
|
|
|
|
|
line=line, column=column, name=name)
|
2003-08-11 11:18:33 +00:00
|
|
|
if not (parentName or baseRef or self.urlName.startswith("file:")):
|
2000-12-22 16:25:32 +00:00
|
|
|
self.urlName = os.path.expanduser(self.urlName)
|
2003-10-10 13:30:44 +00:00
|
|
|
if not self.urlName.startswith("/"):
|
2003-08-11 11:18:33 +00:00
|
|
|
self.urlName = os.getcwd()+"/"+self.urlName
|
2003-10-10 13:30:44 +00:00
|
|
|
self.urlName = "file://"+self.urlName
|
|
|
|
|
self.urlName = self.urlName.replace("\\", "/")
|
|
|
|
|
# transform c:/windows into /c|/windows
|
|
|
|
|
self.urlName = re.sub(r"^file://(/?)([a-zA-Z]):", r"file:///\2|",
|
|
|
|
|
self.urlName)
|
2000-02-26 10:24:46 +00:00
|
|
|
|
|
|
|
|
|
2002-03-14 21:18:52 +00:00
|
|
|
def buildUrl (self):
|
2003-08-11 13:19:39 +00:00
|
|
|
super(FileUrlData, self).buildUrl()
|
2002-12-06 17:57:57 +00:00
|
|
|
# ignore query and fragment url parts for filesystem urls
|
2002-12-06 16:10:00 +00:00
|
|
|
self.urlparts[3] = self.urlparts[4] = ''
|
2002-12-06 17:57:57 +00:00
|
|
|
self.url = urlparse.urlunsplit(self.urlparts)
|
|
|
|
|
|
|
|
|
|
|
2003-07-28 12:05:24 +00:00
|
|
|
def getCacheKeys (self):
|
2002-12-07 00:45:31 +00:00
|
|
|
# the host in urlparts is lowercase()d
|
2002-12-06 17:57:57 +00:00
|
|
|
if self.urlparts:
|
|
|
|
|
self.urlparts[4] = self.anchor
|
|
|
|
|
key = urlparse.urlunsplit(self.urlparts)
|
|
|
|
|
self.urlparts[4] = ''
|
2003-07-28 12:05:24 +00:00
|
|
|
return [key]
|
|
|
|
|
return []
|
2000-02-26 10:24:46 +00:00
|
|
|
|
|
|
|
|
|
2002-03-14 21:18:52 +00:00
|
|
|
def isHtml (self):
|
2003-10-17 10:53:48 +00:00
|
|
|
if extensions['html'].search(self.url):
|
|
|
|
|
return True
|
|
|
|
|
if contents['html'].search(self.getContent()[:20]):
|
|
|
|
|
return True
|
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def isParseable (self):
|
2002-03-14 21:18:52 +00:00
|
|
|
# guess by extension
|
2003-01-05 21:00:32 +00:00
|
|
|
for ro in extensions.values():
|
2002-03-14 21:18:52 +00:00
|
|
|
if ro.search(self.url):
|
2003-08-11 12:29:11 +00:00
|
|
|
return True
|
2001-08-23 14:06:46 +00:00
|
|
|
# try to read content (can fail, so catch error)
|
2001-08-22 15:09:10 +00:00
|
|
|
try:
|
2002-03-14 21:18:52 +00:00
|
|
|
for ro in contents.values():
|
2002-08-21 21:59:25 +00:00
|
|
|
if ro.search(self.getContent()[:20]):
|
2003-08-11 12:29:11 +00:00
|
|
|
return True
|
2001-08-22 15:09:10 +00:00
|
|
|
except IOError:
|
|
|
|
|
pass
|
2003-08-11 12:29:11 +00:00
|
|
|
return False
|
2001-08-22 13:50:17 +00:00
|
|
|
|
|
|
|
|
|
2002-05-04 13:27:02 +00:00
|
|
|
def parseUrl (self):
|
2003-01-05 21:00:32 +00:00
|
|
|
for key,ro in extensions.items():
|
2002-03-14 21:18:52 +00:00
|
|
|
if ro.search(self.url):
|
2002-05-04 13:27:02 +00:00
|
|
|
return getattr(self, "parse_"+key)()
|
2002-03-14 21:18:52 +00:00
|
|
|
for key,ro in contents.items():
|
2002-08-21 21:59:25 +00:00
|
|
|
if ro.search(self.getContent()[:20]):
|
2002-05-04 13:27:02 +00:00
|
|
|
return getattr(self, "parse_"+key)()
|
2002-06-26 20:46:42 +00:00
|
|
|
return None
|