2003-07-04 14:24:44 +00:00
|
|
|
# -*- coding: iso-8859-1 -*-
|
2001-03-15 01:19:35 +00:00
|
|
|
"""Handle FTP links"""
|
2004-01-03 14:59:33 +00:00
|
|
|
# Copyright (C) 2000-2004 Bastian Kleineidam
|
2001-03-15 01:19:35 +00:00
|
|
|
#
|
2001-05-23 21:20:44 +00:00
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
|
# the Free Software Foundation; either version 2 of the License, or
|
|
|
|
|
# (at your option) any later version.
|
2001-03-15 01:19:35 +00:00
|
|
|
#
|
2001-05-23 21:20:44 +00:00
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
|
# GNU General Public License for more details.
|
2001-03-15 01:19:35 +00:00
|
|
|
#
|
2001-05-23 21:20:44 +00:00
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
|
# along with this program; if not, write to the Free Software
|
|
|
|
|
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
|
2000-11-11 00:38:04 +00:00
|
|
|
|
2003-01-05 22:55:48 +00:00
|
|
|
import ftplib, i18n
|
2003-01-22 19:50:13 +00:00
|
|
|
from linkcheck import extensions, LinkCheckerError
|
2003-01-11 12:48:14 +00:00
|
|
|
from debug import *
|
2003-01-05 12:38:34 +00:00
|
|
|
from urllib import splitpasswd
|
2002-10-15 00:59:12 +00:00
|
|
|
from ProxyUrlData import ProxyUrlData
|
|
|
|
|
from HttpUrlData import HttpUrlData
|
|
|
|
|
from UrlData import ExcList
|
2000-05-28 23:49:42 +00:00
|
|
|
|
2000-06-11 19:33:01 +00:00
|
|
|
# FTP protocol errors are expected check failures, not crashes:
# register them in the global exception list so the checker treats
# them as link errors.
ExcList.extend([
    ftplib.error_reply,
    ftplib.error_temp,
    ftplib.error_perm,
    ftplib.error_proto,
])
|
2000-02-26 10:24:46 +00:00
|
|
|
|
2002-10-15 00:59:12 +00:00
|
|
|
class FtpUrlData (ProxyUrlData):
    """
    Url link with ftp scheme.
    """

    def checkConnection (self):
        """Check an ftp url.

        If an http proxy is configured for the ftp scheme, delegate the
        whole check to HttpUrlData (only http proxies are supported).
        Otherwise log in with the url's (or configured) credentials,
        change to the target directory and initiate retrieval of the
        target file.

        @raise: LinkCheckerError if no credentials are available
        """
        # proxy support (we support only http)
        self.setProxy(self.config["proxy"].get(self.scheme))
        if self.proxy:
            http = HttpUrlData(self.urlName,
                               self.recursionLevel,
                               self.config,
                               parentName=self.parentName,
                               baseRef=self.baseRef,
                               line=self.line,
                               column=self.column,
                               name=self.name)
            http.buildUrl()
            return http.check()
        # using no proxy here
        # get login credentials
        if self.userinfo:
            _user, _password = splitpasswd(self.userinfo)
        else:
            _user, _password = self.getUserPassword()
        if _user is None or _password is None:
            raise LinkCheckerError(i18n._("No user or password found"))
        self.login(_user, _password)
        filename = self.cwd()
        if filename:
            self.retrieve(filename)
        return None

    def isHtml (self):
        """Return True iff the url path has a known html extension."""
        return bool(extensions['html'].search(self.url))

    def isParseable (self):
        """Return True iff the url path has any known parseable extension."""
        for ro in extensions.values():
            if ro.search(self.url):
                return True
        return False

    def parseUrl (self):
        """Dispatch to the parse_<key> method matching the url's extension,
        or return None if no extension matches."""
        for key, ro in extensions.items():
            if ro.search(self.url):
                return getattr(self, "parse_"+key)()
        return None

    def login (self, _user, _password):
        """log into ftp server and check the welcome message"""
        # ready to connect
        try:
            self.urlConnection = ftplib.FTP()
            self.urlConnection.set_debuglevel(get_debuglevel())
            self.urlConnection.connect(self.urlparts[1])
            self.urlConnection.login(_user, _password)
        except EOFError:
            raise LinkCheckerError(i18n._("Remote host has closed connection"))
        if not self.urlConnection.getwelcome():
            self.closeConnection()
            raise LinkCheckerError(i18n._("Got no answer from FTP server"))
        # dont set info anymore, this may change every time we logged in
        #self.setInfo(info)

    def cwd (self):
        """change directory to given path"""
        # leeched from webcheck
        dirs = self.urlparts[2].split('/')
        filename = dirs.pop()
        # drop the empty first component produced by a leading slash
        if len(dirs) and not dirs[0]: del dirs[0]
        for d in dirs:
            self.urlConnection.cwd(d)
        return filename

    def retrieve (self, filename):
        """initiate download of given filename"""
        # it could be a directory if the trailing slash was forgotten
        try:
            self.urlConnection.cwd(filename)
            self.setWarning(i18n._("Missing trailing directory slash in ftp url"))
            return
        except ftplib.error_perm:
            pass
        self.urlConnection.voidcmd('TYPE I')
        conn, size = self.urlConnection.ntransfercmd('RETR %s' % filename)
        if size:
            self.dlsize = size
        # dont download data XXX recursion
        #page = conn.makefile().read(size)
        #else:
        #    page = conn.makefile().read()
        # BUGFIX: close the data connection we opened with ntransfercmd;
        # the payload is never read, so leaving it open leaked a socket
        # per checked file.
        conn.close()

    def closeConnection (self):
        """Quietly close and drop the ftp connection, if any."""
        # BUGFIX: this called self.urlConnection.closet() -- a typo for
        # close() whose AttributeError was swallowed by a bare except,
        # so the control connection was never actually closed.
        try:
            self.urlConnection.close()
        except Exception:
            # best effort only; the connection may already be dead
            pass
        self.urlConnection = None