Add get_index_html utility function.

git-svn-id: https://linkchecker.svn.sourceforge.net/svnroot/linkchecker/trunk/linkchecker@1837 e7d03fd6-7b0d-0410-9947-9c21f3af8025
This commit is contained in:
calvin 2004-09-20 17:48:52 +00:00
parent 33499da426
commit 6bb7a08eb5

View file

@ -18,10 +18,13 @@
import time
import sys
import os
import cgi
import socket
import select
import re
import urlparse
import urllib
import nntplib
import ftplib
@ -218,3 +221,15 @@ def get_url_from (base_url, recursion_level, consumer,
return klass(base_url, recursion_level, consumer,
parent_url=parent_url, base_ref=base_ref,
line=line, column=column, name=name)
def get_index_html (urls):
    """Construct artificial index.html from given URLs.

    Each URL becomes one anchor: the href is the percent-encoded
    (and then HTML-escaped) URL, the link text is the HTML-escaped
    original string.  Lines are joined with the platform line separator.
    """
    anchors = [
        '<a href="%s">%s</a>' % (cgi.escape(urllib.quote(entry)),
                                 cgi.escape(entry))
        for entry in urls
    ]
    return os.linesep.join(["<html>", "<body>"] + anchors +
                           ["</body>", "</html>"])