author     PJ Eby <distutils-sig@python.org>   2006-12-29 00:34:24 +0000
committer  PJ Eby <distutils-sig@python.org>   2006-12-29 00:34:24 +0000
commit     a2c732458d913a8eac68ad2c19af09167e0ccbfa
tree       ba15125e4359791472f9ec622abede0b4e40c7ef  /setuptools/package_index.py
parent     df37d56dd31eaa0ea6f11f04c11a52f787a8309e
Removed all special support for Sourceforge mirrors, as Sourceforge's
mirror system now works well for non-browser downloads.
(backport from trunk)
--HG--
branch : setuptools-0.6
extra : convert_revision : svn%3A6015fed2-1504-0410-9fe1-9d1591cc4771/sandbox/branches/setuptools-0.6%4053178
Diffstat (limited to 'setuptools/package_index.py')
-rwxr-xr-x  setuptools/package_index.py  116
1 file changed, 17 insertions(+), 99 deletions(-)
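In practical terms, PackageIndex no longer accepts an sf_mirrors argument and fetches every allowed URL directly, SourceForge hosts included. Below is a minimal usage sketch of the post-patch behaviour; the index URL, download URL, and target directory are illustrative and not part of the patch.

# Minimal usage sketch (Python 2-era setuptools 0.6 API; URLs and paths are examples).
# The sf_mirrors keyword no longer exists, so PackageIndex is constructed without it,
# and downloads go straight through _attempt_download() -- no mirror rotation or retry loop.
from setuptools.package_index import PackageIndex

index = PackageIndex(index_url="http://www.python.org/pypi", hosts=('*',))
# download() returns the path of the fetched file inside the given directory.
local_file = index.download("http://example.com/somepkg-1.0.tar.gz", "/tmp")
print local_file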
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index ae816609..7e290d0c 100755
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -14,7 +14,7 @@ PYPI_MD5 = re.compile(
     '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)'
     'href="[^?]+\?:action=show_md5&digest=([0-9a-f]{32})">md5</a>\\)'
 )
-SF_DOWNLOAD = 'dl.sourceforge.net'
+
 URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match
 EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
@@ -146,27 +146,11 @@ user_agent = "Python-urllib/%s setuptools/%s" % (
 )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
 class PackageIndex(Environment):
     """A distribution index that scans web pages for download URLs"""
 
     def __init__(self, index_url="http://www.python.org/pypi", hosts=('*',),
-        sf_mirrors=None, *args, **kw
+        *args, **kw
     ):
         Environment.__init__(self,*args,**kw)
         self.index_url = index_url + "/"[:not index_url.endswith('/')]
@@ -175,39 +159,14 @@ class PackageIndex(Environment):
         self.package_pages = {}
         self.allows = re.compile('|'.join(map(translate,hosts))).match
         self.to_scan = []
-        if sf_mirrors:
-            if isinstance(sf_mirrors,str):
-                self.sf_mirrors = map(str.strip, sf_mirrors.split(','))
-            else:
-                self.sf_mirrors = map(str.strip, sf_mirrors)
-        else:
-            self.sf_mirrors = ()
-
-    def _get_mirrors(self):
-        mirrors = []
-        for mirror in self.sf_mirrors:
-            if mirror:
-                if '.' not in mirror:
-                    mirror += '.dl.sourceforge.net'
-                mirrors.append(mirror)
-        if not mirrors:
-            try:
-                mirrors.extend(
-                    socket.gethostbyname_ex('sf-mirrors.telecommunity.com')[-1]
-                )
-            except socket.error:
-                # DNS-bl0ck1n9 f1r3w4llz sUx0rs!
-                mirrors[:] = [SF_DOWNLOAD]
-
-        return mirrors
 
     def process_url(self, url, retrieve=False):
         """Evaluate a URL as a possible download, and maybe retrieve it"""
-        url = fix_sf_url(url)
         if url in self.scanned_urls and not retrieve:
             return
+
         self.scanned_urls[url] = True
         if not URL_SCHEME(url):
             self.process_filename(url)
@@ -613,47 +572,6 @@ class PackageIndex(Environment):
         pass    # no-op
 
-    def _attempt_download(self, url, filename):
-        headers = self._download_to(url, filename)
-        if 'html' in headers['content-type'].lower():
-            return self._download_html(url, headers, filename)
-        else:
-            return filename
-
-    def _retry_sf_download(self, url, filename):
-        self.url_ok(url, True)   # raises error if not allowed
-        scheme, server, path, param, query, frag = urlparse.urlparse(url)
-
-        if server == SF_DOWNLOAD:
-            mirrors = self._get_mirrors()
-            query = ''
-        else:
-            mirrors = [server]
-
-        while mirrors or server != SF_DOWNLOAD:
-            mirror = random.choice(mirrors)
-            url = urlparse.urlunparse((scheme,mirror,path,param,query,frag))
-
-            try:
-                return self._attempt_download(url, filename)
-            except (KeyboardInterrupt,SystemExit):
-                raise
-            except:
-                if server != SF_DOWNLOAD:
-                    raise
-                self.warn("Download failed: %s", sys.exc_info()[1])
-                mirrors.remove(mirror)
-
-        raise   # fail if no mirror works
-
-
-
-
-
-
-
-
-
     def open_url(self, url):
         if url.startswith('file:'):
             return local_open(url)
@@ -689,15 +607,19 @@ class PackageIndex(Environment):
         elif scheme=='file':
             return urllib2.url2pathname(urlparse.urlparse(url)[2])
         else:
-            return self._retry_sf_download(url, filename)
-
-
-
-
+            self.url_ok(url, True)   # raises error if not allowed
+            return self._attempt_download(url, filename)
 
     def scan_url(self, url):
         self.process_url(url, True)
 
+    def _attempt_download(self, url, filename):
+        headers = self._download_to(url, filename)
+        if 'html' in headers['content-type'].lower():
+            return self._download_html(url, headers, filename)
+        else:
+            return filename
+
     def _download_html(self, url, headers, filename):
         file = open(filename)
         for line in file:
@@ -728,13 +650,12 @@ class PackageIndex(Environment):
     def warn(self, msg, *args):
         log.warn(msg, *args)
 
+
+
+
+
 def fix_sf_url(url):
-    scheme, server, path, param, query, frag = urlparse.urlparse(url)
-    if server!='prdownloads.sourceforge.net':
-        return url
-    return urlparse.urlunparse(
-        (scheme, SF_DOWNLOAD, 'sourceforge'+path, param, '', frag)
-    )
+    return url      # backward compatibility
 
 def local_open(url):
     """Read a local path, with special support for directories"""
@@ -773,7 +694,4 @@ def local_open(url):
 
 
 
-
-
-
 # this line is a kludge to keep the trailing blank lines for pje's editor
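Note that fix_sf_url is kept only as a no-op so that existing imports keep working. A small sketch of what a caller now sees (the URL below is made up for illustration):

# fix_sf_url() is retained for backward compatibility but no longer rewrites
# prdownloads.sourceforge.net URLs to a mirror host; it returns its argument as-is.
from setuptools.package_index import fix_sf_url

url = "http://prdownloads.sourceforge.net/someproject/somepkg-1.0.zip"  # example URL
assert fix_sf_url(url) == url   # no mirror rewriting happens any more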