author | PJ Eby <distutils-sig@python.org> | 2006-02-08 05:46:54 +0000 |
---|---|---|
committer | PJ Eby <distutils-sig@python.org> | 2006-02-08 05:46:54 +0000 |
commit | 81bd937426b4f47094ae50157b12f82093a8f3ef (patch) | |
tree | c6f7335fd348f13d42c246236f9306f3745e6725 /setuptools/package_index.py | |
parent | ca9ccbf6e73daf553bc41d5abd02f8ebf0d44b45 (diff) | |
download | external_python_setuptools-81bd937426b4f47094ae50157b12f82093a8f3ef.tar.gz external_python_setuptools-81bd937426b4f47094ae50157b12f82093a8f3ef.tar.bz2 external_python_setuptools-81bd937426b4f47094ae50157b12f82093a8f3ef.zip |
The ``--find-links`` option previously scanned all supplied URLs and
directories as early as possible, but now only directories and direct
archive links are scanned immediately. URLs are not retrieved unless a
package search was already going to go online due to a package not being
available locally, or due to the use of the ``--update`` or ``-U``
option. Also, fixed the ``develop`` command ignoring ``--find-links``.
--HG--
branch : setuptools
extra : convert_revision : svn%3A6015fed2-1504-0410-9fe1-9d1591cc4771/sandbox/trunk/setuptools%4042262
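A minimal usage sketch of the deferral behavior described above, assuming a stock `PackageIndex` from `setuptools.package_index`; the directory and URLs shown are placeholders:

```python
from setuptools.package_index import PackageIndex

index = PackageIndex()

# Directories and direct archive links are processed immediately;
# plain page URLs are only queued on index.to_scan for later.
index.add_find_links([
    './dist',                                  # local directory: scanned now
    'http://example.com/dl/foo-1.0.tar.gz',    # direct archive link: scanned now
    'http://example.com/download-page/',       # plain URL: deferred
])

# The deferred URLs are only retrieved once a search actually goes online
# (e.g. via obtain()/fetch()), or by flushing them explicitly:
index.prescan()    # scans everything left in to_scan, then sets it to None
```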
Diffstat (limited to 'setuptools/package_index.py')
-rwxr-xr-x | setuptools/package_index.py | 98 |
1 files changed, 49 insertions, 49 deletions
```diff
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 669692b6..c02f3b4e 100755
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -131,6 +131,7 @@ class PackageIndex(Environment):
         self.fetched_urls = {}
         self.package_pages = {}
         self.allows = re.compile('|'.join(map(translate,hosts))).match
+        self.to_scan = []
 
     def process_url(self, url, retrieve=False):
         """Evaluate a URL as a possible download, and maybe retrieve it"""
@@ -139,18 +140,8 @@ class PackageIndex(Environment):
             return
         self.scanned_urls[url] = True
         if not URL_SCHEME(url):
-            # process filenames or directories
-            if os.path.isfile(url):
-                map(self.add, distros_for_filename(url))
-                return  # no need to retrieve anything
-            elif os.path.isdir(url):
-                url = os.path.realpath(url)
-                for item in os.listdir(url):
-                    self.process_url(os.path.join(url,item))
-                return
-            else:
-                self.warn("Not found: %s", url)
-                return
+            self.process_filename(url)
+            return
         else:
             dists = list(distros_for_url(url))
             if dists:
@@ -170,6 +161,7 @@ class PackageIndex(Environment):
 
         f = self.open_url(url)
         self.fetched_urls[url] = self.fetched_urls[f.url] = True
+
         if 'html' not in f.headers['content-type'].lower():
             f.close()   # not html, we can't process it
             return
@@ -184,6 +176,21 @@ class PackageIndex(Environment):
             link = urlparse.urljoin(base, match.group(1))
             self.process_url(link)
 
+    def process_filename(self, fn, nested=False):
+        # process filenames or directories
+        if not os.path.exists(fn):
+            self.warn("Not found: %s", url)
+            return
+
+        if os.path.isdir(fn):
+            path = os.path.realpath(fn)
+            for item in os.listdir(path):
+                self.process_filename(os.path.join(path,item), True)
+
+        dists = distros_for_filename(fn)
+        if dists:
+            self.debug("Found: %s", fn)
+            map(self.add, dists)
 
     def url_ok(self, url, fatal=False):
         if self.allows(urlparse.urlparse(url)[1]):
@@ -196,13 +203,6 @@ class PackageIndex(Environment):
 
 
 
-
-
-
-
-
-
-
     def process_index(self,url,page):
         """Process the contents of a PyPI page"""
         def scan(link):
@@ -260,9 +260,11 @@ class PackageIndex(Environment):
 
     def find_packages(self, requirement):
         self.scan_url(self.index_url + requirement.unsafe_name+'/')
+
         if not self.package_pages.get(requirement.key):
             # Fall back to safe version of the name
             self.scan_url(self.index_url + requirement.project_name+'/')
+
         if not self.package_pages.get(requirement.key):
             # We couldn't find the target package, so search the index page too
             self.warn(
@@ -276,15 +278,13 @@ class PackageIndex(Environment):
             self.scan_url(url)
 
     def obtain(self, requirement, installer=None):
-        self.find_packages(requirement)
+        self.prescan(); self.find_packages(requirement)
         for dist in self[requirement.key]:
             if dist in requirement:
                 return dist
             self.debug("%s does not match %s", requirement, dist)
         return super(PackageIndex, self).obtain(requirement,installer)
-
-
 
     def check_md5(self, cs, info, filename, tfp):
         if re.match('md5=[0-9a-f]{32}$', info):
             self.debug("Validating md5 checksum for %s", filename)
@@ -296,26 +296,26 @@ class PackageIndex(Environment):
                 "; possible download problem?"
             )
 
+    def add_find_links(self, urls):
+        """Add `urls` to the list that will be prescanned for searches"""
+        for url in urls:
+            if (
+                self.to_scan is None        # if we have already "gone online"
+                or not URL_SCHEME(url)      # or it's a local file/directory
+                or url.startswith('file:')
+                or list(distros_for_url(url))   # or a direct package link
+            ):
+                # then go ahead and process it now
+                self.scan_url(url)
+            else:
+                # otherwise, defer retrieval till later
+                self.to_scan.append(url)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    def prescan(self):
+        """Scan urls scheduled for prescanning (e.g. --find-links)"""
+        if self.to_scan:
+            map(self.scan_url, self.to_scan)
+        self.to_scan = None    # from now on, go ahead and process immediately
 
 
 
 
@@ -409,13 +409,17 @@ class PackageIndex(Environment):
         )
 
         if force_scan:
+            self.prescan()
             self.find_packages(requirement)
+
+        dist = find(requirement)
+        if dist is None and self.to_scan is not None:
+            self.prescan()
             dist = find(requirement)
-        else:
+
+        if dist is None and not force_scan:
+            self.find_packages(requirement)
             dist = find(requirement)
-            if dist is None:
-                self.find_packages(requirement)
-                dist = find(requirement)
 
         if dist is None:
             self.warn(
@@ -445,10 +449,6 @@ class PackageIndex(Environment):
 
 
 
-
-
-
-
     def gen_setup(self, filename, fragment, tmpdir):
         match = EGG_FRAGMENT.match(fragment); #import pdb; pdb.set_trace()
         dists = match and [d for d in
```
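For readability, the lookup order introduced in the hunk at line 409 can be paraphrased as the control flow below. This is an illustrative sketch, not the literal method body; `find`, `requirement`, and `force_scan` stand in for locals of the surrounding download routine:

```python
def locate(index, requirement, find, force_scan=False):
    # Paraphrase of the new search order: deferred --find-links URLs are
    # only scanned once the search is going online anyway.
    if force_scan:
        index.prescan()                    # flush deferred --find-links URLs
        index.find_packages(requirement)   # and query the index pages up front

    dist = find(requirement)               # try what is already known locally

    if dist is None and index.to_scan is not None:
        index.prescan()                    # going online anyway: scan deferred URLs
        dist = find(requirement)

    if dist is None and not force_scan:
        index.find_packages(requirement)   # last resort: hit the package index
        dist = find(requirement)

    return dist
```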