author     Philip Thiem <ptthiem@gmail.com>  2013-07-20 18:10:50 -0500
committer  Philip Thiem <ptthiem@gmail.com>  2013-07-20 18:10:50 -0500
commit     a6e7ef0ae1e0ea1f147a2196efe7f2b82c301fe2 (patch)
tree       8f73e0285965d86cbb6b8d8bc599789fd4a924ef
parent     37c48a4da11b40a5a8a3c801525b637bb2934df1 (diff)
parent     70067439d3b2b53cf2112ed0faf52c30b30ef3cd (diff)
download   external_python_setuptools-a6e7ef0ae1e0ea1f147a2196efe7f2b82c301fe2.tar.gz
           external_python_setuptools-a6e7ef0ae1e0ea1f147a2196efe7f2b82c301fe2.tar.bz2
           external_python_setuptools-a6e7ef0ae1e0ea1f147a2196efe7f2b82c301fe2.zip
Merge with default
--HG-- extra : rebase_source : 15517dca4272e2b088930cb5599f5822cef13bae
-rw-r--r--  .hgtags                                     22
-rw-r--r--  CHANGES.txt                                376
-rw-r--r--  CONTRIBUTORS.txt                             1
-rwxr-xr-x  README.txt                                  14
-rw-r--r--  _markerlib/markers.py                        4
-rw-r--r--  docs/conf.py                                 5
-rw-r--r--  ez_setup.py                                  8
-rw-r--r--  pkg_resources.py                            42
-rw-r--r--  release.py                                 303
-rwxr-xr-x  setup.py                                    70
-rw-r--r--  setuptools.egg-info/entry_points.txt       124
-rw-r--r--  setuptools/__init__.py                       2
-rw-r--r--  setuptools/_backport/__init__.py             0
-rw-r--r--  setuptools/_backport/hashlib/__init__.py   146
-rw-r--r--  setuptools/_backport/hashlib/_sha.py       359
-rw-r--r--  setuptools/_backport/hashlib/_sha256.py    260
-rw-r--r--  setuptools/_backport/hashlib/_sha512.py    288
-rwxr-xr-x  setuptools/package_index.py                108
-rw-r--r--  setuptools/py24compat.py                     6
-rwxr-xr-x  setuptools/sandbox.py                        6
-rw-r--r--  setuptools/ssl_support.py                    9
-rw-r--r--  setuptools/tests/test_markerlib.py           4
-rw-r--r--  setuptools/tests/test_packageindex.py       39
-rw-r--r--  tests/test_pkg_resources.py                 10
24 files changed, 1652 insertions, 554 deletions
diff --git a/.hgtags b/.hgtags
index 5b6d4cd9..bb4ffc14 100644
--- a/.hgtags
+++ b/.hgtags
@@ -65,3 +65,25 @@ d04c05f035e3a5636006fc34f4be7e6c77035d17 0.7.2
d212e48e0cef689acba57ed017289c027660b23c 0.7.3
74c6c12268059986f9cc0b535399594f1d131201 0.8b1
85640475dda0621f20e11db0995fa07f51744a98 0.7.4
+b57e5ba934767dd498669b17551678081b3047b5 0.6.46
+dd5bbc116c53d3732d22f983e7ca6d8cfabd3b08 0.7.5
+512744f3f306aea0fdde4cfd600af8b2d6e773e7 0.8b2
+8af9839a76407eebf3610fcd3e7973f1625abaa2 0.8b3
+ee2c967017024197b38e39ced852808265387a4b 0.6.47
+48d3d26cbea68e21c96e51f01092e8fdead5cd60 0.7.6
+5b3c7981a02b4a86af1b10ae16492899b515d485 0.8b4
+cae9127e0534fc46d7ddbc11f68dc88fd9311459 0.6.48
+1506fa538fff01e70424530a32a44e070720cf3c 0.7.7
+5679393794978a1d3e1e087472b8a0fdf3d8423c 0.8b5
+26f59ec0f0f69714d28a891aaad048e3b9fcd6f7 0.8b6
+f657df1f1ed46596d236376649c99a470662b4ba 0.6.49
+236de1de68b14230036147c7c9e7c09b215b53ee 0.7.8
+979d598822bc64b05fb177a2ba221e75ee5b44d3 0.8b7
+e3d70539e79f39a97f69674ab038661961a1eb43 0.8
+3078b1e566399bf0c5590f3528df03d0c23a0777 0.9
+9e5a8f734662dd36e6fd6e4ba9031d0e2d294632 0.9.1
+37444bb32e172aaacbc0aeafdf5a778ee471723d 0.9.2
+3e9d2e89de3aa499382d6be2ec8b64d2a29f7f13 0.9.3
+1aef141fc968113e4c521d1edf6ea863c4ff7e00 0.9.4
+88e3d6788facbb2dd6467a23c4f35529a5ce20a1 0.9.5
+acc6c5d61d0f82040c237ac7ea010c0fc9e67d66 0.9.6
diff --git a/CHANGES.txt b/CHANGES.txt
index bed92b35..6e993ada 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -2,6 +2,52 @@
CHANGES
=======
+-----
+0.9.6
+-----
+
+* Issue #44: Test failure on Python 2.4 when MD5 hash doesn't have a `.name`
+ attribute.
+
+-----
+0.9.5
+-----
+
+* Python #17980: Fix security vulnerability in SSL certificate validation.
+
+-----
+0.9.4
+-----
+
+* Issue #43: Fix issue (introduced in 0.9.1) with version resolution when
+ upgrading over other releases of Setuptools.
+
+-----
+0.9.3
+-----
+
+* Issue #42: Fix new ``AttributeError`` introduced in last fix.
+
+-----
+0.9.2
+-----
+
+* Issue #42: Fix regression where blank checksums would trigger an
+ ``AttributeError``.
+
+-----
+0.9.1
+-----
+
+* Distribute #386: Allow other positional and keyword arguments to os.open.
+* Corrected dependency on certifi mis-referenced in 0.9.
+
+---
+0.9
+---
+
+* `package_index` now validates hashes other than MD5 in download links.
+
---
0.8
---
@@ -10,10 +56,33 @@ CHANGES
conversion.
-----
+0.7.8
+-----
+
+* Distribute #375: Yet another fix for yet another regression.
+
+-----
+0.7.7
+-----
+
+* Distribute #375: Repair AttributeError created in last release (redo).
+* Issue #30: Added test for get_cache_path.
+
+-----
+0.7.6
+-----
+
+* Distribute #375: Repair AttributeError created in last release.
+
+-----
0.7.5
-----
* Issue #21: Restore Python 2.4 compatibility in ``test_easy_install``.
+* Distribute #375: Merged additional warning from Distribute 0.6.46.
+* Now honor the environment variable
+ ``SETUPTOOLS_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT`` in addition to the now
+ deprecated ``DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT``.
-----
0.7.4
@@ -66,10 +135,40 @@ Added several features that were slated for setuptools 0.6c12:
* Issue #3: Fixed NameError in SSL support.
------
+0.6.49
+------
+
+* Move warning check in ``get_cache_path`` to follow the directory creation
+ to avoid errors when the cache path does not yet exist. Fixes the error
+ reported in Distribute #375.
+
+------
+0.6.48
+------
+
+* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in
+ 0.6.46 (redo).
+
+------
+0.6.47
+------
+
+* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in
+ 0.6.46.
+
+------
+0.6.46
+------
+
+* Distribute #375: Issue a warning if the PYTHON_EGG_CACHE or otherwise
+ customized egg cache location specifies a directory that's group- or
+ world-writable.
+
+------
0.6.45
------
-* Issue #379: ``distribute_setup.py`` now traps VersionConflict as well,
+* Distribute #379: ``distribute_setup.py`` now traps VersionConflict as well,
restoring ability to upgrade from an older setuptools version.
------
@@ -83,21 +182,21 @@ Added several features that were slated for setuptools 0.6c12:
0.6.43
------
-* Issue #378: Restore support for Python 2.4 Syntax (regression in 0.6.42).
+* Distribute #378: Restore support for Python 2.4 Syntax (regression in 0.6.42).
------
0.6.42
------
* External links finder no longer yields duplicate links.
-* Issue #337: Moved site.py to setuptools/site-patch.py (graft of very old
+* Distribute #337: Moved site.py to setuptools/site-patch.py (graft of very old
patch from setuptools trunk which inspired PR #31).
------
0.6.41
------
-* Issue #27: Use public api for loading resources from zip files rather than
+* Distribute #27: Use public api for loading resources from zip files rather than
the private method `_zip_directory_cache`.
* Added a new function ``easy_install.get_win_launcher`` which may be used by
third-party libraries such as buildout to get a suitable script launcher.
@@ -106,7 +205,7 @@ Added several features that were slated for setuptools 0.6c12:
0.6.40
------
-* Issue #376: brought back cli.exe and gui.exe that were deleted in the
+* Distribute #376: brought back cli.exe and gui.exe that were deleted in the
previous release.
------
@@ -117,7 +216,7 @@ Added several features that were slated for setuptools 0.6c12:
* Fix possible issue in GUI launchers where the subsystem was not supplied to
the linker.
* Launcher build script now refactored for robustness.
-* Issue #375: Resources extracted from a zip egg to the file system now also
+* Distribute #375: Resources extracted from a zip egg to the file system now also
check the contents of the file against the zip contents during each
invocation of get_resource_filename.
@@ -125,13 +224,13 @@ Added several features that were slated for setuptools 0.6c12:
0.6.38
------
-* Issue #371: The launcher manifest file is now installed properly.
+* Distribute #371: The launcher manifest file is now installed properly.
------
0.6.37
------
-* Issue #143: Launcher scripts, including easy_install itself, are now
+* Distribute #143: Launcher scripts, including easy_install itself, are now
accompanied by a manifest on 32-bit Windows environments to avoid the
Installer Detection Technology and thus undesirable UAC elevation described
in `this Microsoft article
@@ -141,8 +240,7 @@ Added several features that were slated for setuptools 0.6c12:
0.6.36
------
-* Pull Request #35: In `Buildout issue 64
- <https://github.com/buildout/buildout/issues/64>`_, it was reported that
+* Pull Request #35: In Buildout #64, it was reported that
under Python 3, installation of distutils scripts could attempt to copy
the ``__pycache__`` directory as a file, causing an error, apparently only
under Windows. Easy_install now skips all directories when processing
@@ -156,7 +254,7 @@ Added several features that were slated for setuptools 0.6c12:
Note this release is backward-incompatible with distribute 0.6.23-0.6.34 in
how it parses version numbers.
-* Issue #278: Restored compatibility with distribute 0.6.22 and setuptools
+* Distribute #278: Restored compatibility with distribute 0.6.22 and setuptools
0.6. Updated the documentation to match more closely with the version
parsing as intended in setuptools 0.6.
@@ -164,7 +262,7 @@ how it parses version numbers.
0.6.34
------
-* Issue #341: 0.6.33 fails to build under Python 2.4.
+* Distribute #341: 0.6.33 fails to build under Python 2.4.
------
0.6.33
@@ -173,11 +271,11 @@ how it parses version numbers.
* Fix 2 errors with Jython 2.5.
* Fix 1 failure with Jython 2.5 and 2.7.
* Disable workaround for Jython scripts on Linux systems.
-* Issue #336: `setup.py` no longer masks failure exit code when tests fail.
+* Distribute #336: `setup.py` no longer masks failure exit code when tests fail.
* Fix issue in pkg_resources where try/except around a platform-dependent
import would trigger hook load failures on Mercurial. See pull request 32
for details.
-* Issue #341: Fix a ResourceWarning.
+* Distribute #341: Fix a ResourceWarning.
------
0.6.32
@@ -185,19 +283,18 @@ how it parses version numbers.
* Fix test suite with Python 2.6.
* Fix some DeprecationWarnings and ResourceWarnings.
-* Issue #335: Backed out `setup_requires` superceding installed requirements
+* Distribute #335: Backed out `setup_requires` superceding installed requirements
until regression can be addressed.
------
0.6.31
------
-* Issue #303: Make sure the manifest only ever contains UTF-8 in Python 3.
-* Issue #329: Properly close files created by tests for compatibility with
+* Distribute #303: Make sure the manifest only ever contains UTF-8 in Python 3.
+* Distribute #329: Properly close files created by tests for compatibility with
Jython.
-* Work around Jython bugs `#1980 <http://bugs.jython.org/issue1980>`_ and
- `#1981 <http://bugs.jython.org/issue1981>`_.
-* Issue #334: Provide workaround for packages that reference `sys.__stdout__`
+* Work around Jython #1980 and Jython #1981.
+* Distribute #334: Provide workaround for packages that reference `sys.__stdout__`
such as numpy does. This change should address
`virtualenv #359 <https://github.com/pypa/virtualenv/issues/359>`_ as long
as the system encoding is UTF-8 or the IO encoding is specified in the
@@ -206,7 +303,7 @@ how it parses version numbers.
PYTHONIOENCODING=utf8 pip install numpy
* Fix for encoding issue when installing from Windows executable on Python 3.
-* Issue #323: Allow `setup_requires` requirements to supercede installed
+* Distribute #323: Allow `setup_requires` requirements to supercede installed
requirements. Added some new keyword arguments to existing pkg_resources
methods. Also had to updated how __path__ is handled for namespace packages
to ensure that when a new egg distribution containing a namespace package is
@@ -218,7 +315,7 @@ how it parses version numbers.
0.6.30
------
-* Issue #328: Clean up temporary directories in distribute_setup.py.
+* Distribute #328: Clean up temporary directories in distribute_setup.py.
* Fix fatal bug in distribute_setup.py.
------
@@ -226,28 +323,28 @@ how it parses version numbers.
------
* Pull Request #14: Honor file permissions in zip files.
-* Issue #327: Merged pull request #24 to fix a dependency problem with pip.
+* Distribute #327: Merged pull request #24 to fix a dependency problem with pip.
* Merged pull request #23 to fix https://github.com/pypa/virtualenv/issues/301.
* If Sphinx is installed, the `upload_docs` command now runs `build_sphinx`
to produce uploadable documentation.
-* Issue #326: `upload_docs` provided mangled auth credentials under Python 3.
-* Issue #320: Fix check for "createable" in distribute_setup.py.
-* Issue #305: Remove a warning that was triggered during normal operations.
-* Issue #311: Print metadata in UTF-8 independent of platform.
-* Issue #303: Read manifest file with UTF-8 encoding under Python 3.
-* Issue #301: Allow to run tests of namespace packages when using 2to3.
-* Issue #304: Prevent import loop in site.py under Python 3.3.
-* Issue #283: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3.
-* Issue #299: The develop command didn't work on Python 3, when using 2to3,
+* Distribute #326: `upload_docs` provided mangled auth credentials under Python 3.
+* Distribute #320: Fix check for "createable" in distribute_setup.py.
+* Distribute #305: Remove a warning that was triggered during normal operations.
+* Distribute #311: Print metadata in UTF-8 independent of platform.
+* Distribute #303: Read manifest file with UTF-8 encoding under Python 3.
+* Distribute #301: Allow to run tests of namespace packages when using 2to3.
+* Distribute #304: Prevent import loop in site.py under Python 3.3.
+* Distribute #283: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3.
+* Distribute #299: The develop command didn't work on Python 3, when using 2to3,
as the egg link would go to the Python 2 source. Linking to the 2to3'd code
in build/lib makes it work, although you will have to rebuild the module
before testing it.
-* Issue #306: Even if 2to3 is used, we build in-place under Python 2.
-* Issue #307: Prints the full path when .svn/entries is broken.
-* Issue #313: Support for sdist subcommands (Python 2.7)
-* Issue #314: test_local_index() would fail an OS X.
-* Issue #310: Non-ascii characters in a namespace __init__.py causes errors.
-* Issue #218: Improved documentation on behavior of `package_data` and
+* Distribute #306: Even if 2to3 is used, we build in-place under Python 2.
+* Distribute #307: Prints the full path when .svn/entries is broken.
+* Distribute #313: Support for sdist subcommands (Python 2.7)
+* Distribute #314: test_local_index() would fail an OS X.
+* Distribute #310: Non-ascii characters in a namespace __init__.py causes errors.
+* Distribute #218: Improved documentation on behavior of `package_data` and
`include_package_data`. Files indicated by `package_data` are now included
in the manifest.
* `distribute_setup.py` now allows a `--download-base` argument for retrieving
@@ -257,10 +354,10 @@ how it parses version numbers.
0.6.28
------
-* Issue #294: setup.py can now be invoked from any directory.
+* Distribute #294: setup.py can now be invoked from any directory.
* Scripts are now installed honoring the umask.
* Added support for .dist-info directories.
-* Issue #283: Fix and disable scanning of `*.pyc` / `*.pyo` files on
+* Distribute #283: Fix and disable scanning of `*.pyc` / `*.pyo` files on
Python 3.3.
------
@@ -271,15 +368,15 @@ how it parses version numbers.
* Distribute now recognizes README.rst as a standard, default readme file.
* Exclude 'encodings' modules when removing modules from sys.modules.
Workaround for #285.
-* Issue #231: Don't fiddle with system python when used with buildout
+* Distribute #231: Don't fiddle with system python when used with buildout
(bootstrap.py)
------
0.6.26
------
-* Issue #183: Symlinked files are now extracted from source distributions.
-* Issue #227: Easy_install fetch parameters are now passed during the
+* Distribute #183: Symlinked files are now extracted from source distributions.
+* Distribute #227: Easy_install fetch parameters are now passed during the
installation of a source distribution; now fulfillment of setup_requires
dependencies will honor the parameters passed to easy_install.
@@ -287,65 +384,65 @@ how it parses version numbers.
0.6.25
------
-* Issue #258: Workaround a cache issue
-* Issue #260: distribute_setup.py now accepts the --user parameter for
+* Distribute #258: Workaround a cache issue
+* Distribute #260: distribute_setup.py now accepts the --user parameter for
Python 2.6 and later.
-* Issue #262: package_index.open_with_auth no longer throws LookupError
+* Distribute #262: package_index.open_with_auth no longer throws LookupError
on Python 3.
-* Issue #269: AttributeError when an exception occurs reading Manifest.in
+* Distribute #269: AttributeError when an exception occurs reading Manifest.in
on late releases of Python.
-* Issue #272: Prevent TypeError when namespace package names are unicode
+* Distribute #272: Prevent TypeError when namespace package names are unicode
and single-install-externally-managed is used. Also fixes PIP issue
449.
-* Issue #273: Legacy script launchers now install with Python2/3 support.
+* Distribute #273: Legacy script launchers now install with Python2/3 support.
------
0.6.24
------
-* Issue #249: Added options to exclude 2to3 fixers
+* Distribute #249: Added options to exclude 2to3 fixers
------
0.6.23
------
-* Issue #244: Fixed a test
-* Issue #243: Fixed a test
-* Issue #239: Fixed a test
-* Issue #240: Fixed a test
-* Issue #241: Fixed a test
-* Issue #237: Fixed a test
-* Issue #238: easy_install now uses 64bit executable wrappers on 64bit Python
-* Issue #208: Fixed parsed_versions, it now honors post-releases as noted in the documentation
-* Issue #207: Windows cli and gui wrappers pass CTRL-C to child python process
-* Issue #227: easy_install now passes its arguments to setup.py bdist_egg
-* Issue #225: Fixed a NameError on Python 2.5, 2.4
+* Distribute #244: Fixed a test
+* Distribute #243: Fixed a test
+* Distribute #239: Fixed a test
+* Distribute #240: Fixed a test
+* Distribute #241: Fixed a test
+* Distribute #237: Fixed a test
+* Distribute #238: easy_install now uses 64bit executable wrappers on 64bit Python
+* Distribute #208: Fixed parsed_versions, it now honors post-releases as noted in the documentation
+* Distribute #207: Windows cli and gui wrappers pass CTRL-C to child python process
+* Distribute #227: easy_install now passes its arguments to setup.py bdist_egg
+* Distribute #225: Fixed a NameError on Python 2.5, 2.4
------
0.6.21
------
-* Issue #225: FIxed a regression on py2.4
+* Distribute #225: FIxed a regression on py2.4
------
0.6.20
------
-* Issue #135: Include url in warning when processing URLs in package_index.
-* Issue #212: Fix issue where easy_instal fails on Python 3 on windows installer.
-* Issue #213: Fix typo in documentation.
+* Distribute #135: Include url in warning when processing URLs in package_index.
+* Distribute #212: Fix issue where easy_instal fails on Python 3 on windows installer.
+* Distribute #213: Fix typo in documentation.
------
0.6.19
------
-* Issue 206: AttributeError: 'HTTPMessage' object has no attribute 'getheaders'
+* Distribute #206: AttributeError: 'HTTPMessage' object has no attribute 'getheaders'
------
0.6.18
------
-* Issue 210: Fixed a regression introduced by Issue 204 fix.
+* Distribute #210: Fixed a regression introduced by Distribute #204 fix.
------
0.6.17
@@ -354,21 +451,21 @@ how it parses version numbers.
* Support 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT' environment
variable to allow to disable installation of easy_install-${version} script.
* Support Python >=3.1.4 and >=3.2.1.
-* Issue 204: Don't try to import the parent of a namespace package in
+* Distribute #204: Don't try to import the parent of a namespace package in
declare_namespace
-* Issue 196: Tolerate responses with multiple Content-Length headers
-* Issue 205: Sandboxing doesn't preserve working_set. Leads to setup_requires
+* Distribute #196: Tolerate responses with multiple Content-Length headers
+* Distribute #205: Sandboxing doesn't preserve working_set. Leads to setup_requires
problems.
------
0.6.16
------
-* Builds sdist gztar even on Windows (avoiding Issue 193).
-* Issue 192: Fixed metadata omitted on Windows when package_dir
+* Builds sdist gztar even on Windows (avoiding Distribute #193).
+* Distribute #192: Fixed metadata omitted on Windows when package_dir
specified with forward-slash.
-* Issue 195: Cython build support.
-* Issue 200: Issues with recognizing 64-bit packages on Windows.
+* Distribute #195: Cython build support.
+* Distribute #200: Issues with recognizing 64-bit packages on Windows.
------
0.6.15
@@ -376,49 +473,49 @@ how it parses version numbers.
* Fixed typo in bdist_egg
* Several issues under Python 3 has been solved.
-* Issue 146: Fixed missing DLL files after easy_install of windows exe package.
+* Distribute #146: Fixed missing DLL files after easy_install of windows exe package.
------
0.6.14
------
-* Issue 170: Fixed unittest failure. Thanks to Toshio.
-* Issue 171: Fixed race condition in unittests cause deadlocks in test suite.
-* Issue 143: Fixed a lookup issue with easy_install.
+* Distribute #170: Fixed unittest failure. Thanks to Toshio.
+* Distribute #171: Fixed race condition in unittests cause deadlocks in test suite.
+* Distribute #143: Fixed a lookup issue with easy_install.
Thanks to David and Zooko.
-* Issue 174: Fixed the edit mode when its used with setuptools itself
+* Distribute #174: Fixed the edit mode when its used with setuptools itself
------
0.6.13
------
-* Issue 160: 2.7 gives ValueError("Invalid IPv6 URL")
-* Issue 150: Fixed using ~/.local even in a --no-site-packages virtualenv
-* Issue 163: scan index links before external links, and don't use the md5 when
+* Distribute #160: 2.7 gives ValueError("Invalid IPv6 URL")
+* Distribute #150: Fixed using ~/.local even in a --no-site-packages virtualenv
+* Distribute #163: scan index links before external links, and don't use the md5 when
comparing two distributions
------
0.6.12
------
-* Issue 149: Fixed various failures on 2.3/2.4
+* Distribute #149: Fixed various failures on 2.3/2.4
------
0.6.11
------
* Found another case of SandboxViolation - fixed
-* Issue 15 and 48: Introduced a socket timeout of 15 seconds on url openings
+* Distribute #15 and Distribute #48: Introduced a socket timeout of 15 seconds on url openings
* Added indexsidebar.html into MANIFEST.in
-* Issue 108: Fixed TypeError with Python3.1
-* Issue 121: Fixed --help install command trying to actually install.
-* Issue 112: Added an os.makedirs so that Tarek's solution will work.
-* Issue 133: Added --no-find-links to easy_install
+* Distribute #108: Fixed TypeError with Python3.1
+* Distribute #121: Fixed --help install command trying to actually install.
+* Distribute #112: Added an os.makedirs so that Tarek's solution will work.
+* Distribute #133: Added --no-find-links to easy_install
* Added easy_install --user
-* Issue 100: Fixed develop --user not taking '.' in PYTHONPATH into account
-* Issue 134: removed spurious UserWarnings. Patch by VanLindberg
-* Issue 138: cant_write_to_target error when setup_requires is used.
-* Issue 147: respect the sys.dont_write_bytecode flag
+* Distribute #100: Fixed develop --user not taking '.' in PYTHONPATH into account
+* Distribute #134: removed spurious UserWarnings. Patch by VanLindberg
+* Distribute #138: cant_write_to_target error when setup_requires is used.
+* Distribute #147: respect the sys.dont_write_bytecode flag
------
0.6.10
@@ -432,27 +529,27 @@ how it parses version numbers.
0.6.9
-----
-* Issue 90: unknown setuptools version can be added in the working set
-* Issue 87: setupt.py doesn't try to convert distribute_setup.py anymore
+* Distribute #90: unknown setuptools version can be added in the working set
+* Distribute #87: setupt.py doesn't try to convert distribute_setup.py anymore
Initial Patch by arfrever.
-* Issue 89: added a side bar with a download link to the doc.
-* Issue 86: fixed missing sentence in pkg_resources doc.
+* Distribute #89: added a side bar with a download link to the doc.
+* Distribute #86: fixed missing sentence in pkg_resources doc.
* Added a nicer error message when a DistributionNotFound is raised.
-* Issue 80: test_develop now works with Python 3.1
-* Issue 93: upload_docs now works if there is an empty sub-directory.
-* Issue 70: exec bit on non-exec files
-* Issue 99: now the standalone easy_install command doesn't uses a
+* Distribute #80: test_develop now works with Python 3.1
+* Distribute #93: upload_docs now works if there is an empty sub-directory.
+* Distribute #70: exec bit on non-exec files
+* Distribute #99: now the standalone easy_install command doesn't uses a
"setup.cfg" if any exists in the working directory. It will use it
only if triggered by ``install_requires`` from a setup.py call
(install, develop, etc).
-* Issue 101: Allowing ``os.devnull`` in Sandbox
-* Issue 92: Fixed the "no eggs" found error with MacPort
+* Distribute #101: Allowing ``os.devnull`` in Sandbox
+* Distribute #92: Fixed the "no eggs" found error with MacPort
(platform.mac_ver() fails)
-* Issue 103: test_get_script_header_jython_workaround not run
+* Distribute #103: test_get_script_header_jython_workaround not run
anymore under py3 with C or POSIX local. Contributed by Arfrever.
-* Issue 104: remvoved the assertion when the installation fails,
+* Distribute #104: remvoved the assertion when the installation fails,
with a nicer message for the end user.
-* Issue 100: making sure there's no SandboxViolation when
+* Distribute #100: making sure there's no SandboxViolation when
the setup script patches setuptools.
-----
@@ -466,8 +563,8 @@ how it parses version numbers.
0.6.7
-----
-* Issue 58: Added --user support to the develop command
-* Issue 11: Generated scripts now wrap their call to the script entry point
+* Distribute #58: Added --user support to the develop command
+* Distribute #11: Generated scripts now wrap their call to the script entry point
in the standard "if name == 'main'"
* Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv
can drive an installation that doesn't patch a global setuptools.
@@ -475,17 +572,17 @@ how it parses version numbers.
http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719
and determined that it no longer applies. Distribute should work fine with
Unladen Swallow 2009Q3.
-* Issue 21: Allow PackageIndex.open_url to gracefully handle all cases of a
+* Distribute #21: Allow PackageIndex.open_url to gracefully handle all cases of a
httplib.HTTPException instead of just InvalidURL and BadStatusLine.
* Removed virtual-python.py from this distribution and updated documentation
to point to the actively maintained virtualenv instead.
-* Issue 64: use_setuptools no longer rebuilds the distribute egg every
+* Distribute #64: use_setuptools no longer rebuilds the distribute egg every
time it is run
* use_setuptools now properly respects the requested version
* use_setuptools will no longer try to import a distribute egg for the
wrong Python version
-* Issue 74: no_fake should be True by default.
-* Issue 72: avoid a bootstrapping issue with easy_install -U
+* Distribute #74: no_fake should be True by default.
+* Distribute #72: avoid a bootstrapping issue with easy_install -U
-----
0.6.6
@@ -498,10 +595,10 @@ how it parses version numbers.
0.6.5
-----
-* Issue 65: cli.exe and gui.exe are now generated at build time,
+* Distribute #65: cli.exe and gui.exe are now generated at build time,
depending on the platform in use.
-* Issue 67: Fixed doc typo (PEP 381/382)
+* Distribute #67: Fixed doc typo (PEP 381/382)
* Distribute no longer shadows setuptools if we require a 0.7-series
setuptools. And an error is raised when installing a 0.7 setuptools with
@@ -518,10 +615,10 @@ how it parses version numbers.
-----
* Added the generation of `distribute_setup_3k.py` during the release.
- This closes issue #52.
+ This closes Distribute #52.
* Added an upload_docs command to easily upload project documentation to
- PyPI's https://pythonhosted.org. This close issue #56.
+ PyPI's https://pythonhosted.org. This close issue Distribute #56.
* Fixed a bootstrap bug on the use_setuptools() API.
@@ -547,29 +644,29 @@ setuptools
==========
* Added Python 3 support; see docs/python3.txt.
- This closes http://bugs.python.org/setuptools/issue39.
+ This closes Old Setuptools #39.
* Added option to run 2to3 automatically when installing on Python 3.
- This closes issue #31.
+ This closes issue Distribute #31.
* Fixed invalid usage of requirement.parse, that broke develop -d.
- This closes http://bugs.python.org/setuptools/issue44.
+ This closes Old Setuptools #44.
* Fixed script launcher for 64-bit Windows.
- This closes http://bugs.python.org/setuptools/issue2.
+ This closes Old Setuptools #2.
* KeyError when compiling extensions.
- This closes http://bugs.python.org/setuptools/issue41.
+ This closes Old Setuptools #41.
bootstrapping
=============
-* Fixed bootstrap not working on Windows. This closes issue #49.
+* Fixed bootstrap not working on Windows. This closes issue Distribute #49.
-* Fixed 2.6 dependencies. This closes issue #50.
+* Fixed 2.6 dependencies. This closes issue Distribute #50.
* Make sure setuptools is patched when running through easy_install
- This closes http://bugs.python.org/setuptools/issue40.
+ This closes Old Setuptools #40.
-----
0.6.1
@@ -579,14 +676,13 @@ setuptools
==========
* package_index.urlopen now catches BadStatusLine and malformed url errors.
- This closes issue #16 and issue #18.
+ This closes Distribute #16 and Distribute #18.
-* zip_ok is now False by default. This closes
- http://bugs.python.org/setuptools/issue33.
+* zip_ok is now False by default. This closes Old Setuptools #33.
-* Fixed invalid URL error catching. http://bugs.python.org/setuptools/issue20.
+* Fixed invalid URL error catching. Old Setuptools #20.
-* Fixed invalid bootstraping with easy_install installation (issue #40).
+* Fixed invalid bootstraping with easy_install installation (Distribute #40).
Thanks to Florian Schulze for the help.
* Removed buildout/bootstrap.py. A new repository will create a specific
@@ -598,7 +694,7 @@ bootstrapping
* The boostrap process leave setuptools alone if detected in the system
and --root or --prefix is provided, but is not in the same location.
- This closes issue #10.
+ This closes Distribute #10.
---
0.6
@@ -608,18 +704,18 @@ setuptools
==========
* Packages required at build time where not fully present at install time.
- This closes issue #12.
+ This closes Distribute #12.
-* Protected against failures in tarfile extraction. This closes issue #10.
+* Protected against failures in tarfile extraction. This closes Distribute #10.
-* Made Jython api_tests.txt doctest compatible. This closes issue #7.
+* Made Jython api_tests.txt doctest compatible. This closes Distribute #7.
* sandbox.py replaced builtin type file with builtin function open. This
- closes issue #6.
+ closes Distribute #6.
-* Immediately close all file handles. This closes issue #3.
+* Immediately close all file handles. This closes Distribute #3.
-* Added compatibility with Subversion 1.6. This references issue #1.
+* Added compatibility with Subversion 1.6. This references Distribute #1.
pkg_resources
=============
@@ -628,18 +724,18 @@ pkg_resources
instead. Based on a patch from ronaldoussoren. This closes issue #5.
* Fixed a SandboxViolation for mkdir that could occur in certain cases.
- This closes issue #13.
+ This closes Distribute #13.
* Allow to find_on_path on systems with tight permissions to fail gracefully.
- This closes issue #9.
+ This closes Distribute #9.
* Corrected inconsistency between documentation and code of add_entry.
- This closes issue #8.
+ This closes Distribute #8.
-* Immediately close all file handles. This closes issue #3.
+* Immediately close all file handles. This closes Distribute #3.
easy_install
============
-* Immediately close all file handles. This closes issue #3.
+* Immediately close all file handles. This closes Distribute #3.
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index 8515babe..f1966505 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -9,6 +9,7 @@ Contributors
* Daniel Stutzbach
* Daniel Holth
* Dirley Rodrigues
+* Donald Stufft
* Grigory Petrov
* Hanno Schlichting
* Jannis Leidel
diff --git a/README.txt b/README.txt
index 7fe2d64a..53608bae 100755
--- a/README.txt
+++ b/README.txt
@@ -29,7 +29,7 @@ The recommended way to install setuptools on Windows is to download
`ez_setup.py`_ and run it. The script will download the appropriate .egg
file and install it for you.
-.. _ez_setup.py: https://bitbucket.org/pypa/setuptools/raw/0.8/ez_setup.py
+.. _ez_setup.py: https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py
For best results, uninstall previous versions FIRST (see `Uninstalling`_).
@@ -45,7 +45,7 @@ Unix-based Systems including Mac OS X
Download `ez_setup.py`_ and run it using the target Python version. The script
will download the appropriate version and install it for you::
- > wget https://bitbucket.org/pypa/setuptools/raw/0.8/ez_setup.py -O - | python
+ > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py -O - | python
Note that you will may need to invoke the command with superuser privileges to
install to the system Python.
@@ -53,7 +53,7 @@ install to the system Python.
Alternatively, on Python 2.6 and later, Setuptools may be installed to a
user-local path::
- > wget https://bitbucket.org/pypa/setuptools/raw/0.8/ez_setup.py
+ > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py
> python ez_setup.py --user
@@ -66,7 +66,7 @@ tarball from `Setuptools on PyPI <https://pypi.python.org/pypi/setuptools>`_
and run setup.py with any supported distutils and Setuptools options.
For example::
- setuptools-0.8$ python setup.py --prefix=/opt/setuptools
+ setuptools-x.x$ python setup.py --prefix=/opt/setuptools
Use ``--help`` to get a full options list, but we recommend consulting
the `EasyInstall manual`_ for detailed instructions, especially `the section
@@ -157,7 +157,7 @@ Credits
aspects of ``easy_install``, and supplied the doctests for the command-line
``.exe`` wrappers on Windows.
-* Phillip J. Eby is the principal author and maintainer of setuptools, and
+* Phillip J. Eby is the seminal author of setuptools, and
first proposed the idea of an importable binary distribution format for
Python application plug-ins.
@@ -167,4 +167,8 @@ Credits
"Code Bear" Taylor) contributed their time and stress as guinea pigs for the
use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!)
+* Since the merge with Distribute, Jason R. Coombs is the
+ maintainer of setuptools. The project is maintained in coordination with
+ the Python Packaging Authority (PyPA) and the larger Python community.
+
.. _files:
diff --git a/_markerlib/markers.py b/_markerlib/markers.py
index c93d7f3b..fa837061 100644
--- a/_markerlib/markers.py
+++ b/_markerlib/markers.py
@@ -49,6 +49,10 @@ _VARS = {'sys.platform': sys.platform,
'extra': None # wheel extension
}
+for var in list(_VARS.keys()):
+ if '.' in var:
+ _VARS[var.replace('.', '_')] = _VARS[var]
+
def default_environment():
"""Return copy of default PEP 385 globals dictionary."""
return dict(_VARS)
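The _markerlib hunk above aliases each dotted marker variable (e.g. ``sys.platform``) to an underscore-separated spelling (``sys_platform``). A small, self-contained sketch of that aliasing, using a trimmed-down copy of the ``_VARS`` mapping from the diff (the subset chosen here is for illustration only)::

    import sys
    import platform

    # Trimmed-down copy of the marker environment shown in the diff above.
    _VARS = {
        'sys.platform': sys.platform,
        'platform.version': platform.version(),
        'extra': None,
    }

    # The added loop: every dotted name gains an underscore-separated alias.
    for var in list(_VARS.keys()):
        if '.' in var:
            _VARS[var.replace('.', '_')] = _VARS[var]

    assert _VARS['sys_platform'] == _VARS['sys.platform']
    assert _VARS['platform_version'] == _VARS['platform.version']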
diff --git a/docs/conf.py b/docs/conf.py
index 3fccd87f..fbdb8b51 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -15,6 +15,7 @@
# serve to show the default.
import sys, os
+import setuptools
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
@@ -48,9 +49,9 @@ copyright = '2009-2013, The fellowship of the packaging'
# built documents.
#
# The short X.Y version.
-version = '0.8'
+version = setuptools.__version__
# The full version, including alpha/beta/rc tags.
-release = '0.8'
+release = setuptools.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/ez_setup.py b/ez_setup.py
index c225c6ae..55434eb7 100644
--- a/ez_setup.py
+++ b/ez_setup.py
@@ -28,7 +28,7 @@ try:
except ImportError:
USER_SITE = None
-DEFAULT_VERSION = "0.8"
+DEFAULT_VERSION = "0.9.7"
DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
def _python_cmd(*args):
@@ -100,6 +100,12 @@ def _do_download(version, download_base, to_dir, download_delay):
to_dir, download_delay)
_build_egg(egg, tarball, to_dir)
sys.path.insert(0, egg)
+
+ # Remove previously-imported pkg_resources if present (see
+ # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
+ if 'pkg_resources' in sys.modules:
+ del sys.modules['pkg_resources']
+
import setuptools
setuptools.bootstrap_install_from = egg
diff --git a/pkg_resources.py b/pkg_resources.py
index 3dc85525..36a0e6ed 100644
--- a/pkg_resources.py
+++ b/pkg_resources.py
@@ -14,6 +14,8 @@ method.
"""
import sys, os, time, re, imp, types, zipfile, zipimport
+import warnings
+import stat
try:
from urlparse import urlparse, urlunparse
except ImportError:
@@ -26,7 +28,7 @@ except NameError:
try:
basestring
next = lambda o: o.next()
- from cStringIO import StringIO
+ from cStringIO import StringIO as BytesIO
def exec_(code, globs=None, locs=None):
if globs is None:
frame = sys._getframe(1)
@@ -39,7 +41,7 @@ try:
exec("""exec code in globs, locs""")
except NameError:
basestring = str
- from io import StringIO
+ from io import BytesIO
exec_ = eval("exec")
def execfile(fn, globs=None, locs=None):
if globs is None:
@@ -1022,9 +1024,34 @@ variable to point to an accessible directory.
except:
self.extraction_error()
+ self._warn_unsafe_extraction_path(extract_path)
+
self.cached_files[target_path] = 1
return target_path
+ @staticmethod
+ def _warn_unsafe_extraction_path(path):
+ """
+ If the default extraction path is overridden and set to an insecure
+ location, such as /tmp, it opens up an opportunity for an attacker to
+ replace an extracted file with an unauthorized payload. Warn the user
+ if a known insecure location is used.
+
+ See Distribute #375 for more details.
+ """
+ if os.name == 'nt' and not path.startswith(os.environ['windir']):
+ # On Windows, permissions are generally restrictive by default
+ # and temp directories are not writable by other users, so
+ # bypass the warning.
+ return
+ mode = os.stat(path).st_mode
+ if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
+ msg = ("%s is writable by group/others and vulnerable to attack "
+ "when "
+ "used with get_resource_filename. Consider a more secure "
+ "location (set with .set_extraction_path or the "
+ "PYTHON_EGG_CACHE environment variable)." % path)
+ warnings.warn(msg, UserWarning)
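The new ``_warn_unsafe_extraction_path`` helper above warns when the egg extraction cache is group- or world-writable (Distribute #375). As a hedged illustration, not part of the patch: on POSIX systems a user could avoid the warning by pointing ``PYTHON_EGG_CACHE`` at an owner-only directory before anything is extracted::

    import os
    import stat
    import tempfile

    # Hypothetical cache location; any private, owner-writable path works.
    cache = os.path.join(tempfile.gettempdir(), 'egg-cache-%d' % os.getuid())
    if not os.path.isdir(cache):
        os.makedirs(cache)
    os.chmod(cache, stat.S_IRWXU)           # 0700: no group/other write bits
    os.environ['PYTHON_EGG_CACHE'] = cache  # read by pkg_resources at extraction time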
@@ -1376,7 +1403,7 @@ class NullProvider:
return self._fn(self.module_path, resource_name)
def get_resource_stream(self, manager, resource_name):
- return StringIO(self.get_resource_string(manager, resource_name))
+ return BytesIO(self.get_resource_string(manager, resource_name))
def get_resource_string(self, manager, resource_name):
return self._get(self._fn(self.module_path, resource_name))
@@ -1946,15 +1973,6 @@ def find_in_zip(importer, path_item, only=False):
register_finder(zipimport.zipimporter, find_in_zip)
-def StringIO(*args, **kw):
- """Thunk to load the real StringIO on demand"""
- global StringIO
- try:
- from cStringIO import StringIO
- except ImportError:
- from io import StringIO
- return StringIO(*args,**kw)
-
def find_nothing(importer, path_item, only=False):
return ()
register_finder(object,find_nothing)
diff --git a/release.py b/release.py
index 175e1463..bad7e0ee 100644
--- a/release.py
+++ b/release.py
@@ -1,274 +1,61 @@
-#!/usr/bin/env python
-
"""
-Script to fully automate the release process. Requires Python 2.6+
-with sphinx installed and the 'hg' command on the path.
+Setuptools is released using 'jaraco.packaging.release'. To make a release,
+install jaraco.packaging and run 'python -m jaraco.packaging.release'
"""
-from __future__ import print_function
-
-import subprocess
-import shutil
-import os
-import sys
-import getpass
-import collections
-import itertools
import re
+import os
+import subprocess
-try:
- import urllib.request as urllib_request
-except ImportError:
- import urllib2 as urllib_request
-
-try:
- input = raw_input
-except NameError:
- pass
-
-try:
- import keyring
-except Exception:
- pass
-
-VERSION = '0.8'
-PACKAGE_INDEX = 'https://pypi.python.org/pypi'
-
-def set_versions():
- global VERSION
- version = input("Release as version [%s]> " % VERSION) or VERSION
- if version != VERSION:
- VERSION = bump_versions(version)
-
-def infer_next_version(version):
- """
- Infer a next version from the current version by incrementing the last
- number or appending a number.
-
- >>> infer_next_version('1.0')
- '1.1'
-
- >>> infer_next_version('1.0b')
- '1.0b1'
-
- >>> infer_next_version('1.0.9')
- '1.0.10'
-
- >>> infer_next_version('1')
- '2'
- >>> infer_next_version('')
- '1'
- """
- def incr(match):
- ver = int(match.group(0) or '0')
- return str(ver + 1)
- return re.sub('\d*$', incr, version)
+def before_upload():
+ _linkify('CHANGES.txt', 'CHANGES (links).txt')
+ _add_bootstrap_bookmark()
files_with_versions = (
- 'docs/conf.py', 'setup.py', 'release.py', 'ez_setup.py', 'README.txt',
- 'setuptools/__init__.py',
+ 'ez_setup.py', 'setuptools/__init__.py',
)
-def get_repo_name():
- """
- Get the repo name from the hgrc default path.
- """
- default = subprocess.check_output('hg paths default').strip()
- parts = default.split('/')
- if parts[-1] == '':
- parts.pop()
- return '/'.join(parts[-2:])
-
-def get_mercurial_creds(system='https://bitbucket.org', username=None):
- """
- Return named tuple of username,password in much the same way that
- Mercurial would (from the keyring).
- """
- # todo: consider getting this from .hgrc
- username = username or getpass.getuser()
- keyring_username = '@@'.join((username, system))
- system = 'Mercurial'
- password = (
- keyring.get_password(system, keyring_username)
- if 'keyring' in globals()
- else None
- )
- if not password:
- password = getpass.getpass()
- Credential = collections.namedtuple('Credential', 'username password')
- return Credential(username, password)
-
-def add_milestone_and_version(version):
- auth = 'Basic ' + ':'.join(get_mercurial_creds()).encode('base64').strip()
- headers = {
- 'Authorization': auth,
- }
- base = 'https://api.bitbucket.org'
- for type in 'milestones', 'versions':
- url = (base + '/1.0/repositories/{repo}/issues/{type}'
- .format(repo = get_repo_name(), type=type))
- req = urllib_request.Request(url = url, headers = headers,
- data='name='+version)
- try:
- urllib_request.urlopen(req)
- except urllib_request.HTTPError as e:
- print(e.fp.read())
-
-def bump_versions(target_ver):
- for filename in files_with_versions:
- bump_version(filename, target_ver)
- subprocess.check_call(['hg', 'ci', '-m',
- 'Bumped to {target_ver} in preparation for next '
- 'release.'.format(**vars())])
- return target_ver
-
-def bump_version(filename, target_ver):
- with open(filename, 'rb') as f:
- lines = [
- line.replace(VERSION.encode('ascii'), target_ver.encode('ascii'))
- for line in f
- ]
- with open(filename, 'wb') as f:
- f.writelines(lines)
-
-def do_release():
- assert all(map(os.path.exists, files_with_versions)), (
- "Expected file(s) missing")
-
- assert has_sphinx(), "You must have Sphinx installed to release"
-
- set_versions()
-
- res = raw_input('Have you read through the SCM changelog and '
- 'confirmed the changelog is current for releasing {VERSION}? '
- .format(**globals()))
- if not res.lower().startswith('y'):
- print("Please do that")
- raise SystemExit(1)
-
- print("Travis-CI tests: http://travis-ci.org/#!/jaraco/setuptools")
- res = raw_input('Have you or has someone verified that the tests '
- 'pass on this revision? ')
- if not res.lower().startswith('y'):
- print("Please do that")
- raise SystemExit(2)
-
- subprocess.check_call(['hg', 'tag', VERSION])
-
- subprocess.check_call(['hg', 'update', VERSION])
-
- upload_to_pypi()
-
- # update to the tip for the next operation
- subprocess.check_call(['hg', 'update'])
-
- # we just tagged the current version, bump for the next release.
- next_ver = bump_versions(infer_next_version(VERSION))
-
- # push the changes
- subprocess.check_call(['hg', 'push'])
-
- add_milestone_and_version(next_ver)
-
-def upload_to_pypi():
- linkify('CHANGES.txt', 'CHANGES (links).txt')
-
- has_docs = build_docs()
- if os.path.isdir('./dist'):
- shutil.rmtree('./dist')
- cmd = [
- sys.executable, 'setup.py', '-q',
- 'egg_info', '-RD', '-b', '',
- 'sdist',
- #'register', '-r', PACKAGE_INDEX,
- #'upload', '-r', PACKAGE_INDEX,
- ]
- if has_docs:
- cmd.extend([
- 'upload_docs', '-r', PACKAGE_INDEX
- ])
- env = os.environ.copy()
- env["SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES"] = "1"
- subprocess.check_call(cmd, env=env)
-
-def has_sphinx():
- try:
- devnull = open(os.path.devnull, 'wb')
- subprocess.Popen(['sphinx-build', '--version'], stdout=devnull,
- stderr=subprocess.STDOUT).wait()
- except Exception:
- return False
- return True
-
-def build_docs():
- if not os.path.isdir('docs'):
- return
- if os.path.isdir('docs/build'):
- shutil.rmtree('docs/build')
- cmd = [
- 'sphinx-build',
- '-b', 'html',
- '-d', 'build/doctrees',
- '.',
- 'build/html',
- ]
- subprocess.check_call(cmd, cwd='docs')
- return True
-
-def linkify(source, dest):
- with open(source) as source:
- out = _linkified_text(source.read())
- with open(dest, 'w') as dest:
- dest.write(out)
-
-def _linkified(rst_path):
- "return contents of reStructureText file with linked issue references"
- rst_file = open(rst_path)
- rst_content = rst_file.read()
- rst_file.close()
-
- return _linkified_text(rst_content)
-
-def _linkified_text(rst_content):
- # first identify any existing HREFs so they're not changed
- HREF_pattern = re.compile('`.*?`_', re.MULTILINE | re.DOTALL)
-
- # split on the HREF pattern, returning the parts to be linkified
- plain_text_parts = HREF_pattern.split(rst_content)
- anchors = []
- linkified_parts = [_linkified_part(part, anchors)
- for part in plain_text_parts]
- pairs = itertools.izip_longest(
- linkified_parts,
- HREF_pattern.findall(rst_content),
- fillvalue='',
- )
- rst_content = ''.join(flatten(pairs))
-
- anchors = sorted(anchors)
+test_info = "Travis-CI tests: http://travis-ci.org/#!/jaraco/setuptools"
+
+os.environ["SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES"] = "1"
+
+link_patterns = [
+ r"(Issue )?#(?P<issue>\d+)",
+ r"Distribute #(?P<distribute>\d+)",
+ r"Buildout #(?P<buildout>\d+)",
+ r"Old Setuptools #(?P<old_setuptools>\d+)",
+ r"Jython #(?P<jython>\d+)",
+ r"Python #(?P<python>\d+)",
+]
+
+issue_urls = dict(
+ issue='https://bitbucket.org/pypa/setuptools/issue/{issue}',
+ distribute='https://bitbucket.org/tarek/distribute/issue/{distribute}',
+ buildout='https://github.com/buildout/buildout/issues/{buildout}',
+ old_setuptools='http://bugs.python.org/setuptools/issue{old_setuptools}',
+ jython='http://bugs.jython.org/issue{jython}',
+ python='http://bugs.python.org/issue{python}',
+)
- bitroot = 'https://bitbucket.org/tarek/distribute'
- rst_content += "\n"
- for x in anchors:
- issue = re.findall(r'\d+', x)[0]
- rst_content += '.. _`%s`: %s/issue/%s\n' % (x, bitroot, issue)
- rst_content += "\n"
- return rst_content
-def flatten(listOfLists):
- "Flatten one level of nesting"
- return itertools.chain.from_iterable(listOfLists)
+def _linkify(source, dest):
+ pattern = '|'.join(link_patterns)
+ with open(source) as source:
+ out = re.sub(pattern, replacer, source.read())
+ with open(dest, 'w') as dest:
+ dest.write(out)
-def _linkified_part(text, anchors):
- """
- Linkify a part and collect any anchors generated
- """
- revision = re.compile(r'\b(issue\s+#?\d+)\b', re.M | re.I)
+def replacer(match):
+ text = match.group(0)
+ match_dict = match.groupdict()
+ for key in match_dict:
+ if match_dict[key]:
+ url = issue_urls[key].format(**match_dict)
+ return "`{text} <{url}>`_".format(text=text, url=url)
- anchors.extend(revision.findall(text)) # ['Issue #43', ...]
- return revision.sub(r'`\1`_', text)
-if __name__ == '__main__':
- do_release()
+def _add_bootstrap_bookmark():
+ cmd = ['hg', 'bookmark', '-i', 'bootstrap', '-f']
+ subprocess.Popen(cmd)
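The rewritten ``release.py`` above drives changelog linkification from ``link_patterns``, ``issue_urls``, and the ``replacer`` callback. A self-contained sketch of that mechanism, using only a subset of the patterns copied from the diff and a made-up changelog line::

    import re

    link_patterns = [
        r"(Issue )?#(?P<issue>\d+)",
        r"Distribute #(?P<distribute>\d+)",
    ]
    issue_urls = {
        'issue': 'https://bitbucket.org/pypa/setuptools/issue/{issue}',
        'distribute': 'https://bitbucket.org/tarek/distribute/issue/{distribute}',
    }

    def replacer(match):
        # Find which named group matched and expand its URL template.
        groups = match.groupdict()
        for key, value in groups.items():
            if value:
                url = issue_urls[key].format(**groups)
                return "`{0} <{1}>`_".format(match.group(0), url)

    sample = "* Distribute #375: warn about insecure egg cache locations."
    print(re.sub('|'.join(link_patterns), replacer, sample))
    # * `Distribute #375 <https://bitbucket.org/tarek/distribute/issue/375>`_: ...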
diff --git a/setup.py b/setup.py
index d56c491a..629f2fff 100755
--- a/setup.py
+++ b/setup.py
@@ -3,59 +3,39 @@
import sys
import os
import textwrap
-import re
# Allow to run setup.py from another directory.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
src_root = None
-do_2to3 = False
-if sys.version_info >= (3,) and do_2to3:
- tmp_src = os.path.join("build", "src")
- from distutils.filelist import FileList
- from distutils import dir_util, file_util, util, log
- log.set_verbosity(1)
- fl = FileList()
- manifest_file = open("MANIFEST.in")
- for line in manifest_file:
- fl.process_template_line(line)
- manifest_file.close()
- dir_util.create_tree(tmp_src, fl.files)
- outfiles_2to3 = []
- dist_script = os.path.join("build", "src", "ez_setup.py")
- for f in fl.files:
- outf, copied = file_util.copy_file(f, os.path.join(tmp_src, f), update=1)
- if copied and outf.endswith(".py") and outf != dist_script:
- outfiles_2to3.append(outf)
- if copied and outf.endswith('api_tests.txt'):
- # XXX support this in distutils as well
- from lib2to3.main import main
- main('lib2to3.fixes', ['-wd', os.path.join(tmp_src, 'tests', 'api_tests.txt')])
-
- util.run_2to3(outfiles_2to3)
-
- # arrange setup to use the copy
- sys.path.insert(0, os.path.abspath(tmp_src))
- src_root = tmp_src
from distutils.util import convert_path
-d = {}
+command_ns = {}
init_path = convert_path('setuptools/command/__init__.py')
init_file = open(init_path)
-exec(init_file.read(), d)
+exec(init_file.read(), command_ns)
init_file.close()
-SETUP_COMMANDS = d['__all__']
-VERSION = "0.8"
+SETUP_COMMANDS = command_ns['__all__']
-from setuptools import setup, find_packages
+main_ns = {}
+init_path = convert_path('setuptools/__init__.py')
+init_file = open(init_path)
+exec(init_file.read(), main_ns)
+init_file.close()
+
+import setuptools
from setuptools.command.build_py import build_py as _build_py
from setuptools.command.test import test as _test
scripts = []
console_scripts = ["easy_install = setuptools.command.easy_install:main"]
+
+# Gentoo distributions manage the python-version-specific scripts themselves,
+# so they define an environment variable to suppress the creation of the
+# version-specific scripts.
if os.environ.get("SETUPTOOLS_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT") in (None, "", "0") and \
os.environ.get("DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT") in (None, "", "0"):
console_scripts.append("easy_install-%s = setuptools.command.easy_install:main" % sys.version[:3])
@@ -73,15 +53,6 @@ class build_py(_build_py):
outf, copied = self.copy_file(srcfile, target)
srcfile = os.path.abspath(srcfile)
- # avoid a bootstrapping issue with easy_install -U (when the
- # previous version doesn't have convert_2to3_doctests)
- if not hasattr(self.distribution, 'convert_2to3_doctests'):
- continue
- if not do_2to3:
- continue
- if copied and srcfile in self.distribution.convert_2to3_doctests:
- self.__doctests_2to3.append(outf)
-
class test(_test):
"""Specific test class to avoid rewriting the entry_points.txt"""
def run(self):
@@ -126,12 +97,12 @@ if sys.platform == 'win32' or os.environ.get("SETUPTOOLS_INSTALL_WINDOWS_SPECIFI
package_data.setdefault('setuptools', []).extend(['*.exe'])
package_data.setdefault('setuptools.command', []).extend(['*.xml'])
-dist = setup(
+setup_params = dict(
name="setuptools",
- version=VERSION,
+ version=main_ns['__version__'],
description="Easily download, build, install, upgrade, and uninstall "
"Python packages",
- author="The fellowship of the packaging",
+ author="Python Packaging Authority",
author_email="distutils-sig@python.org",
license="PSF or ZPL",
long_description = long_description,
@@ -139,7 +110,7 @@ dist = setup(
url = "https://pypi.python.org/pypi/setuptools",
test_suite = 'setuptools.tests',
src_root = src_root,
- packages = find_packages(),
+ packages = setuptools.find_packages(),
package_data = package_data,
py_modules = ['pkg_resources', 'easy_install'],
@@ -148,7 +119,7 @@ dist = setup(
cmdclass = {'test': test},
entry_points = {
- "distutils.commands" : [
+ "distutils.commands": [
"%(cmd)s = setuptools.command.%(cmd)s:%(cmd)s" % locals()
for cmd in SETUP_COMMANDS
],
@@ -228,3 +199,6 @@ dist = setup(
scripts = [],
# tests_require = "setuptools[ssl]",
)
+
+if __name__ == '__main__':
+ dist = setuptools.setup(**setup_params)
diff --git a/setuptools.egg-info/entry_points.txt b/setuptools.egg-info/entry_points.txt
index 20002e25..d64b3c28 100644
--- a/setuptools.egg-info/entry_points.txt
+++ b/setuptools.egg-info/entry_points.txt
@@ -1,62 +1,62 @@
-[distutils.commands]
-bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
-rotate = setuptools.command.rotate:rotate
-develop = setuptools.command.develop:develop
-setopt = setuptools.command.setopt:setopt
-build_py = setuptools.command.build_py:build_py
-saveopts = setuptools.command.saveopts:saveopts
-egg_info = setuptools.command.egg_info:egg_info
-register = setuptools.command.register:register
-upload_docs = setuptools.command.upload_docs:upload_docs
-install_egg_info = setuptools.command.install_egg_info:install_egg_info
-alias = setuptools.command.alias:alias
-easy_install = setuptools.command.easy_install:easy_install
-install_scripts = setuptools.command.install_scripts:install_scripts
-bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst
-bdist_egg = setuptools.command.bdist_egg:bdist_egg
-install = setuptools.command.install:install
-test = setuptools.command.test:test
-install_lib = setuptools.command.install_lib:install_lib
-build_ext = setuptools.command.build_ext:build_ext
-sdist = setuptools.command.sdist:sdist
-
-[egg_info.writers]
-dependency_links.txt = setuptools.command.egg_info:overwrite_arg
-requires.txt = setuptools.command.egg_info:write_requirements
-PKG-INFO = setuptools.command.egg_info:write_pkg_info
-eager_resources.txt = setuptools.command.egg_info:overwrite_arg
-top_level.txt = setuptools.command.egg_info:write_toplevel_names
-namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
-entry_points.txt = setuptools.command.egg_info:write_entries
-depends.txt = setuptools.command.egg_info:warn_depends_obsolete
-
-[console_scripts]
-easy_install = setuptools.command.easy_install:main
-easy_install-2.5 = setuptools.command.easy_install:main
-
-[setuptools.file_finders]
-svn_cvs = setuptools.command.sdist:_default_revctrl
-
-[distutils.setup_keywords]
-dependency_links = setuptools.dist:assert_string_list
-entry_points = setuptools.dist:check_entry_points
-extras_require = setuptools.dist:check_extras
-use_2to3_exclude_fixers = setuptools.dist:assert_string_list
-package_data = setuptools.dist:check_package_data
-install_requires = setuptools.dist:check_requirements
-use_2to3 = setuptools.dist:assert_bool
-use_2to3_fixers = setuptools.dist:assert_string_list
-include_package_data = setuptools.dist:assert_bool
-exclude_package_data = setuptools.dist:check_package_data
-namespace_packages = setuptools.dist:check_nsp
-test_suite = setuptools.dist:check_test_suite
-eager_resources = setuptools.dist:assert_string_list
-zip_safe = setuptools.dist:assert_bool
-test_loader = setuptools.dist:check_importable
-packages = setuptools.dist:check_packages
-convert_2to3_doctests = setuptools.dist:assert_string_list
-tests_require = setuptools.dist:check_requirements
-
-[setuptools.installation]
-eggsecutable = setuptools.command.easy_install:bootstrap
-
+[setuptools.installation]
+eggsecutable = setuptools.command.easy_install:bootstrap
+
+[console_scripts]
+easy_install = setuptools.command.easy_install:main
+easy_install-3.3 = setuptools.command.easy_install:main
+
+[distutils.setup_keywords]
+use_2to3 = setuptools.dist:assert_bool
+namespace_packages = setuptools.dist:check_nsp
+package_data = setuptools.dist:check_package_data
+use_2to3_exclude_fixers = setuptools.dist:assert_string_list
+dependency_links = setuptools.dist:assert_string_list
+use_2to3_fixers = setuptools.dist:assert_string_list
+test_suite = setuptools.dist:check_test_suite
+exclude_package_data = setuptools.dist:check_package_data
+extras_require = setuptools.dist:check_extras
+install_requires = setuptools.dist:check_requirements
+eager_resources = setuptools.dist:assert_string_list
+include_package_data = setuptools.dist:assert_bool
+packages = setuptools.dist:check_packages
+entry_points = setuptools.dist:check_entry_points
+zip_safe = setuptools.dist:assert_bool
+tests_require = setuptools.dist:check_requirements
+convert_2to3_doctests = setuptools.dist:assert_string_list
+test_loader = setuptools.dist:check_importable
+
+[setuptools.file_finders]
+svn_cvs = setuptools.command.sdist:_default_revctrl
+
+[egg_info.writers]
+top_level.txt = setuptools.command.egg_info:write_toplevel_names
+PKG-INFO = setuptools.command.egg_info:write_pkg_info
+eager_resources.txt = setuptools.command.egg_info:overwrite_arg
+namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
+depends.txt = setuptools.command.egg_info:warn_depends_obsolete
+dependency_links.txt = setuptools.command.egg_info:overwrite_arg
+entry_points.txt = setuptools.command.egg_info:write_entries
+requires.txt = setuptools.command.egg_info:write_requirements
+
+[distutils.commands]
+test = setuptools.command.test:test
+bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst
+alias = setuptools.command.alias:alias
+sdist = setuptools.command.sdist:sdist
+develop = setuptools.command.develop:develop
+bdist_egg = setuptools.command.bdist_egg:bdist_egg
+setopt = setuptools.command.setopt:setopt
+egg_info = setuptools.command.egg_info:egg_info
+build_ext = setuptools.command.build_ext:build_ext
+upload_docs = setuptools.command.upload_docs:upload_docs
+easy_install = setuptools.command.easy_install:easy_install
+install = setuptools.command.install:install
+install_egg_info = setuptools.command.install_egg_info:install_egg_info
+bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
+install_lib = setuptools.command.install_lib:install_lib
+rotate = setuptools.command.rotate:rotate
+saveopts = setuptools.command.saveopts:saveopts
+install_scripts = setuptools.command.install_scripts:install_scripts
+build_py = setuptools.command.build_py:build_py
+register = setuptools.command.register:register
+
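The regenerated entry_points.txt above is just the rendered form of the entry_points argument that setuptools passes to its own setup() call, and the same groups are open to third-party projects. The sketch below is illustrative only: the project name, module name and command class are made up, and it assumes example_plugin.py defines a hello class derived from setuptools.Command.

    # Hypothetical setup.py hooking into the 'distutils.commands' group
    # listed above, the same way setuptools registers its own commands.
    from setuptools import setup

    setup(
        name='example-plugin',             # made-up project name
        version='0.1',
        py_modules=['example_plugin'],     # assumed to define class hello(Command)
        entry_points={
            'distutils.commands': [
                'hello = example_plugin:hello',
            ],
        },
    )

Once installed, setuptools' command lookup would resolve "python setup.py hello" to that class in the same way the entries above resolve to setuptools.command.*.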
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index a8e7617a..18dd363d 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -8,7 +8,7 @@ from distutils.util import convert_path
import os
import sys
-__version__ = '0.8'
+__version__ = '0.9.7'
__all__ = [
'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
'find_packages'
diff --git a/setuptools/_backport/__init__.py b/setuptools/_backport/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/setuptools/_backport/__init__.py
diff --git a/setuptools/_backport/hashlib/__init__.py b/setuptools/_backport/hashlib/__init__.py
new file mode 100644
index 00000000..5aeab496
--- /dev/null
+++ b/setuptools/_backport/hashlib/__init__.py
@@ -0,0 +1,146 @@
+# $Id$
+#
+# Copyright (C) 2005 Gregory P. Smith (greg@krypto.org)
+# Licensed to PSF under a Contributor Agreement.
+#
+
+__doc__ = """hashlib module - A common interface to many hash functions.
+
+new(name, string='') - returns a new hash object implementing the
+ given hash function; initializing the hash
+ using the given string data.
+
+Named constructor functions are also available; these are much faster
+than using new():
+
+md5(), sha1(), sha224(), sha256(), sha384(), and sha512()
+
+More algorithms may be available on your platform but the above are
+guaranteed to exist.
+
+NOTE: If you want the adler32 or crc32 hash functions they are available in
+the zlib module.
+
+Choose your hash function wisely. Some have known collision weaknesses.
+sha384 and sha512 will be slow on 32 bit platforms.
+
+Hash objects have these methods:
+ - update(arg): Update the hash object with the string arg. Repeated calls
+ are equivalent to a single call with the concatenation of all
+ the arguments.
+ - digest(): Return the digest of the strings passed to the update() method
+ so far. This may contain non-ASCII characters, including
+ NUL bytes.
+ - hexdigest(): Like digest() except the digest is returned as a string of
+ double length, containing only hexadecimal digits.
+ - copy(): Return a copy (clone) of the hash object. This can be used to
+ efficiently compute the digests of strings that share a common
+ initial substring.
+
+For example, to obtain the digest of the string 'Nobody inspects the
+spammish repetition':
+
+ >>> import hashlib
+ >>> m = hashlib.md5()
+ >>> m.update("Nobody inspects")
+ >>> m.update(" the spammish repetition")
+ >>> m.digest()
+ '\\xbbd\\x9c\\x83\\xdd\\x1e\\xa5\\xc9\\xd9\\xde\\xc9\\xa1\\x8d\\xf0\\xff\\xe9'
+
+More condensed:
+
+ >>> hashlib.sha224("Nobody inspects the spammish repetition").hexdigest()
+ 'a4337bc45a8fc544c03f52dc550cd6e1e87021bc896588bd79e901e2'
+
+"""
+
+# This tuple and __get_builtin_constructor() must be modified if a new
+# always available algorithm is added.
+__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
+
+algorithms = __always_supported
+
+__all__ = __always_supported + ('new', 'algorithms')
+
+
+def __get_builtin_constructor(name):
+ try:
+ if name in ('SHA1', 'sha1'):
+ import _sha
+ return _sha.new
+ elif name in ('MD5', 'md5'):
+ import md5
+ return md5.new
+ elif name in ('SHA256', 'sha256', 'SHA224', 'sha224'):
+ import _sha256
+ bs = name[3:]
+ if bs == '256':
+ return _sha256.sha256
+ elif bs == '224':
+ return _sha256.sha224
+ elif name in ('SHA512', 'sha512', 'SHA384', 'sha384'):
+ import _sha512
+ bs = name[3:]
+ if bs == '512':
+ return _sha512.sha512
+ elif bs == '384':
+ return _sha512.sha384
+ except ImportError:
+ pass # no extension module, this hash is unsupported.
+
+ raise ValueError('unsupported hash type %s' % name)
+
+
+def __get_openssl_constructor(name):
+ try:
+ f = getattr(_hashlib, 'openssl_' + name)
+ # Allow the C module to raise ValueError. The function will be
+ # defined but the hash not actually available thanks to OpenSSL.
+ f()
+ # Use the C function directly (very fast)
+ return f
+ except (AttributeError, ValueError):
+ return __get_builtin_constructor(name)
+
+
+def __py_new(name, string=''):
+ """new(name, string='') - Return a new hashing object using the named algorithm;
+ optionally initialized with a string.
+ """
+ return __get_builtin_constructor(name)(string)
+
+
+def __hash_new(name, string=''):
+ """new(name, string='') - Return a new hashing object using the named algorithm;
+ optionally initialized with a string.
+ """
+ try:
+ return _hashlib.new(name, string)
+ except ValueError:
+ # If the _hashlib module (OpenSSL) doesn't support the named
+ # hash, try using our builtin implementations.
+ # This allows for SHA224/256 and SHA384/512 support even though
+ # the OpenSSL library prior to 0.9.8 doesn't provide them.
+ return __get_builtin_constructor(name)(string)
+
+
+try:
+ import _hashlib
+ new = __hash_new
+ __get_hash = __get_openssl_constructor
+except ImportError:
+ new = __py_new
+ __get_hash = __get_builtin_constructor
+
+for __func_name in __always_supported:
+ # try them all, some may not work due to the OpenSSL
+ # version not supporting that algorithm.
+ try:
+ globals()[__func_name] = __get_hash(__func_name)
+ except ValueError:
+ import logging
+ logging.exception('code for hash %s was not found.', __func_name)
+
+# Cleanup locals()
+del __always_supported, __func_name, __get_hash
+del __py_new, __hash_new, __get_openssl_constructor
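Taken together, the backport mirrors the stdlib hashlib surface: the named constructors md5() through sha512() plus new(name), preferring the _hashlib/OpenSSL C code when importable and otherwise falling back to the pure-Python _sha, _sha256 and _sha512 modules added below. A minimal sketch of the API on a Python 2 interpreter, the only place the backport is meant to be imported:

    # new() dispatches via __hash_new/__py_new; the named constructor was
    # bound by the __get_hash loop above.
    from setuptools._backport import hashlib as hashlib_backport

    data = 'Nobody inspects the spammish repetition'
    assert hashlib_backport.new('sha256', data).hexdigest() == \
        hashlib_backport.sha256(data).hexdigest()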
diff --git a/setuptools/_backport/hashlib/_sha.py b/setuptools/_backport/hashlib/_sha.py
new file mode 100644
index 00000000..d49993c8
--- /dev/null
+++ b/setuptools/_backport/hashlib/_sha.py
@@ -0,0 +1,359 @@
+# -*- coding: iso-8859-1 -*-
+"""A sample implementation of SHA-1 in pure Python.
+
+ Framework adapted from Dinu Gherman's MD5 implementation by
+ J. Hallén and L. Creighton. SHA-1 implementation based directly on
+ the text of the NIST standard FIPS PUB 180-1.
+"""
+
+
+__date__ = '2004-11-17'
+__version__ = 0.91 # Modernised by J. Hallén and L. Creighton for Pypy
+
+
+import struct, copy
+
+
+# ======================================================================
+# Bit-Manipulation helpers
+#
+# _long2bytes() was contributed by Barry Warsaw
+# and is reused here with tiny modifications.
+# ======================================================================
+
+def _long2bytesBigEndian(n, blocksize=0):
+ """Convert a long integer to a byte string.
+
+ If optional blocksize is given and greater than zero, pad the front
+ of the byte string with binary zeros so that the length is a multiple
+ of blocksize.
+ """
+
+ # After much testing, this algorithm was deemed to be the fastest.
+ s = ''
+ pack = struct.pack
+ while n > 0:
+ s = pack('>I', n & 0xffffffff) + s
+ n = n >> 32
+
+ # Strip off leading zeros.
+ for i in range(len(s)):
+ if s[i] != '\000':
+ break
+ else:
+ # Only happens when n == 0.
+ s = '\000'
+ i = 0
+
+ s = s[i:]
+
+ # Add back some pad bytes. This could be done more efficiently
+ # w.r.t. the de-padding being done above, but sigh...
+ if blocksize > 0 and len(s) % blocksize:
+ s = (blocksize - len(s) % blocksize) * '\000' + s
+
+ return s
+
+
+def _bytelist2longBigEndian(list):
+ "Transform a list of characters into a list of longs."
+
+ imax = len(list) // 4
+ hl = [0] * imax
+
+ j = 0
+ i = 0
+ while i < imax:
+ b0 = ord(list[j]) << 24
+ b1 = ord(list[j+1]) << 16
+ b2 = ord(list[j+2]) << 8
+ b3 = ord(list[j+3])
+ hl[i] = b0 | b1 | b2 | b3
+ i = i+1
+ j = j+4
+
+ return hl
+
+
+def _rotateLeft(x, n):
+ "Rotate x (32 bit) left n bits circularly."
+
+ return (x << n) | (x >> (32-n))
+
+
+# ======================================================================
+# The SHA transformation functions
+#
+# ======================================================================
+
+def f0_19(B, C, D):
+ return (B & C) | ((~ B) & D)
+
+def f20_39(B, C, D):
+ return B ^ C ^ D
+
+def f40_59(B, C, D):
+ return (B & C) | (B & D) | (C & D)
+
+def f60_79(B, C, D):
+ return B ^ C ^ D
+
+
+f = [f0_19, f20_39, f40_59, f60_79]
+
+# Constants to be used
+K = [
+ 0x5A827999, # ( 0 <= t <= 19)
+ 0x6ED9EBA1, # (20 <= t <= 39)
+ 0x8F1BBCDC, # (40 <= t <= 59)
+ 0xCA62C1D6 # (60 <= t <= 79)
+ ]
+
+class sha:
+ "An implementation of the MD5 hash function in pure Python."
+
+ digest_size = digestsize = 20
+ block_size = 1
+
+ def __init__(self):
+ "Initialisation."
+
+ # Initial message length in bits(!).
+ self.length = 0
+ self.count = [0, 0]
+
+ # Initial empty message as a sequence of bytes (8 bit characters).
+ self.input = []
+
+ # Call a separate init function, that can be used repeatedly
+ # to start from scratch on the same object.
+ self.init()
+
+
+ def init(self):
+ "Initialize the message-digest and set all fields to zero."
+
+ self.length = 0
+ self.input = []
+
+ # Initial 160 bit message digest (5 times 32 bit).
+ self.H0 = 0x67452301
+ self.H1 = 0xEFCDAB89
+ self.H2 = 0x98BADCFE
+ self.H3 = 0x10325476
+ self.H4 = 0xC3D2E1F0
+
+ def _transform(self, W):
+
+ for t in range(16, 80):
+ W.append(_rotateLeft(
+ W[t-3] ^ W[t-8] ^ W[t-14] ^ W[t-16], 1) & 0xffffffff)
+
+ A = self.H0
+ B = self.H1
+ C = self.H2
+ D = self.H3
+ E = self.H4
+
+ """
+ This loop was unrolled to gain about 10% in speed
+ for t in range(0, 80):
+ TEMP = _rotateLeft(A, 5) + f[t/20] + E + W[t] + K[t/20]
+ E = D
+ D = C
+ C = _rotateLeft(B, 30) & 0xffffffff
+ B = A
+ A = TEMP & 0xffffffff
+ """
+
+ for t in range(0, 20):
+ TEMP = _rotateLeft(A, 5) + ((B & C) | ((~ B) & D)) + E + W[t] + K[0]
+ E = D
+ D = C
+ C = _rotateLeft(B, 30) & 0xffffffff
+ B = A
+ A = TEMP & 0xffffffff
+
+ for t in range(20, 40):
+ TEMP = _rotateLeft(A, 5) + (B ^ C ^ D) + E + W[t] + K[1]
+ E = D
+ D = C
+ C = _rotateLeft(B, 30) & 0xffffffff
+ B = A
+ A = TEMP & 0xffffffff
+
+ for t in range(40, 60):
+ TEMP = _rotateLeft(A, 5) + ((B & C) | (B & D) | (C & D)) + E + W[t] + K[2]
+ E = D
+ D = C
+ C = _rotateLeft(B, 30) & 0xffffffff
+ B = A
+ A = TEMP & 0xffffffff
+
+ for t in range(60, 80):
+ TEMP = _rotateLeft(A, 5) + (B ^ C ^ D) + E + W[t] + K[3]
+ E = D
+ D = C
+ C = _rotateLeft(B, 30) & 0xffffffff
+ B = A
+ A = TEMP & 0xffffffff
+
+
+ self.H0 = (self.H0 + A) & 0xffffffff
+ self.H1 = (self.H1 + B) & 0xffffffff
+ self.H2 = (self.H2 + C) & 0xffffffff
+ self.H3 = (self.H3 + D) & 0xffffffff
+ self.H4 = (self.H4 + E) & 0xffffffff
+
+
+ # Down from here all methods follow the Python Standard Library
+ # API of the sha module.
+
+ def update(self, inBuf):
+ """Add to the current message.
+
+        Update the sha object with the string arg. Repeated calls
+ are equivalent to a single call with the concatenation of all
+ the arguments, i.e. m.update(a); m.update(b) is equivalent
+ to m.update(a+b).
+
+ The hash is immediately calculated for all full blocks. The final
+ calculation is made in digest(). It will calculate 1-2 blocks,
+ depending on how much padding we have to add. This allows us to
+ keep an intermediate value for the hash, so that we only need to
+ make minimal recalculation if we call update() to add more data
+ to the hashed string.
+ """
+
+ leninBuf = len(inBuf)
+
+ # Compute number of bytes mod 64.
+ index = (self.count[1] >> 3) & 0x3F
+
+ # Update number of bits.
+ self.count[1] = self.count[1] + (leninBuf << 3)
+ if self.count[1] < (leninBuf << 3):
+ self.count[0] = self.count[0] + 1
+ self.count[0] = self.count[0] + (leninBuf >> 29)
+
+ partLen = 64 - index
+
+ if leninBuf >= partLen:
+ self.input[index:] = list(inBuf[:partLen])
+ self._transform(_bytelist2longBigEndian(self.input))
+ i = partLen
+ while i + 63 < leninBuf:
+ self._transform(_bytelist2longBigEndian(list(inBuf[i:i+64])))
+ i = i + 64
+ else:
+ self.input = list(inBuf[i:leninBuf])
+ else:
+ i = 0
+ self.input = self.input + list(inBuf)
+
+
+ def digest(self):
+ """Terminate the message-digest computation and return digest.
+
+ Return the digest of the strings passed to the update()
+        method so far. This is a 20-byte string which may contain
+ non-ASCII characters, including null bytes.
+ """
+
+ H0 = self.H0
+ H1 = self.H1
+ H2 = self.H2
+ H3 = self.H3
+ H4 = self.H4
+ input = [] + self.input
+ count = [] + self.count
+
+ index = (self.count[1] >> 3) & 0x3f
+
+ if index < 56:
+ padLen = 56 - index
+ else:
+ padLen = 120 - index
+
+ padding = ['\200'] + ['\000'] * 63
+ self.update(padding[:padLen])
+
+ # Append length (before padding).
+ bits = _bytelist2longBigEndian(self.input[:56]) + count
+
+ self._transform(bits)
+
+ # Store state in digest.
+ digest = _long2bytesBigEndian(self.H0, 4) + \
+ _long2bytesBigEndian(self.H1, 4) + \
+ _long2bytesBigEndian(self.H2, 4) + \
+ _long2bytesBigEndian(self.H3, 4) + \
+ _long2bytesBigEndian(self.H4, 4)
+
+ self.H0 = H0
+ self.H1 = H1
+ self.H2 = H2
+ self.H3 = H3
+ self.H4 = H4
+ self.input = input
+ self.count = count
+
+ return digest
+
+
+ def hexdigest(self):
+ """Terminate and return digest in HEX form.
+
+ Like digest() except the digest is returned as a string of
+        length 40, containing only hexadecimal digits. This may be
+ used to exchange the value safely in email or other non-
+ binary environments.
+ """
+ return ''.join(['%02x' % ord(c) for c in self.digest()])
+
+ def copy(self):
+ """Return a clone object.
+
+        Return a copy ('clone') of the sha object. This can be used
+ to efficiently compute the digests of strings that share
+ a common initial substring.
+ """
+
+ return copy.deepcopy(self)
+
+
+# ======================================================================
+# Mimic Python top-level functions from standard library API
+# for consistency with the _sha module of the standard library.
+# ======================================================================
+
+# These are mandatory variables in the module. They have constant values
+# in the SHA standard.
+
+digest_size = 20
+digestsize = 20
+blocksize = 1
+
+def new(arg=None):
+ """Return a new sha crypto object.
+
+ If arg is present, the method call update(arg) is made.
+ """
+
+ crypto = sha()
+ if arg:
+ crypto.update(arg)
+
+ return crypto
+
+
+if __name__ == "__main__":
+ a_str = "just a test string"
+
+ assert 'da39a3ee5e6b4b0d3255bfef95601890afd80709' == new().hexdigest()
+ assert '3f0cf2e3d9e5903e839417dfc47fed6bfa6457f6' == new(a_str).hexdigest()
+ assert '0852b254078fe3772568a4aba37b917f3d4066ba' == new(a_str*7).hexdigest()
+
+ s = new(a_str)
+ s.update(a_str)
+ assert '8862c1b50967f39d3db6bdc2877d9ccebd3102e5' == s.hexdigest()
diff --git a/setuptools/_backport/hashlib/_sha256.py b/setuptools/_backport/hashlib/_sha256.py
new file mode 100644
index 00000000..805dbd08
--- /dev/null
+++ b/setuptools/_backport/hashlib/_sha256.py
@@ -0,0 +1,260 @@
+import struct
+
+SHA_BLOCKSIZE = 64
+SHA_DIGESTSIZE = 32
+
+
+def new_shaobject():
+ return {
+ 'digest': [0]*8,
+ 'count_lo': 0,
+ 'count_hi': 0,
+ 'data': [0]* SHA_BLOCKSIZE,
+ 'local': 0,
+ 'digestsize': 0
+ }
+
+ROR = lambda x, y: (((x & 0xffffffff) >> (y & 31)) | (x << (32 - (y & 31)))) & 0xffffffff
+Ch = lambda x, y, z: (z ^ (x & (y ^ z)))
+Maj = lambda x, y, z: (((x | y) & z) | (x & y))
+S = lambda x, n: ROR(x, n)
+R = lambda x, n: (x & 0xffffffff) >> n
+Sigma0 = lambda x: (S(x, 2) ^ S(x, 13) ^ S(x, 22))
+Sigma1 = lambda x: (S(x, 6) ^ S(x, 11) ^ S(x, 25))
+Gamma0 = lambda x: (S(x, 7) ^ S(x, 18) ^ R(x, 3))
+Gamma1 = lambda x: (S(x, 17) ^ S(x, 19) ^ R(x, 10))
+
+def sha_transform(sha_info):
+ W = []
+
+ d = sha_info['data']
+ for i in xrange(0,16):
+ W.append( (d[4*i]<<24) + (d[4*i+1]<<16) + (d[4*i+2]<<8) + d[4*i+3])
+
+ for i in xrange(16,64):
+ W.append( (Gamma1(W[i - 2]) + W[i - 7] + Gamma0(W[i - 15]) + W[i - 16]) & 0xffffffff )
+
+ ss = sha_info['digest'][:]
+
+ def RND(a,b,c,d,e,f,g,h,i,ki):
+ t0 = h + Sigma1(e) + Ch(e, f, g) + ki + W[i];
+ t1 = Sigma0(a) + Maj(a, b, c);
+ d += t0;
+ h = t0 + t1;
+ return d & 0xffffffff, h & 0xffffffff
+
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],0,0x428a2f98);
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],1,0x71374491);
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],2,0xb5c0fbcf);
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],3,0xe9b5dba5);
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],4,0x3956c25b);
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],5,0x59f111f1);
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],6,0x923f82a4);
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],7,0xab1c5ed5);
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],8,0xd807aa98);
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],9,0x12835b01);
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],10,0x243185be);
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],11,0x550c7dc3);
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],12,0x72be5d74);
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],13,0x80deb1fe);
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],14,0x9bdc06a7);
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],15,0xc19bf174);
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],16,0xe49b69c1);
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],17,0xefbe4786);
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],18,0x0fc19dc6);
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],19,0x240ca1cc);
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],20,0x2de92c6f);
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],21,0x4a7484aa);
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],22,0x5cb0a9dc);
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],23,0x76f988da);
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],24,0x983e5152);
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],25,0xa831c66d);
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],26,0xb00327c8);
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],27,0xbf597fc7);
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],28,0xc6e00bf3);
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],29,0xd5a79147);
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],30,0x06ca6351);
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],31,0x14292967);
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],32,0x27b70a85);
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],33,0x2e1b2138);
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],34,0x4d2c6dfc);
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],35,0x53380d13);
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],36,0x650a7354);
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],37,0x766a0abb);
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],38,0x81c2c92e);
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],39,0x92722c85);
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],40,0xa2bfe8a1);
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],41,0xa81a664b);
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],42,0xc24b8b70);
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],43,0xc76c51a3);
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],44,0xd192e819);
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],45,0xd6990624);
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],46,0xf40e3585);
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],47,0x106aa070);
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],48,0x19a4c116);
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],49,0x1e376c08);
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],50,0x2748774c);
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],51,0x34b0bcb5);
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],52,0x391c0cb3);
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],53,0x4ed8aa4a);
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],54,0x5b9cca4f);
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],55,0x682e6ff3);
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],56,0x748f82ee);
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],57,0x78a5636f);
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],58,0x84c87814);
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],59,0x8cc70208);
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],60,0x90befffa);
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],61,0xa4506ceb);
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],62,0xbef9a3f7);
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],63,0xc67178f2);
+
+ dig = []
+ for i, x in enumerate(sha_info['digest']):
+ dig.append( (x + ss[i]) & 0xffffffff )
+ sha_info['digest'] = dig
+
+def sha_init():
+ sha_info = new_shaobject()
+ sha_info['digest'] = [0x6A09E667, 0xBB67AE85, 0x3C6EF372, 0xA54FF53A, 0x510E527F, 0x9B05688C, 0x1F83D9AB, 0x5BE0CD19]
+ sha_info['count_lo'] = 0
+ sha_info['count_hi'] = 0
+ sha_info['local'] = 0
+ sha_info['digestsize'] = 32
+ return sha_info
+
+def sha224_init():
+ sha_info = new_shaobject()
+ sha_info['digest'] = [0xc1059ed8, 0x367cd507, 0x3070dd17, 0xf70e5939, 0xffc00b31, 0x68581511, 0x64f98fa7, 0xbefa4fa4]
+ sha_info['count_lo'] = 0
+ sha_info['count_hi'] = 0
+ sha_info['local'] = 0
+ sha_info['digestsize'] = 28
+ return sha_info
+
+def getbuf(s):
+ if isinstance(s, str):
+ return s
+ elif isinstance(s, unicode):
+ return str(s)
+ else:
+ return buffer(s)
+
+def sha_update(sha_info, buffer):
+ count = len(buffer)
+ buffer_idx = 0
+ clo = (sha_info['count_lo'] + (count << 3)) & 0xffffffff
+ if clo < sha_info['count_lo']:
+ sha_info['count_hi'] += 1
+ sha_info['count_lo'] = clo
+
+ sha_info['count_hi'] += (count >> 29)
+
+ if sha_info['local']:
+ i = SHA_BLOCKSIZE - sha_info['local']
+ if i > count:
+ i = count
+
+ # copy buffer
+ for x in enumerate(buffer[buffer_idx:buffer_idx+i]):
+ sha_info['data'][sha_info['local']+x[0]] = struct.unpack('B', x[1])[0]
+
+ count -= i
+ buffer_idx += i
+
+ sha_info['local'] += i
+ if sha_info['local'] == SHA_BLOCKSIZE:
+ sha_transform(sha_info)
+ sha_info['local'] = 0
+ else:
+ return
+
+ while count >= SHA_BLOCKSIZE:
+ # copy buffer
+ sha_info['data'] = [struct.unpack('B',c)[0] for c in buffer[buffer_idx:buffer_idx + SHA_BLOCKSIZE]]
+ count -= SHA_BLOCKSIZE
+ buffer_idx += SHA_BLOCKSIZE
+ sha_transform(sha_info)
+
+
+ # copy buffer
+ pos = sha_info['local']
+ sha_info['data'][pos:pos+count] = [struct.unpack('B',c)[0] for c in buffer[buffer_idx:buffer_idx + count]]
+ sha_info['local'] = count
+
+def sha_final(sha_info):
+ lo_bit_count = sha_info['count_lo']
+ hi_bit_count = sha_info['count_hi']
+ count = (lo_bit_count >> 3) & 0x3f
+ sha_info['data'][count] = 0x80;
+ count += 1
+ if count > SHA_BLOCKSIZE - 8:
+ # zero the bytes in data after the count
+ sha_info['data'] = sha_info['data'][:count] + ([0] * (SHA_BLOCKSIZE - count))
+ sha_transform(sha_info)
+ # zero bytes in data
+ sha_info['data'] = [0] * SHA_BLOCKSIZE
+ else:
+ sha_info['data'] = sha_info['data'][:count] + ([0] * (SHA_BLOCKSIZE - count))
+
+ sha_info['data'][56] = (hi_bit_count >> 24) & 0xff
+ sha_info['data'][57] = (hi_bit_count >> 16) & 0xff
+ sha_info['data'][58] = (hi_bit_count >> 8) & 0xff
+ sha_info['data'][59] = (hi_bit_count >> 0) & 0xff
+ sha_info['data'][60] = (lo_bit_count >> 24) & 0xff
+ sha_info['data'][61] = (lo_bit_count >> 16) & 0xff
+ sha_info['data'][62] = (lo_bit_count >> 8) & 0xff
+ sha_info['data'][63] = (lo_bit_count >> 0) & 0xff
+
+ sha_transform(sha_info)
+
+ dig = []
+ for i in sha_info['digest']:
+ dig.extend([ ((i>>24) & 0xff), ((i>>16) & 0xff), ((i>>8) & 0xff), (i & 0xff) ])
+ return ''.join([chr(i) for i in dig])
+
+class sha256(object):
+ digest_size = digestsize = SHA_DIGESTSIZE
+ block_size = SHA_BLOCKSIZE
+
+ def __init__(self, s=None):
+ self._sha = sha_init()
+ if s:
+ sha_update(self._sha, getbuf(s))
+
+ def update(self, s):
+ sha_update(self._sha, getbuf(s))
+
+ def digest(self):
+ return sha_final(self._sha.copy())[:self._sha['digestsize']]
+
+ def hexdigest(self):
+ return ''.join(['%.2x' % ord(i) for i in self.digest()])
+
+ def copy(self):
+ new = sha256.__new__(sha256)
+ new._sha = self._sha.copy()
+ return new
+
+class sha224(sha256):
+ digest_size = digestsize = 28
+
+ def __init__(self, s=None):
+ self._sha = sha224_init()
+ if s:
+ sha_update(self._sha, getbuf(s))
+
+ def copy(self):
+ new = sha224.__new__(sha224)
+ new._sha = self._sha.copy()
+ return new
+
+if __name__ == "__main__":
+ a_str = "just a test string"
+
+ assert 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' == sha256().hexdigest()
+ assert 'd7b553c6f09ac85d142415f857c5310f3bbbe7cdd787cce4b985acedd585266f' == sha256(a_str).hexdigest()
+ assert '8113ebf33c97daa9998762aacafe750c7cefc2b2f173c90c59663a57fe626f21' == sha256(a_str*7).hexdigest()
+
+ s = sha256(a_str)
+ s.update(a_str)
+ assert '03d9963e05a094593190b6fc794cb1a3e1ac7d7883f0b5855268afeccc70d461' == s.hexdigest()
diff --git a/setuptools/_backport/hashlib/_sha512.py b/setuptools/_backport/hashlib/_sha512.py
new file mode 100644
index 00000000..68ff46f3
--- /dev/null
+++ b/setuptools/_backport/hashlib/_sha512.py
@@ -0,0 +1,288 @@
+"""
+This code was ported from CPython's sha512module.c
+"""
+
+import struct
+
+SHA_BLOCKSIZE = 128
+SHA_DIGESTSIZE = 64
+
+
+def new_shaobject():
+ return {
+ 'digest': [0]*8,
+ 'count_lo': 0,
+ 'count_hi': 0,
+ 'data': [0]* SHA_BLOCKSIZE,
+ 'local': 0,
+ 'digestsize': 0
+ }
+
+ROR64 = lambda x, y: (((x & 0xffffffffffffffff) >> (y & 63)) | (x << (64 - (y & 63)))) & 0xffffffffffffffff
+Ch = lambda x, y, z: (z ^ (x & (y ^ z)))
+Maj = lambda x, y, z: (((x | y) & z) | (x & y))
+S = lambda x, n: ROR64(x, n)
+R = lambda x, n: (x & 0xffffffffffffffff) >> n
+Sigma0 = lambda x: (S(x, 28) ^ S(x, 34) ^ S(x, 39))
+Sigma1 = lambda x: (S(x, 14) ^ S(x, 18) ^ S(x, 41))
+Gamma0 = lambda x: (S(x, 1) ^ S(x, 8) ^ R(x, 7))
+Gamma1 = lambda x: (S(x, 19) ^ S(x, 61) ^ R(x, 6))
+
+def sha_transform(sha_info):
+ W = []
+
+ d = sha_info['data']
+ for i in xrange(0,16):
+ W.append( (d[8*i]<<56) + (d[8*i+1]<<48) + (d[8*i+2]<<40) + (d[8*i+3]<<32) + (d[8*i+4]<<24) + (d[8*i+5]<<16) + (d[8*i+6]<<8) + d[8*i+7])
+
+ for i in xrange(16,80):
+ W.append( (Gamma1(W[i - 2]) + W[i - 7] + Gamma0(W[i - 15]) + W[i - 16]) & 0xffffffffffffffff )
+
+ ss = sha_info['digest'][:]
+
+ def RND(a,b,c,d,e,f,g,h,i,ki):
+ t0 = (h + Sigma1(e) + Ch(e, f, g) + ki + W[i]) & 0xffffffffffffffff
+ t1 = (Sigma0(a) + Maj(a, b, c)) & 0xffffffffffffffff
+ d = (d + t0) & 0xffffffffffffffff
+ h = (t0 + t1) & 0xffffffffffffffff
+ return d & 0xffffffffffffffff, h & 0xffffffffffffffff
+
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],0,0x428a2f98d728ae22)
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],1,0x7137449123ef65cd)
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],2,0xb5c0fbcfec4d3b2f)
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],3,0xe9b5dba58189dbbc)
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],4,0x3956c25bf348b538)
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],5,0x59f111f1b605d019)
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],6,0x923f82a4af194f9b)
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],7,0xab1c5ed5da6d8118)
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],8,0xd807aa98a3030242)
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],9,0x12835b0145706fbe)
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],10,0x243185be4ee4b28c)
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],11,0x550c7dc3d5ffb4e2)
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],12,0x72be5d74f27b896f)
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],13,0x80deb1fe3b1696b1)
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],14,0x9bdc06a725c71235)
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],15,0xc19bf174cf692694)
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],16,0xe49b69c19ef14ad2)
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],17,0xefbe4786384f25e3)
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],18,0x0fc19dc68b8cd5b5)
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],19,0x240ca1cc77ac9c65)
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],20,0x2de92c6f592b0275)
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],21,0x4a7484aa6ea6e483)
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],22,0x5cb0a9dcbd41fbd4)
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],23,0x76f988da831153b5)
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],24,0x983e5152ee66dfab)
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],25,0xa831c66d2db43210)
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],26,0xb00327c898fb213f)
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],27,0xbf597fc7beef0ee4)
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],28,0xc6e00bf33da88fc2)
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],29,0xd5a79147930aa725)
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],30,0x06ca6351e003826f)
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],31,0x142929670a0e6e70)
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],32,0x27b70a8546d22ffc)
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],33,0x2e1b21385c26c926)
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],34,0x4d2c6dfc5ac42aed)
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],35,0x53380d139d95b3df)
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],36,0x650a73548baf63de)
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],37,0x766a0abb3c77b2a8)
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],38,0x81c2c92e47edaee6)
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],39,0x92722c851482353b)
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],40,0xa2bfe8a14cf10364)
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],41,0xa81a664bbc423001)
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],42,0xc24b8b70d0f89791)
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],43,0xc76c51a30654be30)
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],44,0xd192e819d6ef5218)
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],45,0xd69906245565a910)
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],46,0xf40e35855771202a)
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],47,0x106aa07032bbd1b8)
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],48,0x19a4c116b8d2d0c8)
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],49,0x1e376c085141ab53)
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],50,0x2748774cdf8eeb99)
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],51,0x34b0bcb5e19b48a8)
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],52,0x391c0cb3c5c95a63)
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],53,0x4ed8aa4ae3418acb)
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],54,0x5b9cca4f7763e373)
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],55,0x682e6ff3d6b2b8a3)
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],56,0x748f82ee5defb2fc)
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],57,0x78a5636f43172f60)
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],58,0x84c87814a1f0ab72)
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],59,0x8cc702081a6439ec)
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],60,0x90befffa23631e28)
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],61,0xa4506cebde82bde9)
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],62,0xbef9a3f7b2c67915)
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],63,0xc67178f2e372532b)
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],64,0xca273eceea26619c)
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],65,0xd186b8c721c0c207)
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],66,0xeada7dd6cde0eb1e)
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],67,0xf57d4f7fee6ed178)
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],68,0x06f067aa72176fba)
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],69,0x0a637dc5a2c898a6)
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],70,0x113f9804bef90dae)
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],71,0x1b710b35131c471b)
+ ss[3], ss[7] = RND(ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],72,0x28db77f523047d84)
+ ss[2], ss[6] = RND(ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],73,0x32caab7b40c72493)
+ ss[1], ss[5] = RND(ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],ss[5],74,0x3c9ebe0a15c9bebc)
+ ss[0], ss[4] = RND(ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],ss[4],75,0x431d67c49c100d4c)
+ ss[7], ss[3] = RND(ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],ss[3],76,0x4cc5d4becb3e42b6)
+ ss[6], ss[2] = RND(ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],ss[2],77,0x597f299cfc657e2a)
+ ss[5], ss[1] = RND(ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],ss[1],78,0x5fcb6fab3ad6faec)
+ ss[4], ss[0] = RND(ss[1],ss[2],ss[3],ss[4],ss[5],ss[6],ss[7],ss[0],79,0x6c44198c4a475817)
+
+ dig = []
+ for i, x in enumerate(sha_info['digest']):
+ dig.append( (x + ss[i]) & 0xffffffffffffffff )
+ sha_info['digest'] = dig
+
+def sha_init():
+ sha_info = new_shaobject()
+ sha_info['digest'] = [ 0x6a09e667f3bcc908, 0xbb67ae8584caa73b, 0x3c6ef372fe94f82b, 0xa54ff53a5f1d36f1, 0x510e527fade682d1, 0x9b05688c2b3e6c1f, 0x1f83d9abfb41bd6b, 0x5be0cd19137e2179]
+ sha_info['count_lo'] = 0
+ sha_info['count_hi'] = 0
+ sha_info['local'] = 0
+ sha_info['digestsize'] = 64
+ return sha_info
+
+def sha384_init():
+ sha_info = new_shaobject()
+ sha_info['digest'] = [ 0xcbbb9d5dc1059ed8, 0x629a292a367cd507, 0x9159015a3070dd17, 0x152fecd8f70e5939, 0x67332667ffc00b31, 0x8eb44a8768581511, 0xdb0c2e0d64f98fa7, 0x47b5481dbefa4fa4]
+ sha_info['count_lo'] = 0
+ sha_info['count_hi'] = 0
+ sha_info['local'] = 0
+ sha_info['digestsize'] = 48
+ return sha_info
+
+def getbuf(s):
+ if isinstance(s, str):
+ return s
+ elif isinstance(s, unicode):
+ return str(s)
+ else:
+ return buffer(s)
+
+def sha_update(sha_info, buffer):
+ count = len(buffer)
+ buffer_idx = 0
+ clo = (sha_info['count_lo'] + (count << 3)) & 0xffffffff
+ if clo < sha_info['count_lo']:
+ sha_info['count_hi'] += 1
+ sha_info['count_lo'] = clo
+
+ sha_info['count_hi'] += (count >> 29)
+
+ if sha_info['local']:
+ i = SHA_BLOCKSIZE - sha_info['local']
+ if i > count:
+ i = count
+
+ # copy buffer
+ for x in enumerate(buffer[buffer_idx:buffer_idx+i]):
+ sha_info['data'][sha_info['local']+x[0]] = struct.unpack('B', x[1])[0]
+
+ count -= i
+ buffer_idx += i
+
+ sha_info['local'] += i
+ if sha_info['local'] == SHA_BLOCKSIZE:
+ sha_transform(sha_info)
+ sha_info['local'] = 0
+ else:
+ return
+
+ while count >= SHA_BLOCKSIZE:
+ # copy buffer
+ sha_info['data'] = [struct.unpack('B',c)[0] for c in buffer[buffer_idx:buffer_idx + SHA_BLOCKSIZE]]
+ count -= SHA_BLOCKSIZE
+ buffer_idx += SHA_BLOCKSIZE
+ sha_transform(sha_info)
+
+ # copy buffer
+ pos = sha_info['local']
+ sha_info['data'][pos:pos+count] = [struct.unpack('B',c)[0] for c in buffer[buffer_idx:buffer_idx + count]]
+ sha_info['local'] = count
+
+def sha_final(sha_info):
+ lo_bit_count = sha_info['count_lo']
+ hi_bit_count = sha_info['count_hi']
+ count = (lo_bit_count >> 3) & 0x7f
+ sha_info['data'][count] = 0x80;
+ count += 1
+ if count > SHA_BLOCKSIZE - 16:
+ # zero the bytes in data after the count
+ sha_info['data'] = sha_info['data'][:count] + ([0] * (SHA_BLOCKSIZE - count))
+ sha_transform(sha_info)
+ # zero bytes in data
+ sha_info['data'] = [0] * SHA_BLOCKSIZE
+ else:
+ sha_info['data'] = sha_info['data'][:count] + ([0] * (SHA_BLOCKSIZE - count))
+
+ sha_info['data'][112] = 0;
+ sha_info['data'][113] = 0;
+ sha_info['data'][114] = 0;
+ sha_info['data'][115] = 0;
+ sha_info['data'][116] = 0;
+ sha_info['data'][117] = 0;
+ sha_info['data'][118] = 0;
+ sha_info['data'][119] = 0;
+
+ sha_info['data'][120] = (hi_bit_count >> 24) & 0xff
+ sha_info['data'][121] = (hi_bit_count >> 16) & 0xff
+ sha_info['data'][122] = (hi_bit_count >> 8) & 0xff
+ sha_info['data'][123] = (hi_bit_count >> 0) & 0xff
+ sha_info['data'][124] = (lo_bit_count >> 24) & 0xff
+ sha_info['data'][125] = (lo_bit_count >> 16) & 0xff
+ sha_info['data'][126] = (lo_bit_count >> 8) & 0xff
+ sha_info['data'][127] = (lo_bit_count >> 0) & 0xff
+
+ sha_transform(sha_info)
+
+ dig = []
+ for i in sha_info['digest']:
+ dig.extend([ ((i>>56) & 0xff), ((i>>48) & 0xff), ((i>>40) & 0xff), ((i>>32) & 0xff), ((i>>24) & 0xff), ((i>>16) & 0xff), ((i>>8) & 0xff), (i & 0xff) ])
+ return ''.join([chr(i) for i in dig])
+
+class sha512(object):
+ digest_size = digestsize = SHA_DIGESTSIZE
+ block_size = SHA_BLOCKSIZE
+
+ def __init__(self, s=None):
+ self._sha = sha_init()
+ if s:
+ sha_update(self._sha, getbuf(s))
+
+ def update(self, s):
+ sha_update(self._sha, getbuf(s))
+
+ def digest(self):
+ return sha_final(self._sha.copy())[:self._sha['digestsize']]
+
+ def hexdigest(self):
+ return ''.join(['%.2x' % ord(i) for i in self.digest()])
+
+ def copy(self):
+ new = sha512.__new__(sha512)
+ new._sha = self._sha.copy()
+ return new
+
+class sha384(sha512):
+ digest_size = digestsize = 48
+
+ def __init__(self, s=None):
+ self._sha = sha384_init()
+ if s:
+ sha_update(self._sha, getbuf(s))
+
+ def copy(self):
+ new = sha384.__new__(sha384)
+ new._sha = self._sha.copy()
+ return new
+
+if __name__ == "__main__":
+ a_str = "just a test string"
+
+ assert sha512().hexdigest() == "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e"
+ assert sha512(a_str).hexdigest() == "68be4c6664af867dd1d01c8d77e963d87d77b702400c8fabae355a41b8927a5a5533a7f1c28509bbd65c5f3ac716f33be271fbda0ca018b71a84708c9fae8a53"
+ assert sha512(a_str*7).hexdigest() == "3233acdbfcfff9bff9fc72401d31dbffa62bd24e9ec846f0578d647da73258d9f0879f7fde01fe2cc6516af3f343807fdef79e23d696c923d79931db46bf1819"
+
+ s = sha512(a_str)
+ s.update(a_str)
+ assert s.hexdigest() == "341aeb668730bbb48127d5531115f3c39d12cb9586a6ca770898398aff2411087cfe0b570689adf328cddeb1f00803acce6737a19f310b53bbdb0320828f75bb"
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 61a66c6d..47f00c00 100755
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -11,11 +11,8 @@ from setuptools.compat import (urllib2, httplib, StringIO, HTTPError,
url2pathname, name2codepoint,
unichr, urljoin)
from setuptools.compat import filterfalse
-try:
- from hashlib import md5
-except ImportError:
- from md5 import md5
from fnmatch import translate
+from setuptools.py24compat import hashlib
from setuptools.py24compat import wraps
from setuptools.py27compat import get_all_headers
@@ -195,6 +192,76 @@ user_agent = "Python-urllib/%s setuptools/%s" % (
sys.version[:3], require('setuptools')[0].version
)
+class ContentChecker(object):
+ """
+ A null content checker that defines the interface for checking content
+ """
+ def feed(self, block):
+ """
+ Feed a block of data to the hash.
+ """
+ return
+
+ def is_valid(self):
+ """
+ Check the hash. Return False if validation fails.
+ """
+ return True
+
+ def report(self, reporter, template):
+ """
+ Call reporter with information about the checker (hash name)
+ substituted into the template.
+ """
+ return
+
+class HashChecker(ContentChecker):
+ pattern = re.compile(
+ r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)='
+ r'(?P<expected>[a-f0-9]+)'
+ )
+
+ def __init__(self, hash_name, expected):
+ self.hash = hashlib.new(hash_name)
+ self.expected = expected
+
+ @classmethod
+ def from_url(cls, url):
+ "Construct a (possibly null) ContentChecker from a URL"
+ fragment = urlparse(url)[-1]
+ if not fragment:
+ return ContentChecker()
+ match = cls.pattern.search(fragment)
+ if not match:
+ return ContentChecker()
+ return cls(**match.groupdict())
+
+ def feed(self, block):
+ self.hash.update(block)
+
+ def is_valid(self):
+ return self.hash.hexdigest() == self.expected
+
+ def _get_hash_name(self):
+ """
+ Python 2.4 implementation of MD5 doesn't supply a .name attribute
+ so provide that name.
+
+ When Python 2.4 is no longer required, replace invocations of this
+ method with simply 'self.hash.name'.
+ """
+ try:
+ return self.hash.name
+ except AttributeError:
+ if 'md5' in str(type(self.hash)):
+ return 'md5'
+ raise
+
+ def report(self, reporter, template):
+ msg = template % self._get_hash_name()
+ return reporter(msg)
+
+
class PackageIndex(Environment):
"""A distribution index that scans web pages for download URLs"""
@@ -387,16 +454,20 @@ class PackageIndex(Environment):
- def check_md5(self, cs, info, filename, tfp):
- if re.match('md5=[0-9a-f]{32}$', info):
- self.debug("Validating md5 checksum for %s", filename)
- if cs.hexdigest() != info[4:]:
- tfp.close()
- os.unlink(filename)
- raise DistutilsError(
- "MD5 validation failed for "+os.path.basename(filename)+
- "; possible download problem?"
- )
+ def check_hash(self, checker, filename, tfp):
+ """
+ checker is a ContentChecker
+ """
+ checker.report(self.debug,
+ "Validating %%s checksum for %s" % filename)
+ if not checker.is_valid():
+ tfp.close()
+ os.unlink(filename)
+ raise DistutilsError(
+ "%s validation failed for %s; "
+ "possible download problem?" % (
+ checker.hash.name, os.path.basename(filename))
+ )
def add_find_links(self, urls):
"""Add `urls` to the list that will be prescanned for searches"""
@@ -600,14 +671,12 @@ class PackageIndex(Environment):
# Download the file
fp, tfp, info = None, None, None
try:
- if '#' in url:
- url, info = url.split('#', 1)
+ checker = HashChecker.from_url(url)
fp = self.open_url(url)
if isinstance(fp, HTTPError):
raise DistutilsError(
"Can't download %s: %s %s" % (url, fp.code,fp.msg)
)
- cs = md5()
headers = fp.info()
blocknum = 0
bs = self.dl_blocksize
@@ -621,13 +690,13 @@ class PackageIndex(Environment):
while True:
block = fp.read(bs)
if block:
- cs.update(block)
+ checker.feed(block)
tfp.write(block)
blocknum += 1
self.reporthook(url, filename, blocknum, bs, size)
else:
break
- if info: self.check_md5(cs, info, filename, tfp)
+ self.check_hash(checker, filename, tfp)
return headers
finally:
if fp: fp.close()
@@ -636,7 +705,6 @@ class PackageIndex(Environment):
def reporthook(self, url, filename, blocknum, blksize, size):
pass # no-op
-
def open_url(self, url, warning=None):
if url.startswith('file:'):
return local_open(url)
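The ContentChecker/HashChecker pair above replaces the old MD5-only validation: from_url() inspects the URL fragment, feed() streams each downloaded block into the hash, is_valid() compares the final hexdigest against the expected value, and a fragment with no recognised hash yields a null checker that always passes. A short sketch of the protocol, reusing the digest pair from the tests further down (the host name is made up):

    from setuptools.package_index import HashChecker

    checker = HashChecker.from_url(
        'http://example.com/pkg.tar.gz#md5=f12895fdffbd45007040d2e44df98478')
    checker.feed('You should probably not be using MD5'.encode('ascii'))
    assert checker.is_valid()
    assert checker.report(lambda msg: msg, 'validated %s checksum') == \
        'validated md5 checksum'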
diff --git a/setuptools/py24compat.py b/setuptools/py24compat.py
index c5d7d204..40e9ae0f 100644
--- a/setuptools/py24compat.py
+++ b/setuptools/py24compat.py
@@ -9,3 +9,9 @@ except ImportError:
def wraps(func):
"Just return the function unwrapped"
return lambda x: x
+
+
+try:
+ import hashlib
+except ImportError:
+ from setuptools._backport import hashlib
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index 4e527446..29fc07b8 100755
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -270,11 +270,11 @@ class DirectorySandbox(AbstractSandbox):
self._violation(operation, src, dst, *args, **kw)
return (src,dst)
- def open(self, file, flags, mode=0x1FF): # 0777
+ def open(self, file, flags, mode=0x1FF, *args, **kw): # 0777
"""Called for low-level os.open()"""
if flags & WRITE_FLAGS and not self._ok(file):
- self._violation("os.open", file, flags, mode)
- return _os.open(file,flags,mode)
+ self._violation("os.open", file, flags, mode, *args, **kw)
+ return _os.open(file,flags,mode, *args, **kw)
WRITE_FLAGS = reduce(
operator.or_, [getattr(_os, a, 0) for a in
diff --git a/setuptools/ssl_support.py b/setuptools/ssl_support.py
index 2aec655a..f8a780a9 100644
--- a/setuptools/ssl_support.py
+++ b/setuptools/ssl_support.py
@@ -88,9 +88,16 @@ except ImportError:
class CertificateError(ValueError):
pass
- def _dnsname_to_pat(dn):
+ def _dnsname_to_pat(dn, max_wildcards=1):
pats = []
for frag in dn.split(r'.'):
+ if frag.count('*') > max_wildcards:
+ # Issue #17980: avoid denials of service by refusing more
+                # than one wildcard per fragment. A survey of established
+ # policy among SSL implementations showed it to be a
+ # reasonable choice.
+ raise CertificateError(
+ "too many wildcards in certificate DNS name: " + repr(dn))
if frag == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
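The added guard caps wildcards at one per DNS-name fragment (max_wildcards=1), so a hostile certificate name such as 'a**...**.example.com' raises CertificateError instead of being compiled into an expensive pattern. The standalone sketch below mirrors the new check without importing the private helper, which only exists when the fallback implementation above is in use:

    max_wildcards = 1
    for dn in ('*.example.com', 'a**.example.com'):
        too_many = any(frag.count('*') > max_wildcards for frag in dn.split('.'))
        print('%s: %s' % (dn, 'rejected' if too_many else 'accepted'))
    # *.example.com: accepted
    # a**.example.com: rejected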
diff --git a/setuptools/tests/test_markerlib.py b/setuptools/tests/test_markerlib.py
index aa461846..dae71cba 100644
--- a/setuptools/tests/test_markerlib.py
+++ b/setuptools/tests/test_markerlib.py
@@ -19,20 +19,24 @@ class TestMarkerlib(unittest.TestCase):
self.assertTrue(interpret(""))
self.assertTrue(interpret("os.name != 'buuuu'"))
+ self.assertTrue(interpret("os_name != 'buuuu'"))
self.assertTrue(interpret("python_version > '1.0'"))
self.assertTrue(interpret("python_version < '5.0'"))
self.assertTrue(interpret("python_version <= '5.0'"))
self.assertTrue(interpret("python_version >= '1.0'"))
self.assertTrue(interpret("'%s' in os.name" % os_name))
+ self.assertTrue(interpret("'%s' in os_name" % os_name))
self.assertTrue(interpret("'buuuu' not in os.name"))
self.assertFalse(interpret("os.name == 'buuuu'"))
+ self.assertFalse(interpret("os_name == 'buuuu'"))
self.assertFalse(interpret("python_version < '1.0'"))
self.assertFalse(interpret("python_version > '5.0'"))
self.assertFalse(interpret("python_version >= '5.0'"))
self.assertFalse(interpret("python_version <= '1.0'"))
self.assertFalse(interpret("'%s' not in os.name" % os_name))
self.assertFalse(interpret("'buuuu' in os.name and python_version >= '5.0'"))
+ self.assertFalse(interpret("'buuuu' in os_name and python_version >= '5.0'"))
environment = default_environment()
environment['extra'] = 'test'
diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py
index 92d1e2e0..3791914a 100644
--- a/setuptools/tests/test_packageindex.py
+++ b/setuptools/tests/test_packageindex.py
@@ -141,3 +141,42 @@ class TestPackageIndex(unittest.TestCase):
'reportlab-2.5.win-amd64-py2.7.exe'), ('reportlab-2.5', '2.7', 'win-amd64'))
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
'reportlab-2.5.win-amd64.exe'), ('reportlab-2.5', None, 'win-amd64'))
+
+class TestContentCheckers(unittest.TestCase):
+
+ def test_md5(self):
+ checker = setuptools.package_index.HashChecker.from_url(
+ 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
+ checker.feed('You should probably not be using MD5'.encode('ascii'))
+ self.assertEqual(checker.hash.hexdigest(),
+ 'f12895fdffbd45007040d2e44df98478')
+ self.assertTrue(checker.is_valid())
+
+ def test_other_fragment(self):
+ "Content checks should succeed silently if no hash is present"
+ checker = setuptools.package_index.HashChecker.from_url(
+ 'http://foo/bar#something%20completely%20different')
+ checker.feed('anything'.encode('ascii'))
+ self.assertTrue(checker.is_valid())
+
+ def test_blank_md5(self):
+ "Content checks should succeed if a hash is empty"
+ checker = setuptools.package_index.HashChecker.from_url(
+ 'http://foo/bar#md5=')
+ checker.feed('anything'.encode('ascii'))
+ self.assertTrue(checker.is_valid())
+
+ def test_get_hash_name_md5(self):
+ checker = setuptools.package_index.HashChecker.from_url(
+ 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
+ if sys.version_info >= (2,5):
+ self.assertEqual(checker.hash.name, 'md5')
+ else:
+            # Python 2.4 compatibility
+ self.assertEqual(checker._get_hash_name(), 'md5')
+
+ def test_report(self):
+ checker = setuptools.package_index.HashChecker.from_url(
+ 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
+ rep = checker.report(lambda x: x, 'My message about %s')
+ self.assertEqual(rep, 'My message about md5')
diff --git a/tests/test_pkg_resources.py b/tests/test_pkg_resources.py
index b05ea44b..dfa27120 100644
--- a/tests/test_pkg_resources.py
+++ b/tests/test_pkg_resources.py
@@ -56,7 +56,7 @@ class TestZipProvider(object):
zp = pkg_resources.ZipProvider(mod)
filename = zp.get_resource_filename(manager, 'data.dat')
assert os.stat(filename).st_mtime == 1368379500
- f = open(filename, 'wb')
+ f = open(filename, 'w')
f.write('hello, world?')
f.close()
os.utime(filename, (1368379500, 1368379500))
@@ -64,3 +64,11 @@ class TestZipProvider(object):
f = open(filename)
assert f.read() == 'hello, world!'
manager.cleanup_resources()
+
+class TestResourceManager(object):
+ def test_get_cache_path(self):
+ mgr = pkg_resources.ResourceManager()
+ path = mgr.get_cache_path('foo')
+ type_ = str(type(path))
+ message = "Unexpected type from get_cache_path: " + type_
+ assert isinstance(path, (unicode, str)), message