34 files changed, 1530 insertions, 823 deletions
diff --git a/.hgignore b/.hgignore deleted file mode 100644 index ebc53b33..00000000 --- a/.hgignore +++ /dev/null @@ -1,15 +0,0 @@ -syntax: glob -bin -build -dist -include -lib -distribute.egg-info -setuptools.egg-info -.coverage -.tox -*.egg -*.py[cod] -*.swp -*~ -.git* diff --git a/.hgtags b/.hgtags deleted file mode 100644 index 6e59e318..00000000 --- a/.hgtags +++ /dev/null @@ -1,306 +0,0 @@ -7e9441311eb21dd1fbc32cfbad58168e46c5450e 0.6 -26f429772565f69d1f6d21adf57c3d8c40197129 0.6.1 -6f46749a7454be6e044a54cd73c51318b74bdee8 0.6.2 -34b80fb58862d18f8f957f98a883ed4a72d06f8e 0.6.3 -fb04abddb50d82a9005c9082c94d5eb983be1d79 0.6.4 -8ae0bd250b4a0d58cbaf16b4354ad60f73f24a01 0.6.5 -88847883dfed39829d3a5ed292ad540723ad31cc 0.6.6 -fcbef325349ada38f6c674eb92db82664cf6437c 0.6.7 -3af7f2b8270b9bb34fb65f08ee567bfe8e2a6a5a 0.6.8 -669725d03fd1e345ea47590e9b14cb19742b96a2 0.6.9 -eff3ca9c2d8d39e24c221816c52a37f964535336 0.6.10 -88710e34b91c98c9348749722cce3acd574d177d 0.6.11 -5ce754773a43ac21f7bd13872f45c75e27b593f8 0.6.12 -de36566d35e51bee7cfc86ffa694795e52f4147c 0.6.13 -e5f3f0ffe9e1a243d49a06f26c79dd160f521483 0.6.14 -dc03a300ec7a89ad773047172d43e52b34e7cd1e 0.6.15 -e620fb4ee8ba17debadb614fb583c6dfac229dea 0.6.16 -21df276275b5a47c6a994927d69ad3d90cf62b5d 0.6.17 -e9264ca4ba8c24239c36a8426a0394f7c7d5dd83 0.6.18 -aed31b1fa47ed1f39e55c75b76bbbdb80775b7f1 0.6.19 -c6e6273587816c3e486ef7739e53c864a0145251 0.6.20 -7afdf4c84a713fe151e6163ab25d45e8727ce653 0.6.21 -105066342777cd1319a95d7ae0271a2ea1ac33fe 0.6.23 -7b5ef4e6c80e82541dffb5a9a130d81550d5a835 0.6.24 -9c014a80f32e532371826ed1dc3236975f37f371 0.6.25 -ff8c4d6c8e5d2093750a58a3d43b76556570007c 0.6.26 -2a5c42ed097a195e398b97261c40cd66c8da8913 0.6.27 -4ed34b38851f90278cfe2bff75784f7e32883725 0.6.28 -acecfa2cfb6fca207dd2f4e025c695def3bb6b40 0.6.29 -e950f50addff150859f5990b9df2a33c691b6354 0.6.30 -06dae3faee2de50ff17b90719df410b2ebc5b71e 0.6.31 -1f4f79258ed5b418f680a55d3006f41aa6a56d2b 0.6.32 -89f57bf1406a5e745470af35446902c21ac9b6f6 0.6.33 -3c8f9fc13862124cf20ef2ff2140254fb272bb94 0.6.34 -7c3f8b9eb7cfa17481c835d5caaa918d337c7a83 0.6.35 -192094c0d1e2e5d2cb5c718f84a36c9de04b314b 0.6.36 -66d4e3b8899166e4c04189ee1831c649b7ff38bf 0.6.37 -398d58aa8bba33778c30ce72055a27d4b425809c 0.6.38 -f457fc2a3ebe609d8ca7a869eb65b7506ecf49ef 0.6.39 -9b2e2aa06e058c63e06c5e42a7f279ddae2dfb7d 0.7b1 -9089a40343981baa593b9bb5953f9088e9507099 0.6.40 -ad107e9b4beea24516ac4e1e854696e586fe279d 0.6.41 -f30167716b659f96c5e0b7ea3d5be2bcff8c0eac 0.6.42 -8951daac6c1bc7b24c7fb054fd369f2c5b88cdb3 0.7b2 -35086ee286732b0f63d2be18d9f26f2734586e2d 0.6.43 -63e4eb2d61204f77f9b557201a0efa187b05a611 0.7b3 -73aa98aee6bbc4a9d19a334a8ac928dece7799c6 0.6.44 -53b4ac9a748aa28893aaca42c41e5e99568667bb 0.7b4 -ddca71ae5ceb9b14512dc60ea83802c10e224cf0 0.6.45 -7f2c08e9ca22023d1499c512fccc1513813b7dc4 0.7 -024dd30ed702135f5328975042566e48cc479d7d 0.7.1 -d04c05f035e3a5636006fc34f4be7e6c77035d17 0.7.2 -d212e48e0cef689acba57ed017289c027660b23c 0.7.3 -74c6c12268059986f9cc0b535399594f1d131201 0.8b1 -85640475dda0621f20e11db0995fa07f51744a98 0.7.4 -b57e5ba934767dd498669b17551678081b3047b5 0.6.46 -dd5bbc116c53d3732d22f983e7ca6d8cfabd3b08 0.7.5 -512744f3f306aea0fdde4cfd600af8b2d6e773e7 0.8b2 -8af9839a76407eebf3610fcd3e7973f1625abaa2 0.8b3 -ee2c967017024197b38e39ced852808265387a4b 0.6.47 -48d3d26cbea68e21c96e51f01092e8fdead5cd60 0.7.6 -5b3c7981a02b4a86af1b10ae16492899b515d485 0.8b4 -cae9127e0534fc46d7ddbc11f68dc88fd9311459 0.6.48 -1506fa538fff01e70424530a32a44e070720cf3c 0.7.7 -5679393794978a1d3e1e087472b8a0fdf3d8423c 0.8b5 
-26f59ec0f0f69714d28a891aaad048e3b9fcd6f7 0.8b6 -f657df1f1ed46596d236376649c99a470662b4ba 0.6.49 -236de1de68b14230036147c7c9e7c09b215b53ee 0.7.8 -979d598822bc64b05fb177a2ba221e75ee5b44d3 0.8b7 -e3d70539e79f39a97f69674ab038661961a1eb43 0.8 -3078b1e566399bf0c5590f3528df03d0c23a0777 0.9 -9e5a8f734662dd36e6fd6e4ba9031d0e2d294632 0.9.1 -37444bb32e172aaacbc0aeafdf5a778ee471723d 0.9.2 -3e9d2e89de3aa499382d6be2ec8b64d2a29f7f13 0.9.3 -1aef141fc968113e4c521d1edf6ea863c4ff7e00 0.9.4 -88e3d6788facbb2dd6467a23c4f35529a5ce20a1 0.9.5 -acc6c5d61d0f82040c237ac7ea010c0fc9e67d66 0.9.6 -19965a03c1d5231c894e0fabfaf45af1fd99f484 0.9.7 -e0a6e225ad6b28471cd42cfede6e8a334bb548fb 0.9.8 -7b91ff93a30ef78634b7bb34f4a6229a5de281ee 1.0b1 -aba16323ec9382da7bc77c633990ccb3bd58d050 1.0b2 -8a98492f0d852402c93ddbbf3f07081909a9105f 1.0b3 -c385fdf1f976fb1d2a6accc9292d8eca419180fa 1.0 -d943b67fe80dbd61326014e4acedfc488adfa1c9 1.1 -2e42e86546100c9f6845b04eb31b75c5add05f78 1.1.1 -462fe5ccd8befeb2a235e8295d6d73eb3a49cc78 1.1.2 -ddf3561d6a54087745f4bf6ea2048b86195d6fe2 1.1.3 -f94c7e4fa03077e069c1c3cef93ead735559e706 1.1.4 -d9bb58331007ee3f69d31983a180f56b15c731c3 1.1.5 -5e426bdeb46b87e299422adc419f4163b6c78d13 1.1.6 -cc9b19cd0ec64e44308a852e9b9fdc6026ea2e46 1.1.7 -4c7dc4ae2440ae3e9ba26b4a12ffca3407e7030d 1.2b1 -77921bbe3931caf40474dc36e55d3d541981c749 1.2 -19873119647deae8a68e9ed683317b9ee170a8d8 1.3 -a197b626075a8c2e393a08c42a20bd2624a41092 1.3.1 -076b472a9e3f840021e9d5509878337e6e5fcd89 1.3.2 -0d1bdb99a535a2c7ed4edd37141fb0b54348b713 1.4b1 -a13f8c18ce742bc83c794b9eea57980cb94ae18a 1.4 -9a5f26d7df8ef779cb5f40cc0389343fb4c61365 1.4.1 -274cb3beba4f22d5f461b0578b6d56e171d94f2e 1.4.2 -0bb1df93c2eaa50e95ccfce18208b0cca20ebae3 2.0 -bbdba51e1bc1779728ed351529252f73543ace65 2.0.1 -5a62ac60ba31d249db1cfcff31d85ca26421be6d 2.0.2 -c49c651997ebec3b40b71139e8a6a6a15c62c848 2.1 -b5be6c2b828cb92d27f52fccc725ce86a37e9ce0 2.1.1 -ab1c2a26e06f2a2006e8e867e4d41ccf1d6cf9b2 2.2b1 -caab085e829f29679d0e47430b2761af6b20fc76 2.1.2 -39f7ef5ef22183f3eba9e05a46068e1d9fd877b0 2.2 -faba785e9b9e05ba890d0851ef1f3287c32fcac2 3.0b1 -8e8c50925f18eafb7e66fe020aa91a85b9a4b122 3.0 -cd9e857476ac70515f7436f846b593f696ac672d 3.0.1 -bad1f30ee0dfa7a2af4f428d06f62efa39ca48db 3.0.2 -47224d55ddc6bb08c1d17a219f124d0d9c524491 3.1 -07c459bea1c58ff52e0576fc29c1865d18a83b09 3.2 -b306e681a945406833fb297ae10241e2241fc22b 3.3 -78c8cfbe3e1017d1653c48f7306b2c4b4911bf1a 4.0b1 -5cb90066d98700e6d37a01d95c4a2090e730ae02 3.4 -e39de2d3eb774b70c023a1151758213cc9ed2178 3.4.1 -369f6f90f69683702cc0b72827ccf949977808b0 3.4.2 -06a56e063c327b0606f9e9690764279d424646b2 3.4.3 -0917d575d26091a184796624743825914994bf95 3.4.4 -98f29d521c3a57bae0090d2bc5597d93db95b108 3.5 -254d8c625f4620993ce2d2b21212ba01cf307fe6 3.5.1 -572201d08eadc59210f6f0f28f9dc79f906672d3 3.5.2 -e94e768594a1405efde0b79cc60549dd8a4cda9a 3.6 -292dfca15d33e72a862d044183a6ad7c06862a19 3.7b1 -49bd27eebf212c067392796bb2d0fa6d8e583586 3.7 -2fa97c06cc013a9c82f4c1219711e72238d5b6e6 3.8 -9b422fc0b8b97cdb62f02d754283f747adef7f83 3.7.1 -40744de29b848f0e88139ba91d645c08a56855e9 3.8.1 -84d936fd18a93d16c46e68ee2e39f5733f3cd863 5.0 -871bd7b4326f48860ebe0baccdaea8fe4f8f8583 5.0.1 -95996b713722376679c3168b15ab12ea8360dd5f 5.0.2 -3a948b6d01e3449b478fcdc532c44eb3cea5ee10 5.1 -f493e6c4ffd88951871110858c141385305e0077 5.2 -1f9505cfd7524ce0c83ab31d139f47b39c56ccbe 5.3 -baae103e80c307008b156e426a07eb9f486eb4f0 5.4 -ba3b08c7bffd6123e1a7d58994f15e8051a67cb7 5.4.1 -7adcf1397f6eccb9e73eda294343de2943f7c8fb 5.4.2 -68910a89f97a508a64f9f235dc64ad43d4477ea0 5.5 
-949a66af4f03521e1404deda940aa951418a13d2 5.5.1 -a1fc0220bfa3581158688789f6dfdc00672eb99b 5.6 -37ed55fd310d0cd32009dc5676121e86b404a23d 5.7 -67550a8ed9f4ef49ee5a31f433adbf5a0eaeccf9 5.8 -755cbfd3743ffb186cdf7e20be8e61dbdaa22503 6.0 -bc6655b4acf205dd9f25c702955645656077398a 6.0.1 -1ae2a75724bbba56373784f185a7f235ed0f24a4 6.0.2b1 -01271e84e5125fcc4f0f368a6e21116a5722953c 6.0.2 -7ea80190d494a766c6356fce85c844703964b6cc 6.1 -df26609c2f614f5fc9110342e4003ee8bd95cf84 7.0 -850a5c155c48b6ecfbb83b961586ea359b561522 8.0b1 -7ea0e7498e4ddbf63b6929ee83c75a9207996b08 8.0 -1af3a5f24f7dd4e51d117f701918052b7de65c99 8.1b1 -d62bf4e407b3b9b5bedcc1396a9ba46f35571902 8.0.1 -1c03d512e39d5cfd711ae3ed7e316769f427e43b 8.0.2 -6c3467488123ce70b1dd009145a02f51fb78cdcc 8.0.3 -2c467afffe9fe1e14618b576fac6b4f7c412a61e 8.0.4 -3f87370b6863e5a4e831b394ef1a58e0e97a4336 8.1 -995f6d9651312cd481ca1e5ddb271cbdd0474c57 8.2 -efbe39dae0aba9a7db399f6442758ae94e315c93 8.2.1 -cd14b2a72e51c7d13873ab6c2041f901b1a7a1cd 8.3 -0eee586a153f068142c1a0df4bc2635ed2c1a1cc 9.0b1 -921e60a0f9067311571fde9ccf2f35223159d9f6 8.4 -0d7b9b63d06ab7f68bc8edd56cb2034e6395d7fc 9.0 -fa069bf2411a150c9379d31a04d1c3836e2d3027 9.0.1 -3ed27d68d3f41bb5daa2afecfa9180d5958fe9d3 9.1 -0c4d18a747a6d39bff8e194a58af949a960d674a 10.0 -4c41e2cdd70beb0da556d71f46a67734c14f2bc2 10.0.1 -26b00011ec65b8f7b4f3d51078ec0a694701a45c 10.1 -651d41db58849d4fc50e466f4dc458d448480c4e 10.2 -1f5de53c079d577ead9d80265c9e006503b16457 10.2.1 -b4b92805bc0e9802da0b597d00df4fa42b30bc40 11.0 -6cd2b18f4be2a9c188fa505b34505b32f4a4554b 11.1 -feb5971e7827483bbdeb67613126bb79ed09e6d9 11.2 -a1a6a1ac9113b90009052ca7263174a488434099 11.3 -1116e568f534ad8f4f41328a0f5fa183eb739c90 11.3.1 -55666947c9eb7e3ba78081ad6ae004807c84aede 12.0 -747018b2e35a40cb4b1c444f150f013d02197c64 12.0.1 -a177ea34bf81662b904fe3af46f3c8719a947ef1 12.0.2 -bf8c5bcacd49bf0f9648013a40ebfc8f7c727f7b 12.0.3 -73dcfc90e3eecec6baddea19302c6b342e68e2fa 12.0.4 -01fbfc9194a2bc502edd682eebbf4d2f1bc79eee 12.0.5 -7bca8938434839dbb546b8bfccd9aab7a86d851e 12.1 -5ff5c804a8fa580cff499ba0025ff2e6a5474fd0 12.2 -8d50aac3b20793954121edb300b477cc75f3ec96 12.3 -297931cb8cac7d44d970adb927efd6cb36ac3526 12.4 -df34cc18624279faffdbc729c0a11e6ab0f46572 13.0 -ae1a5c5cf78f4f9f98c054f1c8cec6168d1d19b4 13.0.1 -e22a1d613bddf311e125eecd9c1e1cad02ab5063 13.0.2 -a3a105f795f8362f26e84e9acbc237ee2d6bcca4 14.0 -9751a1671a124e30ae344d1510b9c1dbb14f2775 14.1 -07fcc3226782b979cedaaf456c7f1c5b2fdafd2c 14.1.1 -d714fb731de779a1337d2d78cd413931f1f06193 14.2 -e3c635a7d463c7713c647d1aa560f83fd8e27ef0 14.3 -608948cef7e0ab8951691b149f5b6f0184a5635e 14.3.1 -617699fd3e44e54b6f95b80bfcf78164df37f266 15.0b1 -d2c4d84867154243993876d6248aafec1fd12679 15.0 -10fde952613b7a3f650fb1f6b6ed58cbd232fa3c 15.1 -df5dc9c7aa7521f552824dee1ed1315cfe180844 15.2 -e0825f0c7d5963c498266fe3c175220c695ae83b 16.0 -8e56240961015347fed477f00ca6a0783e81d3a2 17.0 -a37bcaaeab367f2364ed8c070659d52a4c0ae38e 17.1 -4a0d01d690ff184904293e7a3244ac24ec060a73 17.1.1 -fac98a49bd984ef5accf7177674d693277bfbaef 18.0b1 -0a49ee524b0a1d67d2a11c8c22f082b57acd7ae1 18.0 -e364795c1b09c70b6abb53770e09763b52bf807d 18.0.1 -c0395f556c35d8311fdfe2bda6846b91149819cd 18.1 -1a981f2e5031f55267dc2a28fa1b42274a1b64b2 18.2 -b59320212c8371d0be9e5e6c5f7eec392124c009 18.3 -7a705b610abb1177ca169311c4ee261f3e4f0957 18.3.1 -1e120f04bcaa2421c4df0eb6678c3019ba4a82f6 18.3.2 -6203335278be7543d31790d9fba55739469a4c6c 18.4 -31dc6d2ac0f5ab766652602fe6ca716fff7180e7 18.5 -dfe190b09908f6b953209d13573063809de451b8 18.6 
-804f87045a901f1dc121cf9149143d654228dc13 18.6.1 -67d07805606aead09349d5b91d7d26c68ddad2fc 18.7 -3041e1fc409be90e885968b90faba405420fc161 18.7.1 -c811801ffa1de758cf01fbf6a86e4c04ff0c0935 18.8 -fbf06fa35f93a43f044b1645a7e4ff470edb462c 18.8.1 -cc41477ecf92f221c113736fac2830bf8079d40c 19.0 -834782ce49154e9744e499e00eb392c347f9e034 19.1 -0a2a3d89416e1642cf6f41d22dbc07b3d3c15a4d 19.1.1 -5d24cf9d1ced76c406ab3c4a94c25d1fe79b94bc 19.2 -66fa131a0d77a1b0e6f89ccb76b254cfb07d3da3 19.3b1 -32bba9bf8cce8350b560a7591c9ef5884a194211 19.3 -f47f3671508b015e9bb735603d3a0a6ec6a77b01 19.4 -0bda3291ac725750b899b4ba3e4b6765e7645daa 19.4.1 -0a68cbab72580a6f8d3bf9c45206669eefcd256b 19.5 -34121bf49b1a7ac77da7f7c75105c8a920218dd7 19.6b1 -3c2332e4ec72717bf17321473e5c3ad6e5778903 19.6 -35d9179d04390aada66eceae9ceb7b9274f67646 19.6.1 -d2782cbb2f15ca6831ab9426fbf8d4d6ca60db8a 19.6.2 -c6e619ce910d1650cc2433f94e5594964085f973 19.7 -2a60daeff0cdb039b20b2058aaad7dae7bcd2c1c 20.0 -06c9d3ffae80d7f5786c0a454d040d253d47fc03 20.1 -919a40f1843131249f98104c73f3aee3fc835e67 20.1.1 -74c4ffbe1f399345eb4f6a64785cfff54f7e6e7e 20.2 -1aacb05fbdfe06cee904e7a138a4aa6df7b88a63 20.2.1 -48aa5271ef1cd5379cf91a1c958e490692b978e7 20.2.2 -9c55a3a1268a33b4a57b96b2b9fa2cd0701780ee 20.3 -3e87e975a95c780eec497ef9e5a742f7adfb77ec 20.3.1 -06692c64fb9b5843331a918ab7093f151412ec8e 20.4 -f8174392e9e9c6a21ea5df0f22cb4ca885c799ca 20.5 -114f3dbc8a73dacbce2ebe08bb70ca76ab18390e v20.6.0 -a3d4006688fe5e754d0e709a52a00b8191819979 v20.6.1 -2831509712601a78fddf46e51d6f41ae0f92bd0e v20.6.2 -8b46dc41cb234c435b950a879214a6dee54c9dd2 v20.6.3 -7258be20fe93bbf936dc1a81ce71c04c5880663e v20.6.4 -7e0ab283db4e6f780777f7f06af475f044631fa1 v20.6.5 -57d63b38e85515d06e06d3cea62e35e6c54b5093 v20.6.6 -57d63b38e85515d06e06d3cea62e35e6c54b5093 v20.6.6 -b04dbdd161d7f68903a53e1dbd1fa5b5fde73f94 v20.6.6 -0804d30b6ead64e0e324aefd67439b84df2d1c01 v20.6.7 -a00910db03ec15865e4c8506820d4ad1df3e26f3 v20.6.8 -0262ab29fc2417b502a55f49b7fd43528fbd3df4 v20.7.0 -7f56b6f40de39456c78507a14c288709712881cb v20.8.0 -8cf9340669ae26e2b31f68b9c3f885ab7bdd65ce v20.8.1 -8bf8aaa139bb6a36fcd243214d6730a214ae08f5 v20.9.0 -c72faa468919fd2f226c97e94d4e64a6506860e5 v20.10.0 -3b5fdd077c7d83d02c4979ad69cc0bf199b47587 v20.10.1 -ddd3f81eb9e0860bf95c380c50a72c52a215231f v21.0.0 -018e4a727cf691d6404cd24ffb25e8eebea2fad4 v20.6.8 -02643fe9503033edd2fc5a54c8d4361a6c185be4 v21.1.0 -40b8fac6db119aca9c462993d01908492769fc4f v21.2.0 -40b8fac6db119aca9c462993d01908492769fc4f v21.2.0 -9959424676a4aac1c14e430ff6f4210fdb0442d9 v21.2.0 -694111eadb10fe6003078895a2cbb803ce514ef2 v21.2.1 -274f33435e9c3ba5019f2a2bfe478fa2db0da41d v21.2.2 -451fbedb4c226d8ea5b6eab1e21679c9a4ec4a93 v22.0.0 -f5c4923b0400d61f67699c2d54388878f9e0c8bd v22.0.1 -8610a8b9635f15d33f94fccb295fd34aa6fbddee v22.0.2 -efee7d74a8478c0d08c801fb520e41b6e04d0dda v22.0.3 -77b20c09b04775cc936ab5d16cbc46ff05fc7080 v22.0.4 -d5832e5deb77027da474e79e5f047e9a81f7edf8 v22.0.5 -8664c631bf3a817a7deba86c13b67eccc1f81091 v23.0.0 -6c74559c732c56f61b465d613458ec1a930884b6 v23.1.0 -65b3fe899db4086e66afa067a1311eea2a88d5e2 v23.2.0 -e10c848a82ffb925741c65dd8a8fc8b50b3c3e14 v23.2.1 -a011298221c3d47aa539ae4c119c51861caf6438 v23.2.1 -8d37b17a93ec3e5fff9e040fc3f14ab7b7b24b2c v24.0.0 -130a58f9503fe07ca8c7a34675b7d3a976f163d7 v24.0.1 -7996c56bf6a2f81427b2f91eb11e64d690353493 v24.0.2 -d425bd1ee620772fe90e0dd2a7530b0d6a642601 v24.0.3 -a7d2f79f0996d881794af0f87595032098202811 v24.1.0 -d29075e7f8797891e8c59fb58c4d8d1b79954b34 v24.1.1 -ed9e7bd8caf95261d528ee3db117611dc42814eb v24.2.0 
-5b577d179a7e2f3020712c376c0200901e5c93c1 v24.2.1 -83ca05973c16102145b339aec7e170d94966a2ba v24.3.0 -e14229dd2abc034530447d64ed87fddb944347bd v24.3.1 -58e92028ab0061f1f80d98e769c9143305275242 v25.0.0 -0591bce16c7f94191cea925929cc8b0ce6baca09 v25.0.1 -dc92db3e29a4d1ac57d383091e6cf734d04ed54b v25.0.2 -dc92db3e29a4d1ac57d383091e6cf734d04ed54b v25.0.2 -91eeaf2f33db99d8f78f8261931a1aea8fe8d952 v25.0.2 -529a76a860c50d3cc262759b5b9ce28f171236f9 v25.1.0 -392ee093902e14a1d2a6eefc389a7b9ac78b3f9e v25.1.1 -1cbb29c235439331a76c7b6b5cf8701f763478d3 v25.1.2 -c350190e7bbf274e6728f14af7451b1fd3aaeba2 v25.1.2 -86e668badaf45315bb8506ac2312665d129a0322 v25.1.3 -6f55250b9c5856557ac669d1f966bba8be9eb1d2 v25.1.4 -76143bb477b50314ab6f4ccc4ced80ee43f0dc94 v25.1.5 -2db4c66aeae47217aaf92099a9875e9e810c9cbb v25.1.6 -2083d7c3fadcf15b3bc07f7532440efbcf8fd18d v25.2.0 -2371456ae99d11187c33deacf1308aded31081d9 v25.3.0 -f713f9faaaa33c0e9a628dc9322ef8d1fbeb8319 v25.4.0 -7cb13a0cd176f39500701b24dbfec603ead5110c v26.0.0 diff --git a/.travis.yml b/.travis.yml index ed077d94..b8bca7cc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,7 +12,7 @@ env: - LC_ALL=C LC_CTYPE=C script: # avoid VersionConflict when newer version is required - - pip install -U pytest + - pip install -U 'pytest>=3.0.2' # Output the env, because the travis docs just can't be trusted - env diff --git a/CHANGES.rst b/CHANGES.rst index 5e5bd614..69f966d7 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -2,6 +2,129 @@ CHANGES ======= +v28.4.0 +------- + +* #732: Now extras with a hyphen are honored per PEP 426. +* #811: Update to pyparsing 2.1.10. +* Updated ``setuptools.command.sdist`` to re-use most of + the functionality directly from ``distutils.command.sdist`` + for the ``add_defaults`` method with strategic overrides. + See #750 for rationale. + +v28.3.0 +------- + +* #809: In ``find_packages()``, restore support for excluding + a parent package without excluding a child package. + +* #805: Disable ``-nspkg.pth`` behavior on Python 3.3+ where + PEP-420 functionality is adequate. Fixes pip #1924. + +v28.1.0 +------- + +* #803: Bump certifi to 2016.9.26. + +v28.0.0 +------- + +* #733: Do not search excluded directories for packages. + This introduced a backwards incompatible change in ``find_packages()`` + so that ``find_packages(exclude=['foo']) == []``, excluding subpackages of ``foo``. + Previously, ``find_packages(exclude=['foo']) == ['foo.bar']``, + even though the parent ``foo`` package was excluded. + +* #795: Bump certifi. + +* #719: Suppress decoding errors and instead log a warning + when metadata cannot be decoded. + +v27.3.1 +------- + +* #790: In MSVC monkeypatching, explicitly patch each + function by name in the target module instead of inferring + the module from the function's ``__module__``. Improves + compatibility with other packages that might have previously + patched distutils functions (i.e. NumPy). + +v27.3.0 +------- + +* #794: In test command, add installed eggs to PYTHONPATH + when invoking tests so that subprocesses will also have the + dependencies available. Fixes `tox 330 + <https://github.com/tox-dev/tox/issues/330>`_. + +* #795: Update vendored pyparsing 2.1.9. + +v27.2.0 +------- + +* #520 and #513: Suppress ValueErrors in fixup_namespace_packages + when lookup fails. + +* Nicer, more consistent interfaces for msvc monkeypatching. + +v27.1.2 +------- + +* #779 via #781: Fix circular import. + +v27.1.1 +------- + +* #778: Fix MSVC monkeypatching. 
+ +v27.1.0 +------- + +* Introduce the (private) ``monkey`` module to encapsulate + the distutils monkeypatching behavior. + +v27.0.0 +------- + +* Now use Warehouse by default for + ``upload``, patching ``distutils.config.PyPIRCCommand`` to + affect default behavior. + + Any config in .pypirc should be updated to replace + + https://pypi.python.org/pypi/ + + with + + https://upload.pypi.org/legacy/ + + Similarly, any passwords stored in the keyring should be + updated to use this new value for "system". + + The ``upload_docs`` command will continue to use the python.org + site, but the command is now deprecated. Users are urged to use + Read The Docs instead. + +* #776: Use EXT_SUFFIX for py_limited_api renaming. + +* #774 and #775: Use LegacyVersion from packaging when + detecting numpy versions. + +v26.1.1 +------- + +* Re-release of 26.1.0 with pytest pinned to allow for automated + deployment and thus proper packaging environment variables, + fixing issues with missing executable launchers. + +v26.1.0 +------- + +* #763: ``pkg_resources.get_default_cache`` now defers to the + `appdirs project <https://pypi.org/project/appdirs>`_ to + resolve the cache directory. Adds a vendored dependency on + appdirs to pkg_resources. + v26.0.0 ------- @@ -21,7 +144,7 @@ v25.4.0 ------- * Add Extension(py_limited_api=True). When set to a truthy value, - that extension gets a filename apropriate for code using Py_LIMITED_API. + that extension gets a filename appropriate for code using Py_LIMITED_API. When used correctly this allows a single compiled extension to work on all future versions of CPython 3. The py_limited_api argument only controls the filename. To be diff --git a/docs/setuptools.txt b/docs/setuptools.txt index 0f955663..5ce2c7b1 100644 --- a/docs/setuptools.txt +++ b/docs/setuptools.txt @@ -106,9 +106,9 @@ the distutils. Here's a minimal setup script using setuptools:: from setuptools import setup, find_packages setup( - name = "HelloWorld", - version = "0.1", - packages = find_packages(), + name="HelloWorld", + version="0.1", + packages=find_packages(), ) As you can see, it doesn't take much to use setuptools in a project. @@ -130,16 +130,16 @@ dependencies, and perhaps some data files and scripts:: from setuptools import setup, find_packages setup( - name = "HelloWorld", - version = "0.1", - packages = find_packages(), - scripts = ['say_hello.py'], + name="HelloWorld", + version="0.1", + packages=find_packages(), + scripts=['say_hello.py'], # Project uses reStructuredText, so ensure that the docutils get # installed or upgraded on the target machine - install_requires = ['docutils>=0.3'], + install_requires=['docutils>=0.3'], - package_data = { + package_data={ # If any package contains *.txt or *.rst files, include them: '': ['*.txt', '*.rst'], # And include any *.msg files found in the 'hello' package, too: @@ -147,12 +147,12 @@ dependencies, and perhaps some data files and scripts:: }, # metadata for upload to PyPI - author = "Me", - author_email = "me@example.com", - description = "This is an Example Package", - license = "PSF", - keywords = "hello world example examples", - url = "http://example.com/HelloWorld/", # project home page, if any + author="Me", + author_email="me@example.com", + description="This is an Example Package", + license="PSF", + keywords="hello world example examples", + url="http://example.com/HelloWorld/", # project home page, if any # could also include long_description, download_url, classifiers, etc. 
) @@ -431,7 +431,7 @@ the same directory as the setup script. Some projects use a ``src`` or ``lib`` directory as the root of their source tree, and those projects would of course use ``"src"`` or ``"lib"`` as the first argument to ``find_packages()``. (And -such projects also need something like ``package_dir = {'':'src'}`` in their +such projects also need something like ``package_dir={'':'src'}`` in their ``setup()`` arguments, but that's just a normal distutils thing.) Anyway, ``find_packages()`` walks the target directory, filtering by inclusion @@ -522,7 +522,7 @@ as the following:: setup( # other arguments here... - entry_points = { + entry_points={ 'setuptools.installation': [ 'eggsecutable = my_package.some_module:main_func', ] @@ -674,7 +674,7 @@ installed:: setup( ... - dependency_links = [ + dependency_links=[ "http://peak.telecommunity.com/snapshots/" ], ) @@ -699,7 +699,7 @@ For example, let's say that Project A offers optional PDF and reST support:: setup( name="Project-A", ... - extras_require = { + extras_require={ 'PDF': ["ReportLab>=1.2", "RXP"], 'reST': ["docutils>=0.3"], } @@ -721,7 +721,7 @@ declare it like this, so that the "PDF" requirements are only resolved if the setup( name="Project-A", ... - entry_points = { + entry_points={ 'console_scripts': [ 'rst2pdf = project_a.tools.pdfgen [PDF]', 'rst2html = project_a.tools.htmlgen', @@ -736,7 +736,7 @@ might declare the dependency like this:: setup( name="Project-B", - install_requires = ["Project-A[PDF]"], + install_requires=["Project-A[PDF]"], ... ) @@ -759,7 +759,7 @@ setup to this:: setup( name="Project-A", ... - extras_require = { + extras_require={ 'PDF': [], 'reST': ["docutils>=0.3"], } @@ -784,7 +784,7 @@ e.g.:: from setuptools import setup, find_packages setup( ... - include_package_data = True + include_package_data=True ) This tells setuptools to install any data files it finds in your packages. @@ -801,7 +801,7 @@ e.g.:: from setuptools import setup, find_packages setup( ... - package_data = { + package_data={ # If any package contains *.txt or *.rst files, include them: '': ['*.txt', '*.rst'], # And include any *.msg files found in the 'hello' package, too: @@ -828,10 +828,10 @@ The setuptools setup file might look like this:: from setuptools import setup, find_packages setup( ... - packages = find_packages('src'), # include all packages under src - package_dir = {'':'src'}, # tell distutils packages are under src + packages=find_packages('src'), # include all packages under src + package_dir={'':'src'}, # tell distutils packages are under src - package_data = { + package_data={ # If any package contains *.txt files, include them: '': ['*.txt'], # And include any *.dat files found in the 'data' subdirectory @@ -868,13 +868,13 @@ to do things like this:: from setuptools import setup, find_packages setup( ... - packages = find_packages('src'), # include all packages under src - package_dir = {'':'src'}, # tell distutils packages are under src + packages=find_packages('src'), # include all packages under src + package_dir={'':'src'}, # tell distutils packages are under src - include_package_data = True, # include everything in source control + include_package_data=True, # include everything in source control # ...but exclude README.txt from all packages - exclude_package_data = { '': ['README.txt'] }, + exclude_package_data={'': ['README.txt']}, ) The ``exclude_package_data`` option is a dictionary mapping package names to @@ -1035,21 +1035,21 @@ for our hypothetical blogging tool:: setup( # ... 
- entry_points = {'blogtool.parsers': '.rst = some_module:SomeClass'} + entry_points={'blogtool.parsers': '.rst = some_module:SomeClass'} ) setup( # ... - entry_points = {'blogtool.parsers': ['.rst = some_module:a_func']} + entry_points={'blogtool.parsers': ['.rst = some_module:a_func']} ) setup( # ... - entry_points = """ + entry_points=""" [blogtool.parsers] .rst = some.nested.module:SomeClass.some_classmethod [reST] """, - extras_require = dict(reST = "Docutils>=0.3.5") + extras_require=dict(reST="Docutils>=0.3.5") ) The ``entry_points`` argument to ``setup()`` accepts either a string with @@ -1343,7 +1343,7 @@ participates in. For example, the ZopeInterface project might do this:: setup( # ... - namespace_packages = ['zope'] + namespace_packages=['zope'] ) because it contains a ``zope.interface`` package that lives in the ``zope`` @@ -1599,7 +1599,7 @@ to specify your ``install_requires`` (or other requirements) to include .. code-block:: python - install_requires = ["OtherProject>=0.2a1.dev-r143,==dev"] + install_requires=["OtherProject>=0.2a1.dev-r143,==dev"] The above example says, "I really want at least this particular development revision number, but feel free to follow and use an ``#egg=OtherProject-dev`` @@ -2303,7 +2303,7 @@ available: setup( # ... - test_suite = "my_package.tests.test_all" + test_suite="my_package.tests.test_all" ) If you did not set a ``test_suite`` in your ``setup()`` call, and do not @@ -2427,7 +2427,7 @@ project's setup script:: setup( # ... - entry_points = { + entry_points={ "distutils.commands": [ "foo = mypackage.some_module:foo", ], @@ -2459,7 +2459,7 @@ distutils extension project's setup script:: setup( # ... - entry_points = { + entry_points={ "distutils.commands": [ "foo = mypackage.some_module:foo", ], @@ -2521,7 +2521,7 @@ project that uses the argument:: setup( # ... - entry_points = { + entry_points={ "distutils.setup_keywords": [ "foo_bar = setuptools.dist:assert_string_list", ], @@ -2540,7 +2540,7 @@ a file. Here's what the writer utility looks like:: argname = os.path.splitext(basename)[0] value = getattr(cmd.distribution, argname, None) if value is not None: - value = '\n'.join(value)+'\n' + value = '\n'.join(value) + '\n' cmd.write_or_delete_file(argname, filename, value) As you can see, ``egg_info.writers`` entry points must be a function taking @@ -2582,9 +2582,9 @@ called "foobar", you would write a function something like this: And you would register it in a setup script using something like this:: - entry_points = { + entry_points={ "setuptools.file_finders": [ - "foobar = my_foobar_module:find_files_for_foobar" + "foobar = my_foobar_module:find_files_for_foobar", ] } @@ -2664,4 +2664,3 @@ set of steps to reproduce. .. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ .. 
_setuptools bug tracker: https://github.com/pypa/setuptools/ - diff --git a/pavement.py b/pavement.py index 3d840086..f85617d4 100644 --- a/pavement.py +++ b/pavement.py @@ -12,9 +12,11 @@ def remove_all(paths): @task def update_vendored(): vendor = Path('pkg_resources/_vendor') + # pip uninstall doesn't support -t, so do it manually remove_all(vendor.glob('packaging*')) remove_all(vendor.glob('six*')) remove_all(vendor.glob('pyparsing*')) + remove_all(vendor.glob('appdirs*')) install_args = [ 'install', '-r', str(vendor / 'vendored.txt'), diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 27d70a60..1b8d02f5 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -1,3 +1,5 @@ +# coding: utf-8 + """ Package resource API -------------------- @@ -65,6 +67,7 @@ try: except ImportError: importlib_machinery = None +from pkg_resources.extern import appdirs from pkg_resources.extern import packaging __import__('pkg_resources.extern.packaging.version') __import__('pkg_resources.extern.packaging.specifiers') @@ -1358,48 +1361,15 @@ class ResourceManager: def get_default_cache(): - """Determine the default cache location - - This returns the ``PYTHON_EGG_CACHE`` environment variable, if set. - Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the - "Application Data" directory. On all other systems, it's "~/.python-eggs". """ - try: - return os.environ['PYTHON_EGG_CACHE'] - except KeyError: - pass - - if os.name != 'nt': - return os.path.expanduser('~/.python-eggs') - - # XXX this may be locale-specific! - app_data = 'Application Data' - app_homes = [ - # best option, should be locale-safe - (('APPDATA',), None), - (('USERPROFILE',), app_data), - (('HOMEDRIVE', 'HOMEPATH'), app_data), - (('HOMEPATH',), app_data), - (('HOME',), None), - # 95/98/ME - (('WINDIR',), app_data), - ] - - for keys, subdir in app_homes: - dirname = '' - for key in keys: - if key in os.environ: - dirname = os.path.join(dirname, os.environ[key]) - else: - break - else: - if subdir: - dirname = os.path.join(dirname, subdir) - return os.path.join(dirname, 'Python-Eggs') - else: - raise RuntimeError( - "Please set the PYTHON_EGG_CACHE environment variable" - ) + Return the ``PYTHON_EGG_CACHE`` environment variable + or a platform-relevant user cache dir for an app + named "Python-Eggs". + """ + return ( + os.environ.get('PYTHON_EGG_CACHE') + or appdirs.user_cache_dir(appname='Python-Eggs') + ) def safe_name(name): @@ -1428,7 +1398,7 @@ def safe_extra(extra): Any runs of non-alphanumeric characters are replaced with a single '_', and the result is always lowercased. 
""" - return re.sub('[^A-Za-z0-9.]+', '_', extra).lower() + return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower() def to_filename(name): @@ -1889,17 +1859,21 @@ class FileMetadata(EmptyProvider): return name == 'PKG-INFO' and os.path.isfile(self.path) def get_metadata(self, name): - if name == 'PKG-INFO': - with io.open(self.path, encoding='utf-8') as f: - try: - metadata = f.read() - except UnicodeDecodeError as exc: - # add path context to error message - tmpl = " in {self.path}" - exc.reason += tmpl.format(self=self) - raise - return metadata - raise KeyError("No metadata except PKG-INFO is available") + if name != 'PKG-INFO': + raise KeyError("No metadata except PKG-INFO is available") + + with io.open(self.path, encoding='utf-8', errors="replace") as f: + metadata = f.read() + self._warn_on_replacement(metadata) + return metadata + + def _warn_on_replacement(self, metadata): + # Python 2.6 and 3.2 compat for: replacement_char = '�' + replacement_char = b'\xef\xbf\xbd'.decode('utf-8') + if replacement_char in metadata: + tmpl = "{self.path} could not be properly decoded in UTF-8" + msg = tmpl.format(**locals()) + warnings.warn(msg) def get_metadata_lines(self, name): return yield_lines(self.get_metadata(name)) @@ -2098,6 +2072,15 @@ def _rebuild_mod_path(orig_path, package_name, module): """ sys_path = [_normalize_cached(p) for p in sys.path] + def safe_sys_path_index(entry): + """ + Workaround for #520 and #513. + """ + try: + return sys_path.index(entry) + except ValueError: + return float('inf') + def position_in_sys_path(path): """ Return the ordinal of the path based on its position in sys.path @@ -2105,7 +2088,7 @@ def _rebuild_mod_path(orig_path, package_name, module): path_parts = path.split(os.sep) module_parts = package_name.count('.') + 1 parts = path_parts[:-module_parts] - return sys_path.index(_normalize_cached(os.sep.join(parts))) + return safe_sys_path_index(_normalize_cached(os.sep.join(parts))) orig_path.sort(key=position_in_sys_path) module.__path__[:] = [_normalize_cached(p) for p in orig_path] @@ -2807,8 +2790,8 @@ class DistInfoDistribution(Distribution): dm[None].extend(common) for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: - extra = safe_extra(extra.strip()) - dm[extra] = list(frozenset(reqs_for_extra(extra)) - common) + s_extra = safe_extra(extra.strip()) + dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common) return dm diff --git a/pkg_resources/_vendor/appdirs.py b/pkg_resources/_vendor/appdirs.py new file mode 100644 index 00000000..f4dba095 --- /dev/null +++ b/pkg_resources/_vendor/appdirs.py @@ -0,0 +1,552 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2005-2010 ActiveState Software Inc. +# Copyright (c) 2013 Eddy Petrișor + +"""Utilities for determining application-specific dirs. + +See <http://github.com/ActiveState/appdirs> for details and usage. 
+""" +# Dev Notes: +# - MSDN on where to store app data files: +# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 +# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html +# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html + +__version_info__ = (1, 4, 0) +__version__ = '.'.join(map(str, __version_info__)) + + +import sys +import os + +PY3 = sys.version_info[0] == 3 + +if PY3: + unicode = str + +if sys.platform.startswith('java'): + import platform + os_name = platform.java_ver()[3][0] + if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc. + system = 'win32' + elif os_name.startswith('Mac'): # "Mac OS X", etc. + system = 'darwin' + else: # "Linux", "SunOS", "FreeBSD", etc. + # Setting this to "linux2" is not ideal, but only Windows or Mac + # are actually checked for and the rest of the module expects + # *sys.platform* style strings. + system = 'linux2' +else: + system = sys.platform + + + +def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> + for a discussion of issues. + + Typical user data directories are: + Mac OS X: ~/Library/Application Support/<AppName> + Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined + Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName> + Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName> + Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName> + Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName> + + For Unix, we follow the XDG spec and support $XDG_DATA_HOME. + That means, by default "~/.local/share/<AppName>". 
+ """ + if system == "win32": + if appauthor is None: + appauthor = appname + const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" + path = os.path.normpath(_get_win_folder(const)) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('~/Library/Application Support/') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): + """Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of data dirs should be + returned. By default, the first item from XDG_DATA_DIRS is + returned, or '/usr/local/share/<AppName>', + if XDG_DATA_DIRS is not set + + Typical user data directories are: + Mac OS X: /Library/Application Support/<AppName> + Unix: /usr/local/share/<AppName> or /usr/share/<AppName> + Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName> + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7. + + For Unix, this is using the $XDG_DATA_DIRS[0] default. + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('/Library/Application Support') + if appname: + path = os.path.join(path, appname) + else: + # XDG default for $XDG_DATA_DIRS + # only first, if multipath is False + path = os.getenv('XDG_DATA_DIRS', + os.pathsep.join(['/usr/local/share', '/usr/share'])) + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + if appname and version: + path = os.path.join(path, version) + return path + + +def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific config dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. 
Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> + for a discussion of issues. + + Typical user data directories are: + Mac OS X: same as user_data_dir + Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined + Win *: same as user_data_dir + + For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. + That means, by deafult "~/.config/<AppName>". + """ + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): + """Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of config dirs should be + returned. By default, the first item from XDG_CONFIG_DIRS is + returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set + + Typical user data directories are: + Mac OS X: same as site_data_dir + Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in + $XDG_CONFIG_DIRS + Win *: same as site_data_dir + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + + For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system in ["win32", "darwin"]: + path = site_data_dir(appname, appauthor) + if appname and version: + path = os.path.join(path, version) + else: + # XDG default for $XDG_CONFIG_DIRS + # only first, if multipath is False + path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + +def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific cache dir for this application. + + "appname" is the name of application. 
+ If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Cache" to the base app data dir for Windows. See + discussion below. + + Typical user cache directories are: + Mac OS X: ~/Library/Caches/<AppName> + Unix: ~/.cache/<AppName> (XDG default) + Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache + Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache + + On Windows the only suggestion in the MSDN docs is that local settings go in + the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming + app data dir (the default returned by `user_data_dir` above). Apps typically + put cache data somewhere *under* the given dir here. Some examples: + ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache + ...\Acme\SuperApp\Cache\1.0 + OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. + This can be disabled with the `opinion=False` option. + """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + if opinion: + path = os.path.join(path, "Cache") + elif system == 'darwin': + path = os.path.expanduser('~/Library/Caches') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific log dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Logs" to the base app data dir for Windows, and "log" to the + base cache dir for Unix. See discussion below. + + Typical user cache directories are: + Mac OS X: ~/Library/Logs/<AppName> + Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined + Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs + Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs + + On Windows the only suggestion in the MSDN docs is that local settings + go in the `CSIDL_LOCAL_APPDATA` directory. 
(Note: I'm interested in + examples of what some windows apps use for a logs dir.) + + OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` + value for Windows and appends "log" to the user cache dir for Unix. + This can be disabled with the `opinion=False` option. + """ + if system == "darwin": + path = os.path.join( + os.path.expanduser('~/Library/Logs'), + appname) + elif system == "win32": + path = user_data_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "Logs") + else: + path = user_cache_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "log") + if appname and version: + path = os.path.join(path, version) + return path + + +class AppDirs(object): + """Convenience wrapper for getting application dirs.""" + def __init__(self, appname, appauthor=None, version=None, roaming=False, + multipath=False): + self.appname = appname + self.appauthor = appauthor + self.version = version + self.roaming = roaming + self.multipath = multipath + + @property + def user_data_dir(self): + return user_data_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_data_dir(self): + return site_data_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_config_dir(self): + return user_config_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_config_dir(self): + return site_config_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_cache_dir(self): + return user_cache_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_log_dir(self): + return user_log_dir(self.appname, self.appauthor, + version=self.version) + + +#---- internal support stuff + +def _get_win_folder_from_registry(csidl_name): + """This is a fallback technique at best. I'm not sure if using the + registry for this guarantees us the correct answer for all CSIDL_* + names. + """ + import _winreg + + shell_folder_name = { + "CSIDL_APPDATA": "AppData", + "CSIDL_COMMON_APPDATA": "Common AppData", + "CSIDL_LOCAL_APPDATA": "Local AppData", + }[csidl_name] + + key = _winreg.OpenKey( + _winreg.HKEY_CURRENT_USER, + r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" + ) + dir, type = _winreg.QueryValueEx(key, shell_folder_name) + return dir + + +def _get_win_folder_with_pywin32(csidl_name): + from win32com.shell import shellcon, shell + dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) + # Try to make this a unicode path because SHGetFolderPath does + # not return unicode strings when there is unicode data in the + # path. + try: + dir = unicode(dir) + + # Downgrade to short path name if have highbit chars. See + # <http://bugs.activestate.com/show_bug.cgi?id=85099>. + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + try: + import win32api + dir = win32api.GetShortPathName(dir) + except ImportError: + pass + except UnicodeError: + pass + return dir + + +def _get_win_folder_with_ctypes(csidl_name): + import ctypes + + csidl_const = { + "CSIDL_APPDATA": 26, + "CSIDL_COMMON_APPDATA": 35, + "CSIDL_LOCAL_APPDATA": 28, + }[csidl_name] + + buf = ctypes.create_unicode_buffer(1024) + ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) + + # Downgrade to short path name if have highbit chars. 
See + # <http://bugs.activestate.com/show_bug.cgi?id=85099>. + has_high_char = False + for c in buf: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf2 = ctypes.create_unicode_buffer(1024) + if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): + buf = buf2 + + return buf.value + +def _get_win_folder_with_jna(csidl_name): + import array + from com.sun import jna + from com.sun.jna.platform import win32 + + buf_size = win32.WinDef.MAX_PATH * 2 + buf = array.zeros('c', buf_size) + shell = win32.Shell32.INSTANCE + shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + # Downgrade to short path name if have highbit chars. See + # <http://bugs.activestate.com/show_bug.cgi?id=85099>. + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf = array.zeros('c', buf_size) + kernel = win32.Kernel32.INSTANCE + if kernal.GetShortPathName(dir, buf, buf_size): + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + return dir + +if system == "win32": + try: + import win32com.shell + _get_win_folder = _get_win_folder_with_pywin32 + except ImportError: + try: + from ctypes import windll + _get_win_folder = _get_win_folder_with_ctypes + except ImportError: + try: + import com.sun.jna + _get_win_folder = _get_win_folder_with_jna + except ImportError: + _get_win_folder = _get_win_folder_from_registry + + +#---- self test code + +if __name__ == "__main__": + appname = "MyApp" + appauthor = "MyCompany" + + props = ("user_data_dir", "site_data_dir", + "user_config_dir", "site_config_dir", + "user_cache_dir", "user_log_dir") + + print("-- app dirs (with optional 'version')") + dirs = AppDirs(appname, appauthor, version="1.0") + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'version')") + dirs = AppDirs(appname, appauthor) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'appauthor')") + dirs = AppDirs(appname) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (with disabled 'appauthor')") + dirs = AppDirs(appname, appauthor=False) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) diff --git a/pkg_resources/_vendor/packaging/specifiers.py b/pkg_resources/_vendor/packaging/specifiers.py index 9b6353f0..7f5a76cf 100644 --- a/pkg_resources/_vendor/packaging/specifiers.py +++ b/pkg_resources/_vendor/packaging/specifiers.py @@ -198,7 +198,7 @@ class _IndividualSpecifier(BaseSpecifier): (prereleases or self.prereleases)): found_prereleases.append(version) # Either this is not a prerelease, or we should have been - # accepting prereleases from the beginning. + # accepting prereleases from the begining. else: yielded = True yield version diff --git a/pkg_resources/_vendor/pyparsing.py b/pkg_resources/_vendor/pyparsing.py index 89cffc10..a2122435 100644 --- a/pkg_resources/_vendor/pyparsing.py +++ b/pkg_resources/_vendor/pyparsing.py @@ -1,6 +1,6 @@ # module pyparsing.py
#
-# Copyright (c) 2003-2015 Paul T. McGuire
+# Copyright (c) 2003-2016 Paul T. McGuire
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@@ -31,15 +31,18 @@ vs. the traditional lex/yacc approach, or the use of regular expressions. With
don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
provides a library of classes that you use to construct the grammar directly in Python.
-Here is a program to parse "Hello, World!" (or any greeting of the form C{"<salutation>, <addressee>!"})::
+Here is a program to parse "Hello, World!" (or any greeting of the form
+C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements
+(L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted to
+L{Literal} expressions)::
from pyparsing import Word, alphas
# define grammar of a greeting
- greet = Word( alphas ) + "," + Word( alphas ) + "!"
+ greet = Word(alphas) + "," + Word(alphas) + "!"
hello = "Hello, World!"
- print (hello, "->", greet.parseString( hello ))
+ print (hello, "->", greet.parseString(hello))
The program outputs the following::
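For readers skimming the diff, the greeting grammar described in this docstring can be exercised as a standalone script. A minimal sketch, assuming a 2.x pyparsing is importable (the printed repr may vary slightly between versions)::

    from pyparsing import Word, alphas

    # "+" builds an And expression; bare strings are auto-converted to Literal
    greet = Word(alphas) + "," + Word(alphas) + "!"

    hello = "Hello, World!"
    print(hello, "->", greet.parseString(hello))
    # per the docstring: Hello, World! -> ['Hello', ',', 'World', '!']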
@@ -48,7 +51,7 @@ The program outputs the following::
The Python representation of the grammar is quite readable, owing to the self-explanatory
class names, and the use of '+', '|' and '^' operators.
-The parsed results returned from L{I{ParserElement.parseString}<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or an
+The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or an
object with named attributes.
The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
@@ -57,8 +60,8 @@ The pyparsing module handles some of the problems that are typically vexing when
- embedded comments
"""
-__version__ = "2.1.8"
-__versionTime__ = "14 Aug 2016 08:43 UTC"
+__version__ = "2.1.10"
+__versionTime__ = "07 Oct 2016 01:31 UTC"
__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
import string
@@ -107,7 +110,7 @@ __all__ = [
'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass',
-'tokenMap', 'pyparsing_common',
+'CloseMatch', 'tokenMap', 'pyparsing_common',
]
system_version = tuple(sys.version_info)[:3]
@@ -291,7 +294,7 @@ class _ParseResultsWithOffset(object):
def __getitem__(self,i):
return self.tup[i]
def __repr__(self):
- return repr(self.tup)
+ return repr(self.tup[0])
def setOffset(self,i):
self.tup = (self.tup[0],i)
@@ -310,6 +313,7 @@ class ParseResults(object):
# equivalent form:
# date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+ # parseString returns a ParseResults object
result = date_str.parseString("1999/12/31")
def test(s, fn=repr):
@@ -836,8 +840,8 @@ class ParseResults(object):
return None
elif (len(self) == 1 and
len(self.__tokdict) == 1 and
- self.__tokdict.values()[0][0][1] in (0,-1)):
- return self.__tokdict.keys()[0]
+ next(iter(self.__tokdict.values()))[0][1] in (0,-1)):
+ return next(iter(self.__tokdict.keys()))
else:
return None
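The hunk above replaces direct indexing of ``self.__tokdict.values()`` and ``.keys()`` with ``next(iter(...))``. A minimal illustration of why, using a hypothetical dict rather than code from the patch: on Python 3 these methods return view objects that cannot be indexed, while ``next(iter(...))`` works on both Python 2 and 3::

    tokdict = {'year': [(('1999', 0), 0)]}   # hypothetical shape, illustration only

    # tokdict.values()[0]   # TypeError on Python 3: 'dict_values' object is not subscriptable
    first_value = next(iter(tokdict.values()))   # works on Python 2 and 3
    first_key = next(iter(tokdict.keys()))
    print(first_key, first_value)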
@@ -864,7 +868,7 @@ class ParseResults(object):
out.append( indent+_ustr(self.asList()) )
if full:
if self.haskeys():
- items = sorted(self.items())
+ items = sorted((str(k), v) for k,v in self.items())
for k,v in items:
if out:
out.append(NL)
@@ -875,7 +879,7 @@ class ParseResults(object):
else:
out.append(_ustr(v))
else:
- out.append(_ustr(v))
+ out.append(repr(v))
elif any(isinstance(vv,ParseResults) for vv in self):
v = self
for i,vv in enumerate(v):
@@ -949,7 +953,7 @@ def col (loc,strg):
positions within the parsed string.
"""
s = strg
- return 1 if loc<len(s) and s[loc] == '\n' else loc - s.rfind("\n", 0, loc)
+ return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc)
def lineno(loc,strg):
"""Returns current line number within a string, counting newlines as line separators.
@@ -1775,7 +1779,15 @@ class ParserElement(object):
def __add__(self, other ):
"""
- Implementation of + operator - returns C{L{And}}
+ Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement
+ converts them to L{Literal}s by default.
+
+ Example::
+ greet = Word(alphas) + "," + Word(alphas) + "!"
+ hello = "Hello, World!"
+ print (hello, "->", greet.parseString(hello))
+ Prints::
+ Hello, World! -> ['Hello', ',', 'World', '!']
"""
if isinstance( other, basestring ):
other = ParserElement._literalStringClass( other )
@@ -1972,7 +1984,7 @@ class ParserElement(object):
def __call__(self, name=None):
"""
- Shortcut for C{L{setResultsName}}, with C{listAllMatches=default}.
+ Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}.
If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be
passed as C{True}.
@@ -2083,7 +2095,8 @@ class ParserElement(object):
Match alphaword at loc 15(1,16)
Exception raised:Expected alphaword (at char 15), (line:1, col:16)
- The output shown is that produced by the default debug actions. Prior to attempting
+ The output shown is that produced by the default debug actions - custom debug actions can be
+ specified using L{setDebugActions}. Prior to attempting
to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"}
is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"}
message is shown. Also note the use of L{setName} to assign a human-readable name to the expression,
@@ -2159,7 +2172,7 @@ class ParserElement(object):
def matches(self, testString, parseAll=True):
"""
Method for quick testing of a parser against a test string. Good for simple
- inline microtests of sub expressions while building up larger parser.0
+ inline microtests of sub expressions while building up larger parser.
Parameters:
- testString - to test against this expression for a match
@@ -2254,6 +2267,13 @@ class ParserElement(object):
FAIL: Expected end of text (at char 4), (line:1, col:5)
Success
+
+ Each test string must be on a single line. If you want to test a string that spans multiple
+ lines, create a test like this::
+
+ expr.runTests(r"this is a test\\n of strings that spans \\n 3 lines")
+
+ (Note that this is a raw string literal; you must include the leading 'r'.)
"""
if isinstance(tests, basestring):
tests = list(map(str.strip, tests.rstrip().splitlines()))
@@ -2271,6 +2291,7 @@ class ParserElement(object):
out = ['\n'.join(comments), t]
comments = []
try:
+ t = t.replace(r'\n','\n')
result = self.parseString(t, parseAll=parseAll)
out.append(result.dump(full=fullDump))
success = success and not failureTests
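The added C{t.replace(r'\n','\n')} is what backs the docstring note above: a literal
C{\n} in a test line is expanded to a real newline before parsing. A minimal sketch
(the grammar and test text are invented for illustration)::

    from pyparsing import OneOrMore, Word, alphanums

    expr = OneOrMore(Word(alphanums))
    # one test per line; the raw-string \n becomes a real newline inside runTests
    expr.runTests(r"""
        this is a test\n of strings that spans \n 3 lines
        """)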
@@ -2393,8 +2414,10 @@ class Keyword(Token):
"""
DEFAULT_KEYWORD_CHARS = alphanums+"_$"
- def __init__( self, matchString, identChars=DEFAULT_KEYWORD_CHARS, caseless=False ):
+ def __init__( self, matchString, identChars=None, caseless=False ):
super(Keyword,self).__init__()
+ if identChars is None:
+ identChars = Keyword.DEFAULT_KEYWORD_CHARS
self.match = matchString
self.matchLen = len(matchString)
try:
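Deferring the C{DEFAULT_KEYWORD_CHARS} lookup to the constructor means the class-level
default is read each time a C{Keyword} is built, instead of being frozen when the method
is defined. A small usage sketch (the keyword and test string are made up)::

    from pyparsing import Keyword, alphas

    if_kw = Keyword("if")                       # picks up Keyword.DEFAULT_KEYWORD_CHARS
    custom = Keyword("if", identChars=alphas)   # explicit identChars still accepted
    print(if_kw.parseString("if x"))            # -> ['if']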
@@ -2469,7 +2492,7 @@ class CaselessKeyword(Keyword):
(Contrast with example for L{CaselessLiteral}.)
"""
- def __init__( self, matchString, identChars=Keyword.DEFAULT_KEYWORD_CHARS ):
+ def __init__( self, matchString, identChars=None ):
super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True )
def parseImpl( self, instring, loc, doActions=True ):
@@ -2478,6 +2501,67 @@ class CaselessKeyword(Keyword):
return loc+self.matchLen, self.match
raise ParseException(instring, loc, self.errmsg, self)
+class CloseMatch(Token):
+ """
+ A variation on L{Literal} which matches "close" matches, that is,
+ strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters:
+ - C{match_string} - string to be matched
+ - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match
+
+ The results from a successful parse will contain the matched text from the input string and the following named results:
+ - C{mismatches} - a list of the positions within the match_string where mismatches were found
+ - C{original} - the original match_string used to compare against the input string
+
+ If C{mismatches} is an empty list, then the match was an exact match.
+
+ Example::
+ patt = CloseMatch("ATCATCGAATGGA")
+ patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
+ patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)
+
+ # exact match
+ patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})
+
+ # close match allowing up to 2 mismatches
+ patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
+ patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
+ """
+ def __init__(self, match_string, maxMismatches=1):
+ super(CloseMatch,self).__init__()
+ self.name = match_string
+ self.match_string = match_string
+ self.maxMismatches = maxMismatches
+ self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
+ self.mayIndexError = False
+ self.mayReturnEmpty = False
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ start = loc
+ instrlen = len(instring)
+ maxloc = start + len(self.match_string)
+
+ if maxloc <= instrlen:
+ match_string = self.match_string
+ match_stringloc = 0
+ mismatches = []
+ maxMismatches = self.maxMismatches
+
+ for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)):
+ src,mat = s_m
+ if src != mat:
+ mismatches.append(match_stringloc)
+ if len(mismatches) > maxMismatches:
+ break
+ else:
+ loc = match_stringloc + 1
+ results = ParseResults([instring[start:loc]])
+ results['original'] = self.match_string
+ results['mismatches'] = mismatches
+ return loc, results
+
+ raise ParseException(instring, loc, self.errmsg, self)
+
+
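A short usage sketch for the new C{CloseMatch} token, reading back the C{mismatches} and
C{original} named results set in C{parseImpl} (the strings follow the docstring example)::

    from pyparsing import CloseMatch

    patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
    result = patt.parseString("ATCAXCGAAXGGA")
    print(result[0])               # text actually matched from the input
    print(result['mismatches'])    # differing positions, here [4, 9]
    print(result['original'])      # the match_string compared against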
class Word(Token):
"""
Token for matching words composed of allowed character sets.
@@ -2576,7 +2660,7 @@ class Word(Token):
self.reString = r"\b"+self.reString+r"\b"
try:
self.re = re.compile( self.reString )
- except:
+ except Exception:
self.re = None
def parseImpl( self, instring, loc, doActions=True ):
@@ -2617,7 +2701,7 @@ class Word(Token):
def __str__( self ):
try:
return super(Word,self).__str__()
- except:
+ except Exception:
pass
@@ -2646,7 +2730,7 @@ class Regex(Token):
Example::
realnum = Regex(r"[+-]?\d+\.\d*")
- date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)')
+ date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
# ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
"""
@@ -2701,7 +2785,7 @@ class Regex(Token):
def __str__( self ):
try:
return super(Regex,self).__str__()
- except:
+ except Exception:
pass
if self.strRepr is None:
@@ -2838,7 +2922,7 @@ class QuotedString(Token):
def __str__( self ):
try:
return super(QuotedString,self).__str__()
- except:
+ except Exception:
pass
if self.strRepr is None:
@@ -2907,7 +2991,7 @@ class CharsNotIn(Token):
def __str__( self ):
try:
return super(CharsNotIn, self).__str__()
- except:
+ except Exception:
pass
if self.strRepr is None:
@@ -3001,27 +3085,36 @@ class GoToColumn(_PositionToken):
ret = instring[ loc: newloc ]
return newloc, ret
+
class LineStart(_PositionToken):
"""
Matches if current position is at the beginning of a line within the parse string
+
+ Example::
+
+ test = '''\
+ AAA this line
+ AAA and this line
+ AAA but not this one
+ B AAA and definitely not this one
+ '''
+
+ for t in (LineStart() + 'AAA' + restOfLine).searchString(test):
+ print(t)
+
+ Prints::
+ ['AAA', ' this line']
+ ['AAA', ' and this line']
+
"""
def __init__( self ):
super(LineStart,self).__init__()
- self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") )
self.errmsg = "Expected start of line"
- def preParse( self, instring, loc ):
- preloc = super(LineStart,self).preParse(instring,loc)
- if instring[preloc] == "\n":
- loc += 1
- return loc
-
def parseImpl( self, instring, loc, doActions=True ):
- if not( loc==0 or
- (loc == self.preParse( instring, 0 )) or
- (instring[loc-1] == "\n") ): #col(loc, instring) != 1:
- raise ParseException(instring, loc, self.errmsg, self)
- return loc, []
+ if col(loc, instring) == 1:
+ return loc, []
+ raise ParseException(instring, loc, self.errmsg, self)
class LineEnd(_PositionToken):
"""
@@ -3175,7 +3268,7 @@ class ParseExpression(ParserElement):
def __str__( self ):
try:
return super(ParseExpression,self).__str__()
- except:
+ except Exception:
pass
if self.strRepr is None:
@@ -3647,7 +3740,7 @@ class ParseElementEnhance(ParserElement):
def __str__( self ):
try:
return super(ParseElementEnhance,self).__str__()
- except:
+ except Exception:
pass
if self.strRepr is None and self.expr is not None:
@@ -3716,6 +3809,7 @@ class NotAny(ParseElementEnhance):
class _MultipleMatch(ParseElementEnhance):
def __init__( self, expr, stopOn=None):
super(_MultipleMatch, self).__init__(expr)
+ self.saveAsList = True
ender = stopOn
if isinstance(ender, basestring):
ender = ParserElement._literalStringClass(ender)
@@ -3785,11 +3879,6 @@ class OneOrMore(_MultipleMatch):
return self.strRepr
- def setResultsName( self, name, listAllMatches=False ):
- ret = super(OneOrMore,self).setResultsName(name,listAllMatches)
- ret.saveAsList = True
- return ret
-
class ZeroOrMore(_MultipleMatch):
"""
Optional repetition of zero or more of the given expression.
@@ -3866,6 +3955,7 @@ class Optional(ParseElementEnhance):
"""
def __init__( self, expr, default=_optionalNotMatched ):
super(Optional,self).__init__( expr, savelist=False )
+ self.saveAsList = self.expr.saveAsList
self.defaultValue = default
self.mayReturnEmpty = True
@@ -4274,7 +4364,10 @@ class OnlyOnce(object):
def traceParseAction(f):
"""
- Decorator for debugging parse actions.
+ Decorator for debugging parse actions.
+
+ When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})"}.
+ When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised.
Example::
wd = Word(alphas)
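A minimal sketch of decorating a parse action with C{traceParseAction}; the upper-casing
action itself is invented for illustration::

    from pyparsing import Word, alphas, traceParseAction

    @traceParseAction
    def upcase_action(s, loc, tokens):
        return [t.upper() for t in tokens]

    wd = Word(alphas).setParseAction(upcase_action)
    print(wd.parseString("hello"))   # entry/exit trace messages are emitted as the action runs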
@@ -4339,9 +4432,16 @@ def countedArray( expr, intExpr=None ):
integer expr expr expr...
where the leading integer tells how many expr expressions follow.
The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed.
+
+ If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value.
Example::
countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd']
+
+ # in this parser, the leading integer value is given in binary,
+ # '10' indicating that 2 values are in the array
+ binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))
+ countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd']
"""
arrayExpr = Forward()
def countFieldParseAction(s,l,t):
@@ -4494,7 +4594,7 @@ def oneOf( strs, caseless=False, useRegex=True ):
return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols))
else:
return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols))
- except:
+ except Exception:
warnings.warn("Exception creating Regex for oneOf, building MatchFirst",
SyntaxWarning, stacklevel=2)
@@ -4640,7 +4740,7 @@ def srange(s):
_expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))
try:
return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)
- except:
+ except Exception:
return ""
def matchOnlyAtCol(n):
@@ -4726,10 +4826,10 @@ def tokenMap(func, *args):
return pa
upcaseTokens = tokenMap(lambda t: _ustr(t).upper())
-"""Helper parse action to convert tokens to upper case."""
+"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}"""
downcaseTokens = tokenMap(lambda t: _ustr(t).lower())
-"""Helper parse action to convert tokens to lower case."""
+"""(Deprecated) Helper parse action to convert tokens to lower case. Deprecated in favor of L{pyparsing_common.downcaseTokens}"""
def _makeTags(tagStr, xml):
"""Internal helper to construct opening and closing tag expressions, given a tag name"""
@@ -4897,7 +4997,12 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
Helper method for constructing grammars of expressions made up of
operators working in a precedence hierarchy. Operators may be unary or
binary, left- or right-associative. Parse actions can also be attached
- to operator expressions.
+ to operator expressions. The generated parser will also recognize the use
+ of parentheses to override operator precedences (see example below).
+
+ Note: if you define a deep operator list, you may see performance issues
+ when using infixNotation. See L{ParserElement.enablePackrat} for a
+ mechanism to potentially improve your parser performance.
Parameters:
- baseExpr - expression representing the most basic element for the nested
@@ -4921,7 +5026,7 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
Example::
# simple example of four-function arithmetic with ints and variable names
- integer = pyparsing_common.signedInteger
+ integer = pyparsing_common.signed_integer
varname = pyparsing_common.identifier
arith_expr = infixNotation(integer | varname,
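A sketch of the packrat suggestion from the note above: enable memoization once, then build
the C{infixNotation} grammar (the operator list here is invented for illustration)::

    from pyparsing import (ParserElement, infixNotation, opAssoc, oneOf,
                           pyparsing_common)

    ParserElement.enablePackrat()   # memoize parse attempts module-wide

    integer = pyparsing_common.signed_integer
    expr = infixNotation(integer, [
        ('-', 1, opAssoc.RIGHT),
        (oneOf('* /'), 2, opAssoc.LEFT),
        (oneOf('+ -'), 2, opAssoc.LEFT),
    ])
    print(expr.parseString("1 + 2 * (3 - 4)"))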
@@ -5241,23 +5346,27 @@ _commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + Optional(
Word(" \t") +
~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem")
commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList")
-"""Predefined expression of 1 or more printable words or quoted strings, separated by commas."""
+"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas.
+ This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}."""
# some other useful expressions - using lower-case class name since we are really using this as a namespace
class pyparsing_common:
"""
Here are some common low-level expressions that may be useful in jump-starting parser development:
- - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sciReal>})
+ - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>})
- common L{programming identifiers<identifier>}
- network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>})
- ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>}
- L{UUID<uuid>}
+ - L{comma-separated list<comma_separated_list>}
Parse actions:
- C{L{convertToInteger}}
- C{L{convertToFloat}}
- C{L{convertToDate}}
- C{L{convertToDatetime}}
- C{L{stripHTMLTags}}
+ - C{L{upcaseTokens}}
+ - C{L{downcaseTokens}}
Example::
pyparsing_common.number.runTests('''
@@ -5393,25 +5502,25 @@ class pyparsing_common:
hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16))
"""expression that parses a hexadecimal integer, returns an int"""
- signedInteger = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger)
+ signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger)
"""expression that parses an integer with optional leading sign, returns an int"""
- fraction = (signedInteger().setParseAction(convertToFloat) + '/' + signedInteger().setParseAction(convertToFloat)).setName("fraction")
+ fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction")
"""fractional expression of an integer divided by an integer, returns a float"""
fraction.addParseAction(lambda t: t[0]/t[-1])
- mixed_integer = (fraction | signedInteger + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction")
+ mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction")
"""mixed integer of the form 'integer - fraction', with optional leading integer, returns float"""
mixed_integer.addParseAction(sum)
real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat)
"""expression that parses a floating point number and returns a float"""
- sciReal = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat)
+ sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat)
"""expression that parses a floating point number with optional scientific notation and returns a float"""
# streamlining this expression makes the docs nicer-looking
- number = (sciReal | real | signedInteger).streamline()
+ number = (sci_real | real | signed_integer).streamline()
"""any numeric expression, returns the corresponding Python type"""
fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat)
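Quick sketch exercising the renamed expressions (C{signedInteger} -> C{signed_integer},
C{sciReal} -> C{sci_real}); the literals being parsed are made up::

    from pyparsing import pyparsing_common

    print(pyparsing_common.signed_integer.parseString("-42"))   # -> [-42]
    print(pyparsing_common.sci_real.parseString("6.02e23"))     # -> [6.02e+23]
    print(pyparsing_common.number.parseString("3.14159"))       # -> [3.14159]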
@@ -5503,6 +5612,18 @@ class pyparsing_common:
"""
return pyparsing_common._html_stripper.transformString(tokens[0])
+ _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',')
+ + Optional( White(" \t") ) ) ).streamline().setName("commaItem")
+ comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list")
+ """Predefined expression of 1 or more printable words or quoted strings, separated by commas."""
+
+ upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper()))
+ """Parse action to convert tokens to upper case."""
+
+ downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower()))
+ """Parse action to convert tokens to lower case."""
+
+
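A short sketch of the new C{pyparsing_common.comma_separated_list} expression (the sample
line is made up); quoted fields keep their embedded commas intact::

    from pyparsing import pyparsing_common

    line = 'red, "green, dark", blue'
    for field in pyparsing_common.comma_separated_list.parseString(line):
        print(field)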
if __name__ == "__main__":
selectToken = CaselessLiteral("select")
diff --git a/pkg_resources/_vendor/vendored.txt b/pkg_resources/_vendor/vendored.txt index a30a409d..6b5eb450 100644 --- a/pkg_resources/_vendor/vendored.txt +++ b/pkg_resources/_vendor/vendored.txt @@ -1,3 +1,4 @@ packaging==16.7 -pyparsing==2.1.8 +pyparsing==2.1.10 six==1.10.0 +appdirs==1.4.0 diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py index 492f66f1..b4156fec 100644 --- a/pkg_resources/extern/__init__.py +++ b/pkg_resources/extern/__init__.py @@ -69,5 +69,5 @@ class VendorImporter: sys.meta_path.append(self) -names = 'packaging', 'pyparsing', 'six' +names = 'packaging', 'pyparsing', 'six', 'appdirs' VendorImporter(__name__, names).install() diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py index 2ed56233..3b13884b 100644 --- a/pkg_resources/tests/test_resources.py +++ b/pkg_resources/tests/test_resources.py @@ -221,6 +221,24 @@ class TestDistro: res = list(ws.resolve(parse_requirements("Foo[baz]"), ad)) assert res == [Foo, quux] + def test_marker_evaluation_with_extras_normlized(self): + """Extras are also evaluated as markers at resolution time.""" + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + # Metadata needs to be native strings due to cStringIO behaviour in + # 2.6, so use str(). + Foo = Distribution.from_filename( + "/foo_dir/Foo-1.2.dist-info", + metadata=Metadata(("METADATA", str("Provides-Extra: baz-lightyear\n" + "Requires-Dist: quux; extra=='baz-lightyear'"))) + ) + ad.add(Foo) + assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo] + quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info") + ad.add(quux) + res = list(ws.resolve(parse_requirements("Foo[baz-lightyear]"), ad)) + assert res == [Foo, quux] + def test_marker_evaluation_with_multiple_extras(self): ad = pkg_resources.Environment([]) ws = WorkingSet([]) @@ -1,6 +1,6 @@ [pytest] addopts=--doctest-modules --ignore release.py --ignore setuptools/lib2to3_ex.py --ignore tests/manual_test.py --ignore tests/shlib_test --doctest-glob=pkg_resources/api_tests.txt --ignore scripts/upload-old-releases-as-zip.py --ignore pavement.py -norecursedirs=dist build *.egg setuptools/extern pkg_resources/extern .tox +norecursedirs=dist build *.egg setuptools/extern pkg_resources/extern .* flake8-ignore = setuptools/site-patch.py F821 setuptools/py*compat.py F811 @@ -1,5 +1,5 @@ [bumpversion] -current_version = 26.0.0 +current_version = 28.3.0 commit = True tag = True @@ -88,7 +88,7 @@ def pypi_link(pkg_filename): setup_params = dict( name="setuptools", - version="26.0.0", + version="28.3.0", description="Easily download, build, install, upgrade, and uninstall " "Python packages", author="Python Packaging Authority", @@ -167,11 +167,11 @@ setup_params = dict( """).strip().splitlines(), extras_require={ "ssl:sys_platform=='win32'": "wincertstore==0.2", - "certs": "certifi==2016.8.8", + "certs": "certifi==2016.9.26", }, dependency_links=[ pypi_link( - 'certifi-2016.8.8.tar.gz#md5=b57513f7670482da45bb350b792f659e', + 'certifi-2016.9.26.tar.gz#md5=baa81e951a29958563689d868ef1064d', ), pypi_link( 'wincertstore-0.2.zip#md5=ae728f2f007185648d0c7a8679b361e2', @@ -181,7 +181,8 @@ setup_params = dict( tests_require=[ 'setuptools[ssl]', 'pytest-flake8', - 'pytest>=2.8,!=3.0.0', + # pin pytest to 3.0.2 for pytest-dev/pytest#1888 + 'pytest==3.0.2', ] + (['mock'] if sys.version_info[:2] < (3, 3) else []), setup_requires=[ ] + pytest_runner + wheel, diff --git a/setuptools/__init__.py b/setuptools/__init__.py index cf0c39f2..baec3884 100644 --- 
a/setuptools/__init__.py +++ b/setuptools/__init__.py @@ -4,20 +4,20 @@ import os import functools import distutils.core import distutils.filelist -from distutils.core import Command as _Command from distutils.util import convert_path from fnmatch import fnmatchcase -from setuptools.extern.six.moves import filterfalse, map +from setuptools.extern.six.moves import filter, filterfalse, map import setuptools.version from setuptools.extension import Extension -from setuptools.dist import Distribution, Feature, _get_unpatched +from setuptools.dist import Distribution, Feature from setuptools.depends import Require +from . import monkey __all__ = [ 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require', - 'find_packages' + 'find_packages', ] __version__ = setuptools.version.__version__ @@ -32,13 +32,18 @@ lib2to3_fixer_packages = ['lib2to3.fixes'] class PackageFinder(object): + """ + Generate a list of all Python packages found within a directory + """ @classmethod def find(cls, where='.', exclude=(), include=('*',)): """Return a list all Python packages found within directory 'where' - 'where' should be supplied as a "cross-platform" (i.e. URL-style) - path; it will be converted to the appropriate local path syntax. + 'where' is the root directory which will be searched for packages. It + should be supplied as a "cross-platform" (i.e. URL-style) path; it will + be converted to the appropriate local path syntax. + 'exclude' is a sequence of package names to exclude; '*' can be used as a wildcard in the names, such that 'foo.*' will exclude all subpackages of 'foo' (but not 'foo' itself). @@ -47,65 +52,51 @@ class PackageFinder(object): specified, only the named packages will be included. If it's not specified, all found packages will be included. 'include' can contain shell style wildcard patterns just like 'exclude'. - - The list of included packages is built up first and then any - explicitly excluded packages are removed from it. """ - out = cls._find_packages_iter(convert_path(where)) - out = cls.require_parents(out) - includes = cls._build_filter(*include) - excludes = cls._build_filter('ez_setup', '*__pycache__', *exclude) - out = filter(includes, out) - out = filterfalse(excludes, out) - return list(out) - @staticmethod - def require_parents(packages): - """ - Exclude any apparent package that apparently doesn't include its - parent. - - For example, exclude 'foo.bar' if 'foo' is not present. - """ - found = [] - for pkg in packages: - base, sep, child = pkg.rpartition('.') - if base and base not in found: - continue - found.append(pkg) - yield pkg + return list(cls._find_packages_iter( + convert_path(where), + cls._build_filter('ez_setup', '*__pycache__', *exclude), + cls._build_filter(*include))) - @staticmethod - def _candidate_dirs(base_path): + @classmethod + def _find_packages_iter(cls, where, exclude, include): """ - Return all dirs in base_path that might be packages. + All the packages found in 'where' that pass the 'include' filter, but + not the 'exclude' filter. """ - has_dot = lambda name: '.' in name - for root, dirs, files in os.walk(base_path, followlinks=True): - # Exclude directories that contain a period, as they cannot be - # packages. Mutate the list to avoid traversal. - dirs[:] = filterfalse(has_dot, dirs) - for dir in dirs: - yield os.path.relpath(os.path.join(root, dir), base_path) + for root, dirs, files in os.walk(where, followlinks=True): + # Copy dirs to iterate over it, then empty dirs. 
+ all_dirs = dirs[:] + dirs[:] = [] - @classmethod - def _find_packages_iter(cls, base_path): - candidates = cls._candidate_dirs(base_path) - return ( - path.replace(os.path.sep, '.') - for path in candidates - if cls._looks_like_package(os.path.join(base_path, path)) - ) + for dir in all_dirs: + full_path = os.path.join(root, dir) + rel_path = os.path.relpath(full_path, where) + package = rel_path.replace(os.path.sep, '.') + + # Skip directory trees that are not valid packages + if ('.' in dir or not cls._looks_like_package(full_path)): + continue + + # Should this package be included? + if include(package) and not exclude(package): + yield package + + # Keep searching subdirectories, as there may be more packages + # down there, even if the parent was excluded. + dirs.append(dir) @staticmethod def _looks_like_package(path): + """Does a directory look like a package?""" return os.path.isfile(os.path.join(path, '__init__.py')) @staticmethod def _build_filter(*patterns): """ Given a list of patterns, return a callable that will be true only if - the input matches one of the patterns. + the input matches at least one of the patterns. """ return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns) @@ -121,7 +112,7 @@ find_packages = PackageFinder.find setup = distutils.core.setup -_Command = _get_unpatched(_Command) +_Command = monkey.get_unpatched(distutils.core.Command) class Command(_Command): @@ -143,10 +134,6 @@ class Command(_Command): return cmd -# we can't patch distutils.cmd, alas -distutils.core.Command = Command - - def _find_all_simple(path): """ Find all files under 'path' @@ -171,5 +158,4 @@ def findall(dir=os.curdir): return list(files) -# fix findall bug in distutils (http://bugs.python.org/issue12885) -distutils.filelist.findall = findall +monkey.patch_all() diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index f994b626..454c91fb 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -111,7 +111,7 @@ class build_ext(_build_ext): and get_abi3_suffix() ) if use_abi3: - so_ext = get_config_var('SO') + so_ext = get_config_var('EXT_SUFFIX') filename = filename[:-len(so_ext)] filename = filename + get_abi3_suffix() if isinstance(ext, Library): diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 5065661f..a3792ce2 100755 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -8,7 +8,7 @@ A tool for doing automatic download/extract/build of distutils-based Python packages. For detailed documentation, see the accompanying EasyInstall.txt file, or visit the `EasyInstall home page`__. 
-__ https://pythonhosted.org/setuptools/easy_install.html +__ https://setuptools.readthedocs.io/en/latest/easy_install.html """ @@ -49,8 +49,9 @@ from setuptools.sandbox import run_setup from setuptools.py31compat import get_path, get_config_vars from setuptools.command import setopt from setuptools.archive_util import unpack_archive -from setuptools.package_index import PackageIndex -from setuptools.package_index import URL_SCHEME +from setuptools.package_index import ( + PackageIndex, parse_requirement_arg, URL_SCHEME, +) from setuptools.command import bdist_egg, egg_info from pkg_resources import ( yield_lines, normalize_path, resource_string, ensure_directory, @@ -512,7 +513,7 @@ class easy_install(Command): For information on other options, you may wish to consult the documentation at: - https://pythonhosted.org/setuptools/easy_install.html + https://setuptools.readthedocs.io/en/latest/easy_install.html Please make the appropriate changes for your system and try again. """).lstrip() @@ -1256,7 +1257,8 @@ class easy_install(Command): * You can set up the installation directory to support ".pth" files by using one of the approaches described here: - https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations + https://setuptools.readthedocs.io/en/latest/easy_install.html#custom-installation-locations + Please make the appropriate changes for your system and try again.""").lstrip() @@ -1521,15 +1523,6 @@ def get_exe_prefixes(exe_filename): return prefixes -def parse_requirement_arg(spec): - try: - return Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % (spec,) - ) - - class PthDistributions(Environment): """A .pth file with Distribution paths in it""" diff --git a/setuptools/command/install_egg_info.py b/setuptools/command/install_egg_info.py index 60b615d2..7834e107 100755 --- a/setuptools/command/install_egg_info.py +++ b/setuptools/command/install_egg_info.py @@ -82,9 +82,10 @@ class install_egg_info(Command): _nspkg_tmpl = ( "import sys, types, os", + "pep420 = sys.version_info > (3, 3)", "p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)", "ie = os.path.exists(os.path.join(p,'__init__.py'))", - "m = not ie and " + "m = not ie and not pep420 and " "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))", "mp = (m or []) and m.__dict__.setdefault('__path__',[])", "(p not in mp) and mp.append(p)", diff --git a/setuptools/command/py36compat.py b/setuptools/command/py36compat.py new file mode 100644 index 00000000..61063e75 --- /dev/null +++ b/setuptools/command/py36compat.py @@ -0,0 +1,136 @@ +import os +from glob import glob +from distutils.util import convert_path +from distutils.command import sdist + +from setuptools.extern.six.moves import filter + + +class sdist_add_defaults: + """ + Mix-in providing forward-compatibility for functionality as found in + distutils on Python 3.7. + + Do not edit the code in this class except to update functionality + as implemented in distutils. Instead, override in the subclass. + """ + + def add_defaults(self): + """Add all the default files to self.filelist: + - README or README.txt + - setup.py + - test/test*.py + - all pure Python modules mentioned in setup script + - all files pointed by package_data (build_py) + - all files defined in data_files. + - all files defined as scripts. + - all C sources listed as part of extensions or C libraries + in the setup script (doesn't catch C headers!) 
+ Warns if (README or README.txt) or setup.py are missing; everything + else is optional. + """ + self._add_defaults_standards() + self._add_defaults_optional() + self._add_defaults_python() + self._add_defaults_data_files() + self._add_defaults_ext() + self._add_defaults_c_libs() + self._add_defaults_scripts() + + @staticmethod + def _cs_path_exists(fspath): + """ + Case-sensitive path existence check + + >>> sdist_add_defaults._cs_path_exists(__file__) + True + >>> sdist_add_defaults._cs_path_exists(__file__.upper()) + False + """ + if not os.path.exists(fspath): + return False + # make absolute so we always have a directory + abspath = os.path.abspath(fspath) + directory, filename = os.path.split(abspath) + return filename in os.listdir(directory) + + def _add_defaults_standards(self): + standards = [self.READMES, self.distribution.script_name] + for fn in standards: + if isinstance(fn, tuple): + alts = fn + got_it = False + for fn in alts: + if self._cs_path_exists(fn): + got_it = True + self.filelist.append(fn) + break + + if not got_it: + self.warn("standard file not found: should have one of " + + ', '.join(alts)) + else: + if self._cs_path_exists(fn): + self.filelist.append(fn) + else: + self.warn("standard file '%s' not found" % fn) + + def _add_defaults_optional(self): + optional = ['test/test*.py', 'setup.cfg'] + for pattern in optional: + files = filter(os.path.isfile, glob(pattern)) + self.filelist.extend(files) + + def _add_defaults_python(self): + # build_py is used to get: + # - python modules + # - files defined in package_data + build_py = self.get_finalized_command('build_py') + + # getting python files + if self.distribution.has_pure_modules(): + self.filelist.extend(build_py.get_source_files()) + + # getting package_data files + # (computed in build_py.data_files by build_py.finalize_options) + for pkg, src_dir, build_dir, filenames in build_py.data_files: + for filename in filenames: + self.filelist.append(os.path.join(src_dir, filename)) + + def _add_defaults_data_files(self): + # getting distribution.data_files + if self.distribution.has_data_files(): + for item in self.distribution.data_files: + if isinstance(item, str): + # plain file + item = convert_path(item) + if os.path.isfile(item): + self.filelist.append(item) + else: + # a (dirname, filenames) tuple + dirname, filenames = item + for f in filenames: + f = convert_path(f) + if os.path.isfile(f): + self.filelist.append(f) + + def _add_defaults_ext(self): + if self.distribution.has_ext_modules(): + build_ext = self.get_finalized_command('build_ext') + self.filelist.extend(build_ext.get_source_files()) + + def _add_defaults_c_libs(self): + if self.distribution.has_c_libraries(): + build_clib = self.get_finalized_command('build_clib') + self.filelist.extend(build_clib.get_source_files()) + + def _add_defaults_scripts(self): + if self.distribution.has_scripts(): + build_scripts = self.get_finalized_command('build_scripts') + self.filelist.extend(build_scripts.get_source_files()) + + +if hasattr(sdist.sdist, '_add_defaults_standards'): + # disable the functionality already available upstream + class sdist_add_defaults: + pass diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py index 1d4f5d54..b85d7d03 100755 --- a/setuptools/command/sdist.py +++ b/setuptools/command/sdist.py @@ -1,4 +1,3 @@ -from glob import glob from distutils import log import distutils.command.sdist as orig import os @@ -8,12 +7,10 @@ import contextlib from setuptools.extern import six -from setuptools.utils import 
cs_path_exists +from .py36compat import sdist_add_defaults import pkg_resources -READMES = 'README', 'README.rst', 'README.txt' - _default_revctrl = list @@ -24,7 +21,7 @@ def walk_revctrl(dirname=''): yield item -class sdist(orig.sdist): +class sdist(sdist_add_defaults, orig.sdist): """Smart sdist that finds anything supported by revision control""" user_options = [ @@ -40,6 +37,8 @@ class sdist(orig.sdist): negative_opt = {} + READMES = 'README', 'README.rst', 'README.txt' + def run(self): self.run_command('egg_info') ei_cmd = self.get_finalized_command('egg_info') @@ -126,35 +125,8 @@ class sdist(orig.sdist): if has_leaky_handle: read_template = __read_template_hack - def add_defaults(self): - standards = [READMES, - self.distribution.script_name] - for fn in standards: - if isinstance(fn, tuple): - alts = fn - got_it = 0 - for fn in alts: - if cs_path_exists(fn): - got_it = 1 - self.filelist.append(fn) - break - - if not got_it: - self.warn("standard file not found: should have one of " + - ', '.join(alts)) - else: - if cs_path_exists(fn): - self.filelist.append(fn) - else: - self.warn("standard file '%s' not found" % fn) - - optional = ['test/test*.py', 'setup.cfg'] - for pattern in optional: - files = list(filter(cs_path_exists, glob(pattern))) - if files: - self.filelist.extend(files) - - # getting python files + def _add_defaults_python(self): + """getting python files""" if self.distribution.has_pure_modules(): build_py = self.get_finalized_command('build_py') self.filelist.extend(build_py.get_source_files()) @@ -167,26 +139,19 @@ class sdist(orig.sdist): self.filelist.extend([os.path.join(src_dir, filename) for filename in filenames]) - if self.distribution.has_ext_modules(): - build_ext = self.get_finalized_command('build_ext') - self.filelist.extend(build_ext.get_source_files()) - - if self.distribution.has_c_libraries(): - build_clib = self.get_finalized_command('build_clib') - self.filelist.extend(build_clib.get_source_files()) - - if self.distribution.has_scripts(): - build_scripts = self.get_finalized_command('build_scripts') - self.filelist.extend(build_scripts.get_source_files()) + def _add_defaults_data_files(self): + """ + Don't add any data files, but why? 
+ """ def check_readme(self): - for f in READMES: + for f in self.READMES: if os.path.exists(f): return else: self.warn( "standard file not found: should have one of " + - ', '.join(READMES) + ', '.join(self.READMES) ) def make_release_tree(self, base_dir, files): diff --git a/setuptools/command/test.py b/setuptools/command/test.py index 2d1adba8..38bbcd8b 100644 --- a/setuptools/command/test.py +++ b/setuptools/command/test.py @@ -1,10 +1,13 @@ +import os +import operator import sys import contextlib +import itertools from distutils.errors import DistutilsOptionError from unittest import TestLoader from setuptools.extern import six -from setuptools.extern.six.moves import map +from setuptools.extern.six.moves import map, filter from pkg_resources import (resource_listdir, resource_exists, normalize_path, working_set, _namespace_packages, @@ -112,7 +115,7 @@ class test(Command): func() @contextlib.contextmanager - def project_on_sys_path(self): + def project_on_sys_path(self, include_dists=[]): with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False) if with_2to3: @@ -144,23 +147,57 @@ class test(Command): old_modules = sys.modules.copy() try: - sys.path.insert(0, normalize_path(ei_cmd.egg_base)) + project_path = normalize_path(ei_cmd.egg_base) + sys.path.insert(0, project_path) working_set.__init__() add_activation_listener(lambda dist: dist.activate()) require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version)) - yield + with self.paths_on_pythonpath([project_path]): + yield finally: sys.path[:] = old_path sys.modules.clear() sys.modules.update(old_modules) working_set.__init__() + @staticmethod + @contextlib.contextmanager + def paths_on_pythonpath(paths): + """ + Add the indicated paths to the head of the PYTHONPATH environment + variable so that subprocesses will also see the packages at + these paths. + + Do this in a context that restores the value on exit. + """ + nothing = object() + orig_pythonpath = os.environ.get('PYTHONPATH', nothing) + current_pythonpath = os.environ.get('PYTHONPATH', '') + try: + prefix = os.pathsep.join(paths) + to_join = filter(None, [prefix, current_pythonpath]) + new_path = os.pathsep.join(to_join) + if new_path: + os.environ['PYTHONPATH'] = new_path + yield + finally: + if orig_pythonpath is nothing: + os.environ.pop('PYTHONPATH', None) + else: + os.environ['PYTHONPATH'] = orig_pythonpath + + @staticmethod + def install_dists(dist): + """ + Install the requirements indicated by self.distribution and + return an iterable of the dists that were built. + """ + ir_d = dist.fetch_build_eggs(dist.install_requires or []) + tr_d = dist.fetch_build_eggs(dist.tests_require or []) + return itertools.chain(ir_d, tr_d) + def run(self): - if self.distribution.install_requires: - self.distribution.fetch_build_eggs( - self.distribution.install_requires) - if self.distribution.tests_require: - self.distribution.fetch_build_eggs(self.distribution.tests_require) + installed_dists = self.install_dists(self.distribution) cmd = ' '.join(self._argv) if self.dry_run: @@ -168,8 +205,11 @@ class test(Command): return self.announce('running "%s"' % cmd) - with self.project_on_sys_path(): - self.run_tests() + + paths = map(operator.attrgetter('location'), installed_dists) + with self.paths_on_pythonpath(paths): + with self.project_on_sys_path(): + self.run_tests() def run_tests(self): # Purge modules under test from sys.modules. 
The test loader will diff --git a/setuptools/command/upload_docs.py b/setuptools/command/upload_docs.py index ccc1c76f..269dc2d5 100644 --- a/setuptools/command/upload_docs.py +++ b/setuptools/command/upload_docs.py @@ -29,6 +29,10 @@ def _encode(s): class upload_docs(upload): + # override the default repository as upload_docs isn't + # supported by Warehouse (and won't be). + DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/' + description = 'Upload documentation to PyPI' user_options = [ @@ -53,6 +57,7 @@ class upload_docs(upload): self.target_dir = None def finalize_options(self): + log.warn("Upload_docs command is deprecated. Use RTD instead.") upload.finalize_options(self) if self.upload_dir is None: if self.has_sphinx(): diff --git a/setuptools/dist.py b/setuptools/dist.py index 820df6d5..364f2b4d 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -2,14 +2,12 @@ __all__ = ['Distribution'] import re import os -import sys import warnings import numbers import distutils.log import distutils.core import distutils.cmd import distutils.dist -from distutils.core import Distribution as _Distribution from distutils.errors import (DistutilsOptionError, DistutilsPlatformError, DistutilsSetupError) from distutils.util import rfc822_escape @@ -20,100 +18,66 @@ from pkg_resources.extern import packaging from setuptools.depends import Require from setuptools import windows_support +from setuptools.monkey import get_unpatched import pkg_resources def _get_unpatched(cls): - """Protect against re-patching the distutils if reloaded + warnings.warn("Do not call this function", DeprecationWarning) + return get_unpatched(cls) - Also ensures that no other distutils extension monkeypatched the distutils - first. - """ - while cls.__module__.startswith('setuptools'): - cls, = cls.__bases__ - if not cls.__module__.startswith('distutils'): - raise AssertionError( - "distutils has already been patched by %r" % cls - ) - return cls - - -_Distribution = _get_unpatched(_Distribution) - - -def _patch_distribution_metadata_write_pkg_file(): - """Patch write_pkg_file to also write Requires-Python/Requires-External""" - - # Based on Python 3.5 version - def write_pkg_file(self, file): - """Write the PKG-INFO format data to a file object. 
- """ - version = '1.0' - if (self.provides or self.requires or self.obsoletes or - self.classifiers or self.download_url): - version = '1.1' - # Setuptools specific for PEP 345 - if hasattr(self, 'python_requires'): - version = '1.2' - - file.write('Metadata-Version: %s\n' % version) - file.write('Name: %s\n' % self.get_name()) - file.write('Version: %s\n' % self.get_version()) - file.write('Summary: %s\n' % self.get_description()) - file.write('Home-page: %s\n' % self.get_url()) - file.write('Author: %s\n' % self.get_contact()) - file.write('Author-email: %s\n' % self.get_contact_email()) - file.write('License: %s\n' % self.get_license()) - if self.download_url: - file.write('Download-URL: %s\n' % self.download_url) - - long_desc = rfc822_escape(self.get_long_description()) - file.write('Description: %s\n' % long_desc) - - keywords = ','.join(self.get_keywords()) - if keywords: - file.write('Keywords: %s\n' % keywords) - - self._write_list(file, 'Platform', self.get_platforms()) - self._write_list(file, 'Classifier', self.get_classifiers()) - - # PEP 314 - self._write_list(file, 'Requires', self.get_requires()) - self._write_list(file, 'Provides', self.get_provides()) - self._write_list(file, 'Obsoletes', self.get_obsoletes()) - # Setuptools specific for PEP 345 - if hasattr(self, 'python_requires'): - file.write('Requires-Python: %s\n' % self.python_requires) - - distutils.dist.DistributionMetadata.write_pkg_file = write_pkg_file - - -_patch_distribution_metadata_write_pkg_file() - - -def _patch_distribution_metadata_write_pkg_info(): +# Based on Python 3.5 version +def write_pkg_file(self, file): + """Write the PKG-INFO format data to a file object. """ - Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local - encoding to save the pkg_info. Monkey-patch its write_pkg_info method to - correct this undesirable behavior. + version = '1.0' + if (self.provides or self.requires or self.obsoletes or + self.classifiers or self.download_url): + version = '1.1' + # Setuptools specific for PEP 345 + if hasattr(self, 'python_requires'): + version = '1.2' + + file.write('Metadata-Version: %s\n' % version) + file.write('Name: %s\n' % self.get_name()) + file.write('Version: %s\n' % self.get_version()) + file.write('Summary: %s\n' % self.get_description()) + file.write('Home-page: %s\n' % self.get_url()) + file.write('Author: %s\n' % self.get_contact()) + file.write('Author-email: %s\n' % self.get_contact_email()) + file.write('License: %s\n' % self.get_license()) + if self.download_url: + file.write('Download-URL: %s\n' % self.download_url) + + long_desc = rfc822_escape(self.get_long_description()) + file.write('Description: %s\n' % long_desc) + + keywords = ','.join(self.get_keywords()) + if keywords: + file.write('Keywords: %s\n' % keywords) + + self._write_list(file, 'Platform', self.get_platforms()) + self._write_list(file, 'Classifier', self.get_classifiers()) + + # PEP 314 + self._write_list(file, 'Requires', self.get_requires()) + self._write_list(file, 'Provides', self.get_provides()) + self._write_list(file, 'Obsoletes', self.get_obsoletes()) + + # Setuptools specific for PEP 345 + if hasattr(self, 'python_requires'): + file.write('Requires-Python: %s\n' % self.python_requires) + + +# from Python 3.4 +def write_pkg_info(self, base_dir): + """Write the PKG-INFO file into the release tree. 
""" - environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2) - if not environment_local: - return - - # from Python 3.4 - def write_pkg_info(self, base_dir): - """Write the PKG-INFO file into the release tree. - """ - with open(os.path.join(base_dir, 'PKG-INFO'), 'w', - encoding='UTF-8') as pkg_info: - self.write_pkg_file(pkg_info) + with open(os.path.join(base_dir, 'PKG-INFO'), 'w', + encoding='UTF-8') as pkg_info: + self.write_pkg_file(pkg_info) - distutils.dist.DistributionMetadata.write_pkg_info = write_pkg_info - - -_patch_distribution_metadata_write_pkg_info() sequence = tuple, list @@ -245,6 +209,9 @@ def check_packages(dist, attr, value): ) +_Distribution = get_unpatched(distutils.core.Distribution) + + class Distribution(_Distribution): """Distribution with support for features, tests, and package data @@ -395,6 +362,7 @@ class Distribution(_Distribution): ) for dist in resolved_dists: pkg_resources.working_set.add(dist, replace=True) + return resolved_dists def finalize_options(self): _Distribution.finalize_options(self) @@ -792,11 +760,6 @@ class Distribution(_Distribution): sys.stdout.detach(), encoding, errors, newline, line_buffering) -# Install it throughout the distutils -for module in distutils.dist, distutils.core, distutils.cmd: - module.Distribution = Distribution - - class Feature: """ **deprecated** -- The `Feature` facility was never completely implemented diff --git a/setuptools/extension.py b/setuptools/extension.py index f8058b72..03068d35 100644 --- a/setuptools/extension.py +++ b/setuptools/extension.py @@ -1,4 +1,3 @@ -import sys import re import functools import distutils.core @@ -7,12 +6,7 @@ import distutils.extension from setuptools.extern.six.moves import map -from .dist import _get_unpatched -from . import msvc - -_Extension = _get_unpatched(distutils.core.Extension) - -msvc.patch_for_specialized_compiler() +from .monkey import get_unpatched def _have_cython(): @@ -33,6 +27,9 @@ def _have_cython(): have_pyrex = _have_cython +_Extension = get_unpatched(distutils.core.Extension) + + class Extension(_Extension): """Extension that uses '.c' files in place of '.pyx' files""" @@ -59,9 +56,3 @@ class Extension(_Extension): class Library(Extension): """Just like a regular Extension, but built as a library instead""" - - -distutils.core.Extension = Extension -distutils.extension.Extension = Extension -if 'distutils.command.build_ext' in sys.modules: - sys.modules['distutils.command.build_ext'].Extension = Extension diff --git a/setuptools/monkey.py b/setuptools/monkey.py new file mode 100644 index 00000000..43b97b4d --- /dev/null +++ b/setuptools/monkey.py @@ -0,0 +1,187 @@ +""" +Monkey patching of distutils. +""" + +import sys +import distutils.filelist +import platform +import types +import functools + +from .py26compat import import_module +from setuptools.extern import six + +import setuptools + + +__all__ = [] +""" +Everything is private. Contact the project team +if you think you need this functionality. +""" + + +def get_unpatched(item): + lookup = ( + get_unpatched_class if isinstance(item, six.class_types) else + get_unpatched_function if isinstance(item, types.FunctionType) else + lambda item: None + ) + return lookup(item) + + +def get_unpatched_class(cls): + """Protect against re-patching the distutils if reloaded + + Also ensures that no other distutils extension monkeypatched the distutils + first. 
+ """ + while cls.__module__.startswith('setuptools'): + cls, = cls.__bases__ + if not cls.__module__.startswith('distutils'): + msg = "distutils has already been patched by %r" % cls + raise AssertionError(msg) + return cls + + +def patch_all(): + # we can't patch distutils.cmd, alas + distutils.core.Command = setuptools.Command + + has_issue_12885 = ( + sys.version_info < (3, 4, 6) + or + (3, 5) < sys.version_info <= (3, 5, 3) + or + (3, 6) < sys.version_info + ) + + if has_issue_12885: + # fix findall bug in distutils (http://bugs.python.org/issue12885) + distutils.filelist.findall = setuptools.findall + + needs_warehouse = ( + sys.version_info < (2, 7, 13) + or + (3, 0) < sys.version_info < (3, 3, 7) + or + (3, 4) < sys.version_info < (3, 4, 6) + or + (3, 5) < sys.version_info <= (3, 5, 3) + or + (3, 6) < sys.version_info + ) + + if needs_warehouse: + warehouse = 'https://upload.pypi.org/legacy/' + distutils.config.PyPIRCCommand.DEFAULT_REPOSITORY = warehouse + + _patch_distribution_metadata_write_pkg_file() + _patch_distribution_metadata_write_pkg_info() + + # Install Distribution throughout the distutils + for module in distutils.dist, distutils.core, distutils.cmd: + module.Distribution = setuptools.dist.Distribution + + # Install the patched Extension + distutils.core.Extension = setuptools.extension.Extension + distutils.extension.Extension = setuptools.extension.Extension + if 'distutils.command.build_ext' in sys.modules: + sys.modules['distutils.command.build_ext'].Extension = ( + setuptools.extension.Extension + ) + + patch_for_msvc_specialized_compiler() + + +def _patch_distribution_metadata_write_pkg_file(): + """Patch write_pkg_file to also write Requires-Python/Requires-External""" + distutils.dist.DistributionMetadata.write_pkg_file = ( + setuptools.dist.write_pkg_file + ) + + +def _patch_distribution_metadata_write_pkg_info(): + """ + Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local + encoding to save the pkg_info. Monkey-patch its write_pkg_info method to + correct this undesirable behavior. + """ + environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2) + if not environment_local: + return + + distutils.dist.DistributionMetadata.write_pkg_info = ( + setuptools.dist.write_pkg_info + ) + + +def patch_func(replacement, target_mod, func_name): + """ + Patch func_name in target_mod with replacement + + Important - original must be resolved by name to avoid + patching an already patched function. + """ + original = getattr(target_mod, func_name) + + # set the 'unpatched' attribute on the replacement to + # point to the original. + vars(replacement).setdefault('unpatched', original) + + # replace the function in the original module + setattr(target_mod, func_name, replacement) + + +def get_unpatched_function(candidate): + return getattr(candidate, 'unpatched') + + +def patch_for_msvc_specialized_compiler(): + """ + Patch functions in distutils to use standalone Microsoft Visual C++ + compilers. + """ + # import late to avoid circular imports on Python < 3.5 + msvc = import_module('setuptools.msvc') + + if platform.system() != 'Windows': + # Compilers only availables on Microsoft Windows + return + + def patch_params(mod_name, func_name): + """ + Prepare the parameters for patch_func to patch indicated function. 
+ """ + repl_prefix = 'msvc9_' if 'msvc9' in mod_name else 'msvc14_' + repl_name = repl_prefix + func_name.lstrip('_') + repl = getattr(msvc, repl_name) + mod = import_module(mod_name) + if not hasattr(mod, func_name): + raise ImportError(func_name) + return repl, mod, func_name + + # Python 2.7 to 3.4 + msvc9 = functools.partial(patch_params, 'distutils.msvc9compiler') + + # Python 3.5+ + msvc14 = functools.partial(patch_params, 'distutils._msvccompiler') + + try: + # Patch distutils.msvc9compiler + patch_func(*msvc9('find_vcvarsall')) + patch_func(*msvc9('query_vcvarsall')) + except ImportError: + pass + + try: + # Patch distutils._msvccompiler._get_vc_env + patch_func(*msvc14('_get_vc_env')) + except ImportError: + pass + + try: + # Patch distutils._msvccompiler.gen_lib_options for Numpy + patch_func(*msvc14('gen_lib_options')) + except ImportError: + pass diff --git a/setuptools/msvc.py b/setuptools/msvc.py index 26e399cc..e9665e10 100644 --- a/setuptools/msvc.py +++ b/setuptools/msvc.py @@ -1,15 +1,31 @@ """ -This module adds improved support for Microsoft Visual C++ compilers. +Improved support for Microsoft Visual C++ compilers. + +Known supported compilers: +-------------------------- +Microsoft Visual C++ 9.0: + Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64); + Microsoft Windows SDK 7.0 (x86, x64, ia64); + Microsoft Windows SDK 6.1 (x86, x64, ia64) + +Microsoft Visual C++ 10.0: + Microsoft Windows SDK 7.1 (x86, x64, ia64) + +Microsoft Visual C++ 14.0: + Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) """ + import os import sys import platform import itertools import distutils.errors -from distutils.version import StrictVersion +from pkg_resources.extern.packaging.version import LegacyVersion from setuptools.extern.six.moves import filterfalse +from .monkey import get_unpatched + if platform.system() == 'Windows': from setuptools.extern.six.moves import winreg safe_env = os.environ @@ -26,74 +42,10 @@ else: safe_env = dict() try: - # Distutil file for MSVC++ 9.0 and upper (Python 2.7 to 3.4) - import distutils.msvc9compiler as msvc9compiler + from distutils.msvc9compiler import Reg except ImportError: pass -try: - # Distutil file for MSVC++ 14.0 and upper (Python 3.5+) - import distutils._msvccompiler as msvc14compiler -except ImportError: - pass - - -unpatched = dict() - - -def patch_for_specialized_compiler(): - """ - Patch functions in distutils to use standalone Microsoft Visual C++ - compilers. 
- - Known supported compilers: - -------------------------- - Microsoft Visual C++ 9.0: - Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64); - Microsoft Windows SDK 7.0 (x86, x64, ia64); - Microsoft Windows SDK 6.1 (x86, x64, ia64) - - Microsoft Visual C++ 10.0: - Microsoft Windows SDK 7.1 (x86, x64, ia64) - - Microsoft Visual C++ 14.0: - Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) - """ - if platform.system() != 'Windows': - # Compilers only availables on Microsoft Windows - return - - if 'distutils' not in globals(): - # The module isn't available to be patched - return - - if unpatched: - # Already patched - return - - try: - # Patch distutils.msvc9compiler - unpatched['msvc9_find_vcvarsall'] = msvc9compiler.find_vcvarsall - msvc9compiler.find_vcvarsall = msvc9_find_vcvarsall - unpatched['msvc9_query_vcvarsall'] = msvc9compiler.query_vcvarsall - msvc9compiler.query_vcvarsall = msvc9_query_vcvarsall - except NameError: - pass - - try: - # Patch distutils._msvccompiler._get_vc_env - unpatched['msvc14_get_vc_env'] = msvc14compiler._get_vc_env - msvc14compiler._get_vc_env = msvc14_get_vc_env - except NameError: - pass - - try: - # Patch distutils._msvccompiler.gen_lib_options for Numpy - unpatched['msvc14_gen_lib_options'] = msvc14compiler.gen_lib_options - msvc14compiler.gen_lib_options = msvc14_gen_lib_options - except NameError: - pass - def msvc9_find_vcvarsall(version): """ @@ -117,7 +69,6 @@ def msvc9_find_vcvarsall(version): ------ vcvarsall.bat path: str """ - Reg = msvc9compiler.Reg VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f' key = VC_BASE % ('', version) try: @@ -136,7 +87,7 @@ def msvc9_find_vcvarsall(version): if os.path.isfile(vcvarsall): return vcvarsall - return unpatched['msvc9_find_vcvarsall'](version) + return get_unpatched(msvc9_find_vcvarsall)(version) def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs): @@ -169,7 +120,8 @@ def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs): """ # Try to get environement from vcvarsall.bat (Classical way) try: - return unpatched['msvc9_query_vcvarsall'](ver, arch, *args, **kwargs) + orig = get_unpatched(msvc9_query_vcvarsall) + return orig(ver, arch, *args, **kwargs) except distutils.errors.DistutilsPlatformError: # Pass error if Vcvarsall.bat is missing pass @@ -208,7 +160,7 @@ def msvc14_get_vc_env(plat_spec): """ # Try to get environment from vcvarsall.bat (Classical way) try: - return unpatched['msvc14_get_vc_env'](plat_spec) + return get_unpatched(msvc14_get_vc_env)(plat_spec) except distutils.errors.DistutilsPlatformError: # Pass error Vcvarsall.bat is missing pass @@ -229,9 +181,9 @@ def msvc14_gen_lib_options(*args, **kwargs): """ if "numpy.distutils" in sys.modules: import numpy as np - if StrictVersion(np.__version__) < StrictVersion('1.11.2'): + if LegacyVersion(np.__version__) < LegacyVersion('1.11.2'): return np.distutils.ccompiler.gen_lib_options(*args, **kwargs) - return unpatched['msvc14_gen_lib_options'](*args, **kwargs) + return get_unpatched(msvc14_gen_lib_options)(*args, **kwargs) def _augment_exception(exc, version, arch=''): diff --git a/setuptools/package_index.py b/setuptools/package_index.py index 82cd608f..3e8d6818 100755 --- a/setuptools/package_index.py +++ b/setuptools/package_index.py @@ -52,6 +52,15 @@ _tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}" user_agent = _tmpl.format(py_major=sys.version[:3], **globals()) +def parse_requirement_arg(spec): + try: + return Requirement.parse(spec) + except ValueError: + raise DistutilsError( + 
"Not a URL, existing file, or requirement spec: %r" % (spec,) + ) + + def parse_bdist_wininst(name): """Return (base,pyversion) or (None,None) for possible .exe name""" @@ -284,7 +293,7 @@ class PackageIndex(Environment): ca_bundle=None, verify_ssl=True, *args, **kw ): Environment.__init__(self, *args, **kw) - self.index_url = index_url + "/"[:not index_url.endswith('/')] + self.index_url = index_url + "/" [:not index_url.endswith('/')] self.scanned_urls = {} self.fetched_urls = {} self.package_pages = {} @@ -561,13 +570,7 @@ class PackageIndex(Environment): # Existing file or directory, just return it return spec else: - try: - spec = Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % - (spec,) - ) + spec = parse_requirement_arg(spec) return getattr(self.fetch_distribution(spec, tmpdir), 'location', None) def fetch_distribution( diff --git a/setuptools/py26compat.py b/setuptools/py26compat.py index 90cd695a..5778cdf1 100644 --- a/setuptools/py26compat.py +++ b/setuptools/py26compat.py @@ -22,3 +22,10 @@ def strip_fragment(url): if sys.version_info >= (2, 7): strip_fragment = lambda x: x + + +try: + from importlib import import_module +except ImportError: + def import_module(module_name): + return __import__(module_name, fromlist=['__name__']) diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py index df51b04f..6dc1b3ac 100644 --- a/setuptools/tests/test_find_packages.py +++ b/setuptools/tests/test_find_packages.py @@ -98,6 +98,15 @@ class TestFindPackages: packages = find_packages(self.dist_dir, exclude=('pkg.*',)) assert packages == ['pkg'] + def test_exclude_recursive(self): + """ + Excluding a parent package should not exclude child packages as well. + """ + self._touch('__init__.py', self.pkg_dir) + self._touch('__init__.py', self.sub_pkg_dir) + packages = find_packages(self.dist_dir, exclude=('pkg',)) + assert packages == ['pkg.subpkg'] + def test_include_excludes_other(self): """ If include is specified, other packages should be excluded. diff --git a/setuptools/tests/test_msvc.py b/setuptools/tests/test_msvc.py index 14e0f208..a0c76ea0 100644 --- a/setuptools/tests/test_msvc.py +++ b/setuptools/tests/test_msvc.py @@ -6,8 +6,6 @@ import os import contextlib import distutils.errors -from setuptools.extern import six - import pytest try: from unittest import mock diff --git a/setuptools/utils.py b/setuptools/utils.py deleted file mode 100644 index 080b9a8e..00000000 --- a/setuptools/utils.py +++ /dev/null @@ -1,11 +0,0 @@ -import os -import os.path - - -def cs_path_exists(fspath): - if not os.path.exists(fspath): - return False - # make absolute so we always have a directory - abspath = os.path.abspath(fspath) - directory, filename = os.path.split(abspath) - return filename in os.listdir(directory) @@ -2,5 +2,8 @@ envlist = py26,py27,py33,py34,py35,pypy,pypy3 [testenv] +deps= + pytest-flake8 + pytest>=3.0.2 passenv=APPDATA USERPROFILE HOMEDRIVE HOMEPATH windir -commands=python setup.py test +commands=python setup.py test --addopts='-rsx' |