diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 54721fc2d..d502f3261 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -75,7 +75,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [3.8, 3.9, "3.10", 3.11, 3.12] + python-version: [3.8, 3.9, "3.10", 3.11, 3.12, 3.13] os: [MacOS, Ubuntu, Windows] steps: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e2a0efd16..039d93da9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ exclude: '^(pipenv/patched/|pipenv/vendor/|tests/|pipenv/pipenv.1)' repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.6.0 hooks: - id: check-builtin-literals - id: check-added-large-files @@ -18,13 +18,13 @@ repos: exclude: .patch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.287 + rev: v0.6.7 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] - repo: https://github.com/psf/black - rev: 23.7.0 + rev: 24.8.0 hooks: - id: black @@ -53,11 +53,11 @@ repos: stages: [manual] - repo: https://github.com/tox-dev/pyproject-fmt - rev: "1.2.0" + rev: "2.2.4" hooks: - id: pyproject-fmt - repo: https://github.com/abravalheri/validate-pyproject - rev: v0.14 + rev: v0.19 hooks: - id: validate-pyproject diff --git a/CHANGELOG.md b/CHANGELOG.md index 0f41e4a00..6d3b36d6c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,34 @@ +2024.0.2 (2024-09-13) +===================== +Pipenv 2024.0.2 (2024-09-13) +============================ + + +Features & Improvements +----------------------- + +- Initial support for python3.13 `#6240 `_ + +Bug Fixes +--------- + +- Fix bump version in CI/CD pipeline `#6177 `_ +- Swap old_version and new_version in pipenv update --outdated output. `#6179 `_ +- Update shell detection to only check the end of the command used. `#6197 `_ +- Fix loading dot env twice #6198 `#6202 `_ +- Solve issue with quiet lock not writing the lock file #6207. `#6207 `_ +- Fix regression introduced with the "smarter uninstall" PR. Uninstall ``--all`` should not clear the Pipfile entries. `#6209 `_ +- Fixed regression where all local file installations were incorrectly treated as editable. Ensure that local file installations are explicitly marked as editable in both Pipfile and Pipfile.lock entries if editable installation is desired. `#6222 `_ +- Corrected behavior of ``pipenv install --skip-lock`` after behavioral install refactor introduced regression. No Pipfile.lock is generated with this fix and installation of vcs no longer fails with revision missing error. 
`#6225 `_ +- Fix for Windows on ARM missing distlib binaries in pyproject.toml `#6240 `_ + +Vendored Libraries +------------------ + +- Clean up usage of click styling from exceptions, shell and virtualenv `#6178 `_ +- Remove click.echo from pipenv/cli `#6182 `_ +- Remove click.echo from exceptions.py `#6216 `_ +- Update importlib-metadata to 8.4.0 `#6235 `_ 2024.0.1 (2024-06-11) ===================== Pipenv 2024.0.1 (2024-06-11) diff --git a/Pipfile b/Pipfile index 7b4a129f1..f5155f8e6 100644 --- a/Pipfile +++ b/Pipfile @@ -9,7 +9,6 @@ sphinx = "*" sphinx-click = "==4.*" sphinxcontrib-spelling = "==7.*" click = "==8.0.3" -pypiserver = "==1.*" stdeb = {version="*", sys_platform = "== 'linux'"} zipp = {version = "==3.6.0", markers = "python_version < '3.10'"} pre-commit = "==2.*" @@ -29,6 +28,7 @@ pyyaml = "==6.0.1" build = "*" twine = "*" semver = "*" +pypiserver = {ref = "pipenv-313", git = "https://github.com/matteius/pypiserver.git"} [packages] pytz = "*" diff --git a/Pipfile.lock b/Pipfile.lock index ff9e323ae..7f5be87fe 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "b8d2126bc8bb139755c193b41d494c886fe5560760a5cddee992db697707a88d" + "sha256": "2ec5439e5085c244b5ba09d4b14499978736d6631395ebbceb4ee026c1aa4223" }, "pipfile-spec": 6, "requires": {}, @@ -16,21 +16,21 @@ "default": { "pytz": { "hashes": [ - "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b", - "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7" + "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", + "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725" ], "index": "pypi", - "version": "==2023.3.post1" + "version": "==2024.2" } }, "develop": { "alabaster": { "hashes": [ - "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", - "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92" + "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", + "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b" ], - "markers": "python_version >= '3.9'", - "version": "==0.7.16" + "markers": "python_version >= '3.10'", + "version": "==1.0.0" }, "arpeggio": { "hashes": [ @@ -47,26 +47,34 @@ }, "attrs": { "hashes": [ - "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30", - "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1" + "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", + "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2" ], "markers": "python_version >= '3.7'", - "version": "==23.2.0" + "version": "==24.2.0" }, "babel": { "hashes": [ - "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363", - "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287" + "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b", + "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316" ], - "markers": "python_version >= '3.7'", - "version": "==2.14.0" + "markers": "python_version >= '3.8'", + "version": "==2.16.0" + }, + "backports.tarfile": { + "hashes": [ + "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34", + "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991" + ], + "markers": "python_version < '3.12'", + "version": "==1.2.0" }, "beautifulsoup4": { "hashes": [ - "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051", - 
"sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed" + "sha256:7e05ad0b6c26108d9990e2235e8a9b4e2c03ead6f391ceb60347f8ebea6b80ba", + "sha256:c684ddec071aa120819889aa9e8940f85c3f3cdaa08e23b9fa26510387897bd5" ], - "version": "==4.12.3" + "version": "==4.13.0b2" }, "black": { "hashes": [ @@ -100,20 +108,93 @@ }, "build": { "hashes": [ - "sha256:538aab1b64f9828977f84bc63ae570b060a8ed1be419e7870b8b4fc5e6ea553b", - "sha256:589bf99a67df7c9cf07ec0ac0e5e2ea5d4b37ac63301c4986d1acb126aa83f8f" + "sha256:119b2fb462adef986483438377a13b2f42064a2a3a4161f24a0cca698a07ac8c", + "sha256:277ccc71619d98afdd841a0e96ac9fe1593b823af481d3b0cea748e8894e0613" ], "index": "pypi", - "markers": "python_version >= '3.7'", - "version": "==1.0.3" + "markers": "python_version >= '3.8'", + "version": "==1.2.2" }, "certifi": { "hashes": [ - "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1", - "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474" + "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", + "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9" ], "markers": "python_version >= '3.6'", - "version": "==2023.11.17" + "version": "==2024.8.30" + }, + "cffi": { + "hashes": [ + "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", + "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", + "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1", + "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", + "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", + "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", + "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8", + "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36", + "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", + "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", + "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc", + "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", + "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", + "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", + "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", + "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", + "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", + "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", + "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", + "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b", + "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", + "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", + "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c", + "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", + "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", + "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", + "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8", + "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1", + "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", + 
"sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", + "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", + "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595", + "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0", + "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", + "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", + "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", + "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", + "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", + "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", + "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16", + "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", + "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e", + "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", + "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964", + "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", + "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576", + "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", + "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3", + "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662", + "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", + "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", + "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", + "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", + "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", + "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", + "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", + "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", + "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9", + "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7", + "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", + "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a", + "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", + "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", + "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", + "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", + "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87", + "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b" + ], + "markers": "platform_python_implementation != 'PyPy'", + "version": "==1.17.1" }, "cfgv": { "hashes": [ @@ -241,61 +322,114 @@ "toml" ], "hashes": [ - "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca", - "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471", - "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a", - "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058", - "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85", - "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143", - 
"sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446", - "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590", - "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a", - "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105", - "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9", - "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a", - "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac", - "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25", - "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2", - "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450", - "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932", - "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba", - "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137", - "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae", - "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614", - "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70", - "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e", - "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505", - "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870", - "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc", - "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451", - "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7", - "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e", - "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566", - "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5", - "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26", - "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2", - "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42", - "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555", - "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43", - "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed", - "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa", - "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516", - "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952", - "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd", - "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09", - "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c", - "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f", - "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6", - "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1", - "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0", - "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e", - "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9", - "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9", - "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e", - "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06" + 
"sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca", + "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d", + "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6", + "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989", + "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c", + "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b", + "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223", + "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f", + "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56", + "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3", + "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8", + "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb", + "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388", + "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0", + "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a", + "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8", + "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f", + "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a", + "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962", + "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8", + "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391", + "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc", + "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2", + "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155", + "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb", + "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0", + "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c", + "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a", + "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004", + "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060", + "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232", + "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93", + "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129", + "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163", + "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de", + "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6", + "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23", + "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569", + "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d", + "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778", + "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d", + "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36", + "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a", + "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6", + "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34", + "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704", + 
"sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106", + "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9", + "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862", + "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b", + "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255", + "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16", + "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3", + "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133", + "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb", + "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657", + "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d", + "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca", + "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36", + "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c", + "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e", + "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff", + "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7", + "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5", + "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02", + "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c", + "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df", + "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3", + "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a", + "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959", + "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234", + "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc" ], "markers": "python_version >= '3.8'", - "version": "==7.4.0" + "version": "==7.6.1" + }, + "cryptography": { + "hashes": [ + "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494", + "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806", + "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d", + "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062", + "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2", + "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4", + "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1", + "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85", + "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84", + "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042", + "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d", + "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962", + "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2", + "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa", + "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d", + "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365", + "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96", + "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47", + 
"sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d", + "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d", + "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c", + "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb", + "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277", + "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172", + "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034", + "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a", + "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289" + ], + "markers": "python_version >= '3.7'", + "version": "==43.0.1" }, "distlib": { "hashes": [ @@ -306,11 +440,11 @@ }, "docutils": { "hashes": [ - "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6", - "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b" + "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", + "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2" ], - "markers": "python_version >= '3.7'", - "version": "==0.20.1" + "markers": "python_version >= '3.9'", + "version": "==0.21.2" }, "exceptiongroup": { "hashes": [ @@ -323,19 +457,19 @@ }, "execnet": { "hashes": [ - "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41", - "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af" + "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc", + "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3" ], - "markers": "python_version >= '3.7'", - "version": "==2.0.2" + "markers": "python_version >= '3.8'", + "version": "==2.1.1" }, "filelock": { "hashes": [ - "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e", - "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c" + "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec", + "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609" ], "markers": "python_version >= '3.8'", - "version": "==3.13.1" + "version": "==3.16.0" }, "flake8": { "hashes": [ @@ -346,34 +480,34 @@ }, "flaky": { "hashes": [ - "sha256:3ad100780721a1911f57a165809b7ea265a7863305acb66708220820caf8aa0d", - "sha256:d6eda73cab5ae7364504b7c44670f70abed9e75f77dd116352f662817592ec9c" + "sha256:194ccf4f0d3a22b2de7130f4b62e45e977ac1b5ccad74d4d48f3005dcc38815e", + "sha256:47204a81ec905f3d5acfbd61daeabcada8f9d4031616d9bcb0618461729699f5" ], - "version": "==3.7.0" + "version": "==3.8.1" }, "gunicorn": { "hashes": [ - "sha256:3213aa5e8c24949e792bcacfc176fef362e7aac80b76c56f6b5122bf350722f0", - "sha256:88ec8bff1d634f98e61b9f65bc4bf3cd918a90806c6f5c48bc5603849ec81033" + "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d", + "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec" ], - "markers": "python_version >= '3.5' and sys_platform == 'linux'", - "version": "==21.2.0" + "markers": "python_version >= '3.7' and sys_platform == 'linux'", + "version": "==23.0.0" }, "identify": { "hashes": [ - "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d", - "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34" + "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf", + "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0" ], "markers": "python_version >= '3.8'", - 
"version": "==2.5.33" + "version": "==2.6.0" }, "idna": { "hashes": [ - "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", - "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f" + "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac", + "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603" ], - "markers": "python_version >= '3.5'", - "version": "==3.6" + "markers": "python_version >= '3.6'", + "version": "==3.8" }, "imagesize": { "hashes": [ @@ -385,19 +519,20 @@ }, "importlib-metadata": { "hashes": [ - "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570", - "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2" + "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b", + "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==7.1.0" + "version": "==8.5.0" }, - "incremental": { + "importlib-resources": { "hashes": [ - "sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0", - "sha256:b864a1f30885ee72c5ac2835a761b8fe8aa9c28b9395cacf27286602688d3e51" + "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065", + "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717" ], - "version": "==22.10.0" + "markers": "python_version < '3.12' and python_version >= '3.9'", + "version": "==6.4.5" }, "iniconfig": { "hashes": [ @@ -418,108 +553,132 @@ }, "jaraco.classes": { "hashes": [ - "sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb", - "sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621" + "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd", + "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790" + ], + "markers": "python_version >= '3.8'", + "version": "==3.4.0" + }, + "jaraco.context": { + "hashes": [ + "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3", + "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4" + ], + "markers": "python_version >= '3.8'", + "version": "==6.0.1" + }, + "jaraco.functools": { + "hashes": [ + "sha256:3460c74cd0d32bf82b9576bbb3527c4364d5b27a21f5158a62aed6c4b42e23f5", + "sha256:c9d16a3ed4ccb5a889ad8e0b7a343401ee5b2a71cee6ed192d3f68bc351e94e3" ], "markers": "python_version >= '3.8'", - "version": "==3.3.0" + "version": "==4.0.2" + }, + "jeepney": { + "hashes": [ + "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806", + "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755" + ], + "markers": "sys_platform == 'linux'", + "version": "==0.8.0" }, "jinja2": { "hashes": [ - "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa", - "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90" + "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", + "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d" ], "markers": "python_version >= '3.7'", - "version": "==3.1.3" + "version": "==3.1.4" }, "keyring": { "hashes": [ - "sha256:4446d35d636e6a10b8bce7caa66913dd9eca5fd222ca03a3d42c38608ac30836", - "sha256:e730ecffd309658a08ee82535a3b5ec4b4c8669a9be11efb66249d8e0aeb9a25" + "sha256:8d85a1ea5d6db8515b59e1c5d1d1678b03cf7fc8b8dcfb1651e8c4a524eb42ef", + "sha256:8d963da00ccdf06e356acd9bf3b743208878751032d8599c6cc89eb51310ffae" ], "markers": "python_version >= '3.8'", - 
"version": "==24.3.0" + "version": "==25.3.0" }, "linkify-it-py": { "hashes": [ - "sha256:11e29f00150cddaa8f434153f103c14716e7e097a8fd372d9eb1ed06ed91524d", - "sha256:2b3f168d5ce75e3a425e34b341a6b73e116b5d9ed8dbbbf5dc7456843b7ce2ee" + "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048", + "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79" ], - "version": "==1.0.3" + "version": "==2.0.3" }, "markdown-it-py": { "hashes": [ - "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30", - "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1" + "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", + "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb" ], - "markers": "python_version >= '3.7'", - "version": "==2.2.0" + "markers": "python_version >= '3.8'", + "version": "==3.0.0" }, "markupsafe": { "hashes": [ - "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69", - "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0", - "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d", - "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec", - "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5", - "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411", - "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3", - "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74", - "sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0", - "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949", - "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d", - "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279", - "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f", - "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6", - "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc", - "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e", - "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954", - "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656", - "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc", - "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518", - "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56", - "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc", - "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa", - "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565", - "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4", - "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb", - "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250", - "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4", - "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959", - "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc", - "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474", - "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863", - "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8", - "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f", 
- "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2", - "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e", - "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e", - "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb", - "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f", - "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a", - "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26", - "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d", - "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2", - "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131", - "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789", - "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6", - "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a", - "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858", - "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e", - "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb", - "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e", - "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84", - "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7", - "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea", - "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b", - "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6", - "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475", - "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74", - "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a", - "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00" + "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf", + "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff", + "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f", + "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3", + "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532", + "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f", + "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617", + "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df", + "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4", + "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906", + "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f", + "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4", + "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8", + "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371", + "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2", + "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465", + "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52", + "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6", + "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169", + "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad", + 
"sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2", + "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0", + "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029", + "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f", + "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a", + "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced", + "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5", + "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c", + "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf", + "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9", + "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb", + "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad", + "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3", + "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1", + "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46", + "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc", + "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a", + "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee", + "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900", + "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5", + "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea", + "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f", + "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5", + "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e", + "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a", + "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f", + "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50", + "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a", + "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b", + "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4", + "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff", + "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2", + "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46", + "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b", + "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf", + "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5", + "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5", + "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab", + "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd", + "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68" ], "markers": "python_version >= '3.7'", - "version": "==2.1.4" + "version": "==2.1.5" }, "mccabe": { "hashes": [ @@ -530,11 +689,11 @@ }, "mdit-py-plugins": { "hashes": [ - "sha256:ca9a0714ea59a24b2b044a1831f48d817dd0c817e84339f20e7889f392d77c4e", - "sha256:eee0adc7195e5827e17e02d2a258a2ba159944a0748f59c5099a4a27f78fcf6a" + "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636", + "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5" 
], - "markers": "python_version >= '3.7'", - "version": "==0.3.5" + "markers": "python_version >= '3.8'", + "version": "==0.4.2" }, "mdurl": { "hashes": [ @@ -553,11 +712,11 @@ }, "more-itertools": { "hashes": [ - "sha256:686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684", - "sha256:8fccb480c43d3e99a00087634c06dd02b0d50fbf088b380de5a41a015ec239e1" + "sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef", + "sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6" ], "markers": "python_version >= '3.8'", - "version": "==10.2.0" + "version": "==10.5.0" }, "mypy-extensions": { "hashes": [ @@ -572,56 +731,56 @@ "linkify" ], "hashes": [ - "sha256:61b275b85d9f58aa327f370913ae1bec26ebad372cc99f3ab85c8ec3ee8d9fb8", - "sha256:79317f4bb2c13053dd6e64f9da1ba1da6cd9c40c8a430c447a7b146a594c246d" + "sha256:851c9dfb44e36e56d15d05e72f02b80da21a9e0d07cba96baf5e2d476bb91531", + "sha256:b9317997552424448c6096c2558872fdb6f81d3ecb3a40ce84a7518798f3f28d" ], - "markers": "python_version >= '3.7'", - "version": "==0.18.1" + "markers": "python_version >= '3.10'", + "version": "==4.0.0" }, "nh3": { "hashes": [ - "sha256:0d02d0ff79dfd8208ed25a39c12cbda092388fff7f1662466e27d97ad011b770", - "sha256:3277481293b868b2715907310c7be0f1b9d10491d5adf9fce11756a97e97eddf", - "sha256:3b803a5875e7234907f7d64777dfde2b93db992376f3d6d7af7f3bc347deb305", - "sha256:427fecbb1031db085eaac9931362adf4a796428ef0163070c484b5a768e71601", - "sha256:5f0d77272ce6d34db6c87b4f894f037d55183d9518f948bba236fe81e2bb4e28", - "sha256:60684857cfa8fdbb74daa867e5cad3f0c9789415aba660614fe16cd66cbb9ec7", - "sha256:6f42f99f0cf6312e470b6c09e04da31f9abaadcd3eb591d7d1a88ea931dca7f3", - "sha256:86e447a63ca0b16318deb62498db4f76fc60699ce0a1231262880b38b6cff911", - "sha256:8d595df02413aa38586c24811237e95937ef18304e108b7e92c890a06793e3bf", - "sha256:9c0d415f6b7f2338f93035bba5c0d8c1b464e538bfbb1d598acd47d7969284f0", - "sha256:a5167a6403d19c515217b6bcaaa9be420974a6ac30e0da9e84d4fc67a5d474c5", - "sha256:ac19c0d68cd42ecd7ead91a3a032fdfff23d29302dbb1311e641a130dfefba97", - "sha256:b1e97221cedaf15a54f5243f2c5894bb12ca951ae4ddfd02a9d4ea9df9e1a29d", - "sha256:bc2d086fb540d0fa52ce35afaded4ea526b8fc4d3339f783db55c95de40ef02e", - "sha256:d1e30ff2d8d58fb2a14961f7aac1bbb1c51f9bdd7da727be35c63826060b0bf3", - "sha256:f3b53ba93bb7725acab1e030bc2ecd012a817040fd7851b332f86e2f9bb98dc6" - ], - "version": "==0.2.15" + "sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164", + "sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86", + "sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b", + "sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad", + "sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204", + "sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a", + "sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200", + "sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189", + "sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f", + "sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811", + "sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844", + "sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4", + "sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be", + "sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50", + 
"sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307", + "sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe" + ], + "version": "==0.2.18" }, "nodeenv": { "hashes": [ - "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2", - "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec" + "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", + "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'", - "version": "==1.8.0" + "version": "==1.9.1" }, "packaging": { "hashes": [ - "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5", - "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7" + "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", + "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124" ], - "markers": "python_version >= '3.7'", - "version": "==23.2" + "markers": "python_version >= '3.8' and sys_platform == 'linux'", + "version": "==24.1" }, "parse": { "hashes": [ - "sha256:5e171b001452fa9f004c5a58a93525175468daf69b493e9fa915347ed7ff6968", - "sha256:bd28bae37714b45d5894d77160a16e2be36b64a3b618c81168b3684676aa498b" + "sha256:967095588cb802add9177d0c0b6133b5ba33b1ea9007ca800e526f42a85af558", + "sha256:b41d604d16503c79d81af5165155c0b20f6c8d6c559efa66b4b695c3e5a0a0ce" ], "index": "pypi", - "version": "==1.20.0" + "version": "==1.20.2" }, "parver": { "hashes": [ @@ -640,11 +799,11 @@ }, "pip": { "hashes": [ - "sha256:5052d7889c1f9d05224cd41741acb7c5d6fa735ab34e339624a614eaaa7e7d76", - "sha256:7fd9972f96db22c8077a1ee2691b172c8089b17a5652a44494a9ecb0d78f9149" + "sha256:2cd581cf58ab7fcfca4ce8efa6dcacd0de5bf8d0a3eb9ec927e07405f4d9e2a2", + "sha256:5b5e490b5e9cb275c879595064adce9ebd31b854e3e803740b72f9ccf34a45b8" ], - "markers": "python_version >= '3.7'", - "version": "==23.3.2" + "markers": "python_version >= '3.8'", + "version": "==24.2" }, "pipenv": { "editable": true, @@ -652,31 +811,32 @@ "dev", "tests" ], + "markers": "python_version >= '3.8'", "path": "." 
}, "pkginfo": { "hashes": [ - "sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546", - "sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046" + "sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297", + "sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097" ], "markers": "python_version >= '3.6'", - "version": "==1.9.6" + "version": "==1.10.0" }, "platformdirs": { "hashes": [ - "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380", - "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420" + "sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c", + "sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617" ], "markers": "python_version >= '3.8'", - "version": "==4.1.0" + "version": "==4.3.2" }, "pluggy": { "hashes": [ - "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12", - "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7" + "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", + "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669" ], "markers": "python_version >= '3.8'", - "version": "==1.3.0" + "version": "==1.5.0" }, "pre-commit": { "hashes": [ @@ -695,6 +855,14 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.7.0" }, + "pycparser": { + "hashes": [ + "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", + "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc" + ], + "markers": "python_version >= '3.8'", + "version": "==2.22" + }, "pyenchant": { "hashes": [ "sha256:0314d162b7af83adc500f5aff850c91466129363ca8c4d79a8b8d99253346204", @@ -715,36 +883,32 @@ }, "pygments": { "hashes": [ - "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c", - "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367" + "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", + "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a" ], - "markers": "python_version >= '3.7'", - "version": "==2.17.2" + "markers": "python_version >= '3.8'", + "version": "==2.18.0" }, "pypiserver": { - "hashes": [ - "sha256:09f2f797f92b30e92287821e2dc3ca72c8011aec6a2570019254adf98318ee5c", - "sha256:70760efadc3d89b3e1b3f54f078a6520f6c6a0c3dd718b46cd0cf466c9fd01b2" - ], - "index": "pypi", + "git": "https://github.com/matteius/pypiserver.git", "markers": "python_version >= '3.6'", - "version": "==1.5.2" + "ref": "02f1ef9383f6e91d51a0f011c34a5df4816897a0" }, "pyproject-hooks": { "hashes": [ - "sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8", - "sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5" + "sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965", + "sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2" ], "markers": "python_version >= '3.7'", - "version": "==1.0.0" + "version": "==1.1.0" }, "pytest": { "hashes": [ - "sha256:42ed2f917ded90ceb752dbe2ecb48c436c2a70d38bc16018c2d11da6426a18b6", - "sha256:efc82dc5e6f2f41ae5acb9eabdf2ced192f336664c436b24a7db2c6aaafe4efd" + "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181", + "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2" ], "markers": "python_version >= '3.8'", - "version": "==8.0.0rc2" + "version": "==8.3.3" }, "pytest-cov": { "hashes": [ @@ -757,25 +921,17 
@@ }, "pytest-timeout": { "hashes": [ - "sha256:3b0b95dabf3cb50bac9ef5ca912fa0cfc286526af17afc806824df20c2f72c90", - "sha256:bde531e096466f49398a59f2dde76fa78429a09a12411466f88a07213e220de2" + "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9", + "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e" ], - "version": "==2.2.0" + "version": "==2.3.1" }, "pytest-xdist": { "hashes": [ - "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a", - "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24" + "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7", + "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d" ], - "version": "==3.5.0" - }, - "pywin32-ctypes": { - "hashes": [ - "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60", - "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7" - ], - "markers": "sys_platform == 'win32'", - "version": "==0.2.2" + "version": "==3.6.1" }, "pyyaml": { "hashes": [ @@ -837,19 +993,19 @@ }, "readme-renderer": { "hashes": [ - "sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d", - "sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1" + "sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151", + "sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1" ], - "markers": "python_version >= '3.8'", - "version": "==42.0" + "markers": "python_version >= '3.9'", + "version": "==44.0" }, "requests": { "hashes": [ - "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f", - "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1" + "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", + "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6" ], - "markers": "python_version >= '3.7'", - "version": "==2.31.0" + "markers": "python_version >= '3.8'", + "version": "==2.32.3" }, "requests-toolbelt": { "hashes": [ @@ -869,11 +1025,19 @@ }, "rich": { "hashes": [ - "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa", - "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235" + "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06", + "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a" ], "markers": "python_full_version >= '3.7.0'", - "version": "==13.7.0" + "version": "==13.8.1" + }, + "secretstorage": { + "hashes": [ + "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77", + "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99" + ], + "markers": "sys_platform == 'linux'", + "version": "==3.3.3" }, "semver": { "hashes": [ @@ -886,11 +1050,11 @@ }, "setuptools": { "hashes": [ - "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05", - "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78" + "sha256:5f4c08aa4d3ebcb57a50c33b1b07e94315d7fc7230f7115e47fc99776c8ce308", + "sha256:95b40ed940a1c67eb70fc099094bd6e99c6ee7c23aa2306f4d2697ba7916f9c6" ], "markers": "python_version >= '3.8'", - "version": "==69.0.3" + "version": "==74.1.2" }, "snowballstemmer": { "hashes": [ @@ -901,20 +1065,20 @@ }, "soupsieve": { "hashes": [ - "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690", - "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7" + 
"sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb", + "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9" ], "markers": "python_version >= '3.8'", - "version": "==2.5" + "version": "==2.6" }, "sphinx": { "hashes": [ - "sha256:1e09160a40b956dc623c910118fa636da93bd3ca0b9876a7b3df90f07d691560", - "sha256:9a5160e1ea90688d5963ba09a2dcd8bdd526620edbb65c328728f1b2228d5ab5" + "sha256:0cce1ddcc4fd3532cf1dd283bc7d886758362c5c1de6598696579ce96d8ffa5b", + "sha256:56173572ae6c1b9a38911786e206a110c9749116745873feae4f9ce88e59391d" ], "index": "pypi", - "markers": "python_version >= '3.9'", - "version": "==7.2.6" + "markers": "python_version >= '3.10'", + "version": "==8.0.2" }, "sphinx-click": { "hashes": [ @@ -927,27 +1091,27 @@ }, "sphinxcontrib-applehelp": { "hashes": [ - "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619", - "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4" + "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", + "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5" ], "markers": "python_version >= '3.9'", - "version": "==1.0.8" + "version": "==2.0.0" }, "sphinxcontrib-devhelp": { "hashes": [ - "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f", - "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3" + "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", + "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2" ], "markers": "python_version >= '3.9'", - "version": "==1.0.6" + "version": "==2.0.0" }, "sphinxcontrib-htmlhelp": { "hashes": [ - "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015", - "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04" + "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", + "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9" ], "markers": "python_version >= '3.9'", - "version": "==2.0.5" + "version": "==2.1.0" }, "sphinxcontrib-jsmath": { "hashes": [ @@ -959,19 +1123,19 @@ }, "sphinxcontrib-qthelp": { "hashes": [ - "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6", - "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182" + "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", + "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb" ], "markers": "python_version >= '3.9'", - "version": "==1.0.7" + "version": "==2.0.0" }, "sphinxcontrib-serializinghtml": { "hashes": [ - "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7", - "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f" + "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", + "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d" ], "markers": "python_version >= '3.9'", - "version": "==1.1.10" + "version": "==2.0.0" }, "sphinxcontrib-spelling": { "hashes": [ @@ -999,60 +1163,60 @@ }, "towncrier": { "hashes": [ - "sha256:13937c247e3f8ae20ac44d895cf5f96a60ad46cfdcc1671759530d7837d9ee5d", - "sha256:2e519ca619426d189e3c98c99558fe8be50c9ced13ea1fc20a4a353a95d2ded7" + "sha256:013423ee7eed102b2f393c287d22d95f66f1a3ea10a4baa82d298001a7f18af3", + "sha256:9343209592b839209cdf28c339ba45792fbfe9775b5f9c177462fd693e127d8d" ], - "version": "==23.11.0" + "version": "==24.8.0" }, "twine": { "hashes": [ - 
"sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8", - "sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8" + "sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997", + "sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db" ], "index": "pypi", - "markers": "python_version >= '3.7'", - "version": "==4.0.2" + "markers": "python_version >= '3.8'", + "version": "==5.1.1" }, "typing-extensions": { "hashes": [ - "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783", - "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd" + "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", + "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==4.9.0" + "version": "==4.12.2" }, "uc-micro-py": { "hashes": [ - "sha256:30ae2ac9c49f39ac6dce743bd187fcd2b574b16ca095fa74cd9396795c954c54", - "sha256:8c9110c309db9d9e87302e2f4ad2c3152770930d88ab385cd544e7a7e75f3de0" + "sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a", + "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5" ], "markers": "python_version >= '3.7'", - "version": "==1.0.2" + "version": "==1.0.3" }, "urllib3": { "hashes": [ - "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3", - "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54" + "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", + "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9" ], "markers": "python_version >= '3.8'", - "version": "==2.1.0" + "version": "==2.2.3" }, "virtualenv": { "hashes": [ - "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3", - "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b" + "sha256:48f2695d9809277003f30776d155615ffc11328e6a0a8c1f0ec80188d7874a55", + "sha256:c17f4e0f3e6036e9f26700446f85c76ab11df65ff6d8a9cbfad9f71aabfcf23c" ], "markers": "python_version >= '3.7'", - "version": "==20.25.0" + "version": "==20.26.4" }, "waitress": { "hashes": [ - "sha256:7500c9625927c8ec60f54377d590f67b30c8e70ef4b8894214ac6e4cad233d2a", - "sha256:780a4082c5fbc0fde6a2fcfe5e26e6efc1e8f425730863c04085769781f51eba" + "sha256:005da479b04134cdd9dd602d1ee7c49d79de0537610d653674cc6cbde222b8a1", + "sha256:2a06f242f4ba0cc563444ca3d1998959447477363a2d7e9b8b4d75d35cfd1669" ], - "markers": "python_full_version >= '3.7.0' and sys_platform == 'win32'", - "version": "==2.1.2" + "markers": "python_full_version >= '3.8.0' and sys_platform == 'win32'", + "version": "==3.0.0" }, "zipp": { "hashes": [ diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 000000000..da9c516dd --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,5 @@ +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. diff --git a/news/6151.bugfix.rst b/news/6151.bugfix.rst new file mode 100644 index 000000000..dc9ba2f51 --- /dev/null +++ b/news/6151.bugfix.rst @@ -0,0 +1 @@ +Disable ``ResourceWarning`` warning for temporary files that are cleaned on program exit. diff --git a/news/6171.bugfix.rst b/news/6171.bugfix.rst new file mode 100644 index 000000000..5ff0d0a53 --- /dev/null +++ b/news/6171.bugfix.rst @@ -0,0 +1 @@ +Fixed package sorting when installing a package with extras. 
diff --git a/news/6177.bugfix.rst b/news/6177.bugfix.rst deleted file mode 100644 index c4427c206..000000000 --- a/news/6177.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix bump version in CI/CD pipeline diff --git a/news/6178.vendor.rst b/news/6178.vendor.rst deleted file mode 100644 index bfcd062e4..000000000 --- a/news/6178.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Clean up usage of click styling from exceptions, shell and virtualenv diff --git a/news/6182.vendor.rst b/news/6182.vendor.rst deleted file mode 100644 index 245d8fcb3..000000000 --- a/news/6182.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Remove click.echo from pipenv/cli diff --git a/news/6185.bugfix.rst b/news/6185.bugfix.rst new file mode 100644 index 000000000..bc0bd085e --- /dev/null +++ b/news/6185.bugfix.rst @@ -0,0 +1 @@ +Fixed ``pipenv uninstall --all`` failing when the virtual environment no longer exists. diff --git a/news/6199.trivial.rst b/news/6199.trivial.rst deleted file mode 100644 index c6ba02635..000000000 --- a/news/6199.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Fix incorrect command reminder after installs. diff --git a/news/6202.bugfix.rst b/news/6202.bugfix.rst deleted file mode 100644 index 92f107be4..000000000 --- a/news/6202.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix loading dot env twice #6198 diff --git a/news/6242.bugfix.rst b/news/6242.bugfix.rst new file mode 100644 index 000000000..ef09c8c53 --- /dev/null +++ b/news/6242.bugfix.rst @@ -0,0 +1 @@ +Fix issue where installing a VCS dependency via the pipenv CLI produced an incorrect Pipfile entry that could not be locked. diff --git a/news/6243.bugfix.rst b/news/6243.bugfix.rst new file mode 100644 index 000000000..d66e075be --- /dev/null +++ b/news/6243.bugfix.rst @@ -0,0 +1 @@ +Fix errors on systems that report pipenv requires ``packaging>=22`` by declaring ``packaging>=22`` as an explicit dependency.
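The news/6242 entry above corresponds to the ``extract_vcs_url`` helper added to ``pipenv/utils/dependencies.py`` later in this diff. The following is a rough, self-contained sketch of the same normalization idea, not the exact pipenv implementation: strip a leading ``name@`` requirement prefix and the ``git+`` style scheme prefix, then rebuild the URL without query or fragment while keeping credentials. Note that the revision part (for example ``@main``) is split off separately by the surrounding pipenv code before this kind of clean-up runs.

from urllib.parse import urlparse, urlunparse

# Mirrors pipenv.utils.constants.VCS_LIST; repeated here only to keep the
# sketch self-contained.
VCS_LIST = ("git", "svn", "hg", "bzr")


def normalize_vcs_url(vcs_url):
    # Drop a leading "name@" requirement prefix (only when no "git+" style
    # prefix is present), remove the VCS scheme prefix, then rebuild the URL
    # without params, query or fragment, keeping any credentials in netloc.
    vcs_url = vcs_url.strip()
    if "@" in vcs_url and not vcs_url.startswith(tuple(f"{v}+" for v in VCS_LIST)):
        vcs_url = vcs_url.split("@", 1)[1].strip()
    for prefix in (f"{v}+" for v in VCS_LIST):
        if vcs_url.startswith(prefix):
            vcs_url = vcs_url[len(prefix):]
            break
    parsed = urlparse(vcs_url)
    return urlunparse((parsed.scheme, parsed.netloc, parsed.path, "", "", ""))


# Example: "requests @ git+https://github.com/psf/requests.git#egg=requests"
# becomes "https://github.com/psf/requests.git", the form a lockable Pipfile
# entry expects in its "git" field.
print(normalize_vcs_url("requests @ git+https://github.com/psf/requests.git#egg=requests"))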
diff --git a/pipenv/__version__.py b/pipenv/__version__.py index 82660d7fb..01a40efe5 100644 --- a/pipenv/__version__.py +++ b/pipenv/__version__.py @@ -2,4 +2,4 @@ # // ) ) / / // ) ) //___) ) // ) ) || / / # //___/ / / / //___/ / // // / / || / / # // / / // ((____ // / / ||/ / -__version__ = "2024.0.1" +__version__ = "2024.0.2" diff --git a/pipenv/cli/command.py b/pipenv/cli/command.py index 9924e541d..247672d81 100644 --- a/pipenv/cli/command.py +++ b/pipenv/cli/command.py @@ -344,7 +344,8 @@ def lock(ctx, state, **kwargs): clear=state.clear, pre=pre, pypi_mirror=state.pypi_mirror, - write=not state.quiet, + write=True, + quiet=state.quiet, categories=state.installstate.categories, ) diff --git a/pipenv/environment.py b/pipenv/environment.py index d75243894..a93817303 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -19,6 +19,7 @@ from pipenv.patched.pip._vendor.packaging.specifiers import SpecifierSet from pipenv.patched.pip._vendor.packaging.utils import canonicalize_name from pipenv.patched.pip._vendor.packaging.version import parse as parse_version +from pipenv.patched.pip._vendor.typing_extensions import Iterable from pipenv.utils import console from pipenv.utils.fileutils import normalize_path, temp_path from pipenv.utils.funktools import chunked, unnest @@ -72,8 +73,9 @@ def __init__( pipfile = project.parsed_pipfile self.pipfile = pipfile self.extra_dists = [] - prefix = prefix if prefix else sys.prefix - self.prefix = Path(prefix) + if self.is_venv and prefix is not None and not Path(prefix).exists(): + return + self.prefix = Path(prefix if prefix else sys.prefix) self._base_paths = {} if self.is_venv: self._base_paths = self.get_paths() @@ -96,11 +98,14 @@ def safe_import(self, name: str) -> ModuleType: return module @cached_property - def python_version(self) -> str: - with self.activated(): - sysconfig = self.safe_import("sysconfig") - py_version = sysconfig.get_python_version() - return py_version + def python_version(self) -> str | None: + with self.activated() as active: + if active: + sysconfig = self.safe_import("sysconfig") + py_version = sysconfig.get_python_version() + return py_version + else: + return None @property def python_info(self) -> dict[str, str]: @@ -703,9 +708,10 @@ def reverse_dependencies(self): } return rdeps - def get_working_set(self): + def get_working_set(self) -> Iterable: """Retrieve the working set of installed packages for the environment.""" - + if not hasattr(self, "sys_path"): + return [] return importlib_metadata.distributions(path=self.sys_path) def is_installed(self, pkgname): @@ -781,6 +787,16 @@ def activated(self): to `os.environ["PATH"]` to ensure that calls to `~Environment.run()` use the environment's path preferentially. """ + + # Fail if the virtualenv is needed but cannot be found + if self.is_venv and ( + hasattr(self, "prefix") + and not self.prefix.exists() + or not hasattr(self, "prefix") + ): + yield False + return + original_path = sys.path original_prefix = sys.prefix prefix = self.prefix.as_posix() @@ -806,7 +822,7 @@ def activated(self): sys.path = self.sys_path sys.prefix = self.sys_prefix try: - yield + yield True finally: sys.path = original_path sys.prefix = original_prefix diff --git a/pipenv/environments.py b/pipenv/environments.py index 8add6f861..e424b9e6b 100644 --- a/pipenv/environments.py +++ b/pipenv/environments.py @@ -182,7 +182,7 @@ def __init__(self) -> None: """ # NOTE: +1 because of a temporary bug in Pipenv. 
- self.PIPENV_MAX_DEPTH = int(get_from_env("MAX_DEPTH", default=3)) + 1 + self.PIPENV_MAX_DEPTH = int(get_from_env("MAX_DEPTH", default=10)) + 1 """Maximum number of directories to recursively search for a Pipfile. Default is 3. See also ``PIPENV_NO_INHERIT``. diff --git a/pipenv/pipenv.1 b/pipenv/pipenv.1 index c8a683f1a..4b2d62560 100644 --- a/pipenv/pipenv.1 +++ b/pipenv/pipenv.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "PIPENV" "1" "Jun 11, 2024" "2024.0.1" "pipenv" +.TH "PIPENV" "1" "Sep 13, 2024" "2024.0.2" "pipenv" .sp \fBNOTE:\fP .INDENT 0.0 diff --git a/pipenv/project.py b/pipenv/project.py index a6bcc87a7..ec692fa6d 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -15,6 +15,8 @@ from urllib.parse import unquote, urljoin from pipenv.utils.constants import VCS_LIST +from pipenv.utils.dependencies import extract_vcs_url +from pipenv.vendor.tomlkit.items import SingleKey, Table try: import tomllib as toml @@ -1107,12 +1109,23 @@ def get_package_name_in_pipfile(self, package_name, category): return name return None - def _sort_category(self, category): - # toml tables won't maintain sorted dictionary order - # so construct the table in the order that we need + def _sort_category(self, category) -> Table: + # copy table or create table from dict-like object table = tomlkit.table() - for key, value in sorted(category.items()): - table.add(key, value) + if isinstance(category, Table): + table.update(category.value) + else: + table.update(category) + + # sort the table internally + table._value._body.sort(key=lambda t: t[0] and t[0].key or "") + for index, (key, _) in enumerate(table._value._body): + assert isinstance(key, SingleKey) + indices = table._value._map[key] + if isinstance(indices, tuple): + table._value._map[key] = (index,) + indices[1:] + else: + table._value._map[key] = index return table @@ -1198,7 +1211,9 @@ def generate_package_pipfile_entry(self, package, pip_line, category=None): vcs_parts = vcs_part.rsplit("@", 1) if len(vcs_parts) > 1: entry["ref"] = vcs_parts[1].split("#", 1)[0].strip() - entry[vcs] = vcs_parts[0].strip() + vcs_url = vcs_parts[0].strip() + vcs_url = extract_vcs_url(vcs_url) + entry[vcs] = vcs_url # Check and extract subdirectory fragment if package.link.subdirectory_fragment: diff --git a/pipenv/resolver.py b/pipenv/resolver.py index ef4e11b30..abf9074d9 100644 --- a/pipenv/resolver.py +++ b/pipenv/resolver.py @@ -3,11 +3,7 @@ import logging import os import sys - -try: - from functools import cached_property -except ImportError: - cached_property = property +from functools import cached_property def _ensure_modules(): diff --git a/pipenv/routines/install.py b/pipenv/routines/install.py index e299ef7e5..6f6281be5 100644 --- a/pipenv/routines/install.py +++ b/pipenv/routines/install.py @@ -48,7 +48,7 @@ def do_install( requirements_directory = fileutils.create_tracked_tempdir( suffix="-requirements", prefix="pipenv-" ) - warnings.filterwarnings("default", category=ResourceWarning) + warnings.filterwarnings("ignore", category=ResourceWarning) packages = packages if packages else [] editable_packages = editable_packages if editable_packages else [] package_args = [p for p in packages if p] + [p for p in editable_packages if p] @@ -630,15 +630,11 @@ def do_init( packages=packages, editable_packages=editable_packages, ) - # Write out the lockfile if it doesn't exist. 
- if not project.lockfile_exists: + # Write out the lockfile if it doesn't exist and skip_lock is False + if not project.lockfile_exists and not skip_lock: # Unless we're in a virtualenv not managed by pipenv, abort if we're # using the system's python. - if ( - (system or allow_global) - and not (project.s.PIPENV_VIRTUALENV) - and skip_lock is False - ): + if (system or allow_global) and not project.s.PIPENV_VIRTUALENV: raise exceptions.PipenvOptionsError( "--system", "--system is intended to be used for Pipfile installation, " diff --git a/pipenv/routines/lock.py b/pipenv/routines/lock.py index 78893b307..ec12370ca 100644 --- a/pipenv/routines/lock.py +++ b/pipenv/routines/lock.py @@ -12,6 +12,7 @@ def do_lock( clear=False, pre=False, write=True, + quiet=False, pypi_mirror=None, categories=None, extra_pip_args=None, @@ -46,15 +47,15 @@ def do_lock( packages = project.get_pipfile_section(pipfile_category) if write: - # Alert the user of progress. - click.echo( - "{} {} {}".format( - click.style("Locking"), - click.style(f"[{pipfile_category}]", fg="yellow"), - click.style("dependencies..."), - ), - err=True, - ) + if not quiet: # Alert the user of progress. + click.echo( + "{} {} {}".format( + click.style("Locking"), + click.style(f"[{pipfile_category}]", fg="yellow"), + click.style("dependencies..."), + ), + err=True, + ) # Prune old lockfile category as new one will be created. with contextlib.suppress(KeyError): @@ -89,15 +90,16 @@ def do_lock( if write: lockfile.update({"_meta": project.get_lockfile_meta()}) project.write_lockfile(lockfile) - click.echo( - "{}".format( - click.style( - f"Updated Pipfile.lock ({project.get_lockfile_hash()})!", - bold=True, - ) - ), - err=True, - ) + if not quiet: + click.echo( + "{}".format( + click.style( + f"Updated Pipfile.lock ({project.get_lockfile_hash()})!", + bold=True, + ) + ), + err=True, + ) else: return lockfile diff --git a/pipenv/routines/outdated.py b/pipenv/routines/outdated.py index 5340e7454..d1ef19332 100644 --- a/pipenv/routines/outdated.py +++ b/pipenv/routines/outdated.py @@ -88,7 +88,7 @@ def do_outdated(project, pypi_mirror=None, pre=False, clear=False): if not outdated: click.echo(click.style("All packages are up to date!", fg="green", bold=True)) sys.exit(0) - for package, new_version, old_version in outdated: + for package, old_version, new_version in outdated: click.echo( f"Package {package!r} out-of-date: {old_version!r} installed, {new_version!r} available." 
) diff --git a/pipenv/routines/uninstall.py b/pipenv/routines/uninstall.py index 27e65cf7d..7939ccbd3 100644 --- a/pipenv/routines/uninstall.py +++ b/pipenv/routines/uninstall.py @@ -3,7 +3,9 @@ from pipenv import exceptions from pipenv.patched.pip._internal.build_env import get_runnable_pip +from pipenv.project import Project from pipenv.routines.lock import do_lock +from pipenv.utils import console from pipenv.utils.dependencies import ( expansive_install_req_from_line, get_lockfile_section_using_pipfile_category, @@ -15,12 +17,16 @@ from pipenv.utils.resolver import venv_resolve_deps from pipenv.utils.shell import cmd_list_to_shell, project_python from pipenv.vendor import click +from pipenv.vendor.importlib_metadata.compat.py39 import normalized_name -def _uninstall_from_environment(project, package, system=False): +def _uninstall_from_environment(project: Project, package, system=False): # Execute the uninstall command for the package - click.secho(f"Uninstalling {package}...", fg="green", bold=True) - with project.environment.activated(): + with project.environment.activated() as is_active: + if not is_active: + return False + + console.print(f"Uninstalling {package}...", style="bold green") cmd = [ project_python(project, system=system), get_runnable_pip(), @@ -37,7 +43,7 @@ def _uninstall_from_environment(project, package, system=False): def do_uninstall( - project, + project: Project, packages=None, editable_packages=None, python=False, @@ -82,22 +88,29 @@ def do_uninstall( if all: click.secho( click.style( - "Un-installing all {}...".format(click.style("[packages]", fg="yellow")), + "Un-installing all packages...", bold=True, ) ) - # Uninstall all dev-packages from environment - for package in project.get_pipfile_section("packages"): - _uninstall_from_environment(project, package, system=system) - # Remove the package from the Pipfile - if project.reset_category_in_pipfile(category="packages"): - click.echo("Removed [packages] from Pipfile.") + # Uninstall all packages from all groups + for category in project.get_package_categories(): + if category in ["source", "requires"]: + continue + for package in project.get_pipfile_section(category): + _uninstall_from_environment(project, package, system=system) + + # Clear all categories in the lockfile + for category in list(lockfile_content.keys()): + if category != "_meta": + lockfile_content[category] = {} - # Finalize changes to lockfile - lockfile_content["default"] = {} lockfile_content.update({"_meta": project.get_lockfile_meta()}) project.write_lockfile(lockfile_content) + # Call do_purge to remove all packages from the environment + do_purge(project, bare=False, downloads=False, allow_global=system) + return + package_args = list(packages) + [f"-e {pkg}" for pkg in editable_packages] # Determine packages and their dependencies for removal @@ -177,8 +190,7 @@ def do_purge(project, bare=False, downloads=False, allow_global=False): # Remove comments from the output, if any. 
installed = { - pep423_name(pkg.project_name) - for pkg in project.environment.get_installed_packages() + normalized_name(pkg) for pkg in project.environment.get_installed_packages() } bad_pkgs = {pep423_name(pkg) for pkg in BAD_PACKAGES} # Remove setuptools, pip, etc from targets for removal diff --git a/pipenv/shells.py b/pipenv/shells.py index 89467cddb..70938a1f3 100644 --- a/pipenv/shells.py +++ b/pipenv/shells.py @@ -35,27 +35,35 @@ def _get_activate_script(cmd, venv): This is POSIX-only at the moment since the compat (pexpect-based) shell does not work elsewhere anyway. """ - # Suffix and source command for other shells. - # Support for fish shell. - if "fish" in cmd: + # Suffix and source command for various shells. + command = "source" + + if cmd.endswith("fish"): suffix = ".fish" - command = "source" - # Support for csh shell. - elif "csh" in cmd: + elif cmd.endswith("csh"): suffix = ".csh" - command = "source" - elif "xonsh" in cmd: + elif cmd.endswith("xonsh"): suffix = ".xsh" - command = "source" - elif "nu" in cmd: + elif cmd.endswith("nu"): suffix = ".nu" command = "overlay use" - else: - suffix = "" + elif cmd.endswith(("pwsh", "powershell")): + suffix = ".ps1" command = "." + elif cmd.endswith(("sh", "bash", "zsh")): + suffix = "" + else: + sys.exit(f"unknown shell {cmd}") + # Escape any special characters located within the virtualenv path to allow # for proper activation. venv_location = re.sub(r"([ &$()\[\]])", r"\\\1", str(venv)) + + if suffix == "nu": + return f"overlay use {venv_location}" + elif suffix == ".ps1": + return f". {venv_location}\\Scripts\\Activate.{suffix}" + # The leading space can make history cleaner in some shells. return f" {command} {venv_location}/bin/activate{suffix}" diff --git a/pipenv/utils/dependencies.py b/pipenv/utils/dependencies.py index 868ea5a4e..aa110e9b1 100644 --- a/pipenv/utils/dependencies.py +++ b/pipenv/utils/dependencies.py @@ -11,7 +11,7 @@ from pathlib import Path from tempfile import NamedTemporaryFile, TemporaryDirectory from typing import Any, AnyStr, Dict, List, Mapping, Optional, Sequence, Union -from urllib.parse import urlparse, urlsplit, urlunsplit +from urllib.parse import urlparse, urlsplit, urlunparse, urlunsplit from pipenv.patched.pip._internal.models.link import Link from pipenv.patched.pip._internal.network.download import Downloader @@ -199,6 +199,45 @@ def unearth_hashes_for_dep(project, dep): return [] +def extract_vcs_url(vcs_url): + # Remove leading/trailing whitespace + vcs_url = vcs_url.strip() + + # Check if it's a file URI + parsed = urlparse(vcs_url) + if parsed.scheme == "file": + # For file URIs, we want to keep the entire URL intact + return vcs_url + + # Remove the package name and '@' if present at the start + if "@" in vcs_url and not vcs_url.startswith(tuple(f"{vcs}+" for vcs in VCS_LIST)): + vcs_url = vcs_url.split("@", 1)[1] + + # Remove the VCS prefix (e.g., 'git+') + for prefix in VCS_LIST: + vcs_prefix = f"{prefix}+" + if vcs_url.startswith(vcs_prefix): + vcs_url = vcs_url[len(vcs_prefix) :] + break + + # Parse the URL + parsed = urlparse(vcs_url) + + # Reconstruct the URL, preserving authentication details + clean_url = urlunparse( + ( + parsed.scheme, + parsed.netloc, + parsed.path, + "", # params + "", # query + "", # fragment + ) + ) + + return clean_url + + def clean_resolved_dep(project, dep, is_top_level=False, current_entry=None): from pipenv.patched.pip._vendor.packaging.requirements import ( Requirement as PipRequirement, @@ -237,15 +276,17 @@ def clean_resolved_dep(project, dep, 
is_top_level=False, current_entry=None): is_vcs_or_file = False for vcs_type in VCS_LIST: if vcs_type in dep: - if "[" in dep[vcs_type] and "]" in dep[vcs_type]: - extras_section = dep[vcs_type].split("[").pop().replace("]", "") + vcs_url = dep[vcs_type] + if "[" in vcs_url and "]" in vcs_url: + extras_section = vcs_url.split("[").pop().replace("]", "") lockfile["extras"] = sorted( [extra.strip() for extra in extras_section.split(",")] ) - if has_name_with_extras(dep[vcs_type]): - lockfile[vcs_type] = dep[vcs_type].split("@ ", 1)[1] - else: - lockfile[vcs_type] = dep[vcs_type] + + # Extract the clean VCS URL + clean_vcs_url = extract_vcs_url(vcs_url) + + lockfile[vcs_type] = clean_vcs_url lockfile["ref"] = dep.get("ref") if "subdirectory" in dep: lockfile["subdirectory"] = dep["subdirectory"] @@ -390,7 +431,7 @@ def dependency_as_pip_install_line( if not vcs: for k in ["file", "path"]: if k in dep: - if is_editable_path(dep[k]): + if dep.get("editable") and is_editable_path(dep[k]): line.append("-e") extras = "" if "extras" in dep: diff --git a/pipenv/utils/fileutils.py b/pipenv/utils/fileutils.py index f6fe5d2c9..7cc8166fc 100644 --- a/pipenv/utils/fileutils.py +++ b/pipenv/utils/fileutils.py @@ -1,4 +1,5 @@ """A collection for utilities for working with files and paths.""" + import atexit import io import os diff --git a/pipenv/utils/funktools.py b/pipenv/utils/funktools.py index 5f33b1e2f..708f6cfc7 100644 --- a/pipenv/utils/funktools.py +++ b/pipenv/utils/funktools.py @@ -1,6 +1,7 @@ """ A small collection of useful functional tools for working with iterables. """ + import errno import locale import os diff --git a/pipenv/utils/indexes.py b/pipenv/utils/indexes.py index 99485caf5..15c6341c5 100644 --- a/pipenv/utils/indexes.py +++ b/pipenv/utils/indexes.py @@ -90,9 +90,11 @@ def get_source_list( if pypi_mirror: sources = [ - create_mirror_source(pypi_mirror, source["name"]) - if is_pypi_url(source["url"]) - else source + ( + create_mirror_source(pypi_mirror, source["name"]) + if is_pypi_url(source["url"]) + else source + ) for source in sources ] return sources diff --git a/pipenv/utils/requirements.py b/pipenv/utils/requirements.py index 9a1ef185c..e6a761bfe 100644 --- a/pipenv/utils/requirements.py +++ b/pipenv/utils/requirements.py @@ -201,7 +201,7 @@ def requirement_from_lockfile( line = [] if k in package_info: path = package_info[k] - if is_editable_path(path): + if package_info.get("editable") and is_editable_path(path): line.append("-e") line.append(f"{package_info[k]}") if os_markers: diff --git a/pipenv/utils/resolver.py b/pipenv/utils/resolver.py index f04f27004..4054ebdc7 100644 --- a/pipenv/utils/resolver.py +++ b/pipenv/utils/resolver.py @@ -5,7 +5,7 @@ import sys import tempfile import warnings -from functools import lru_cache +from functools import cached_property, lru_cache from pathlib import Path from typing import Dict, List, Optional @@ -24,19 +24,11 @@ from pipenv.patched.pip._internal.req.req_file import parse_requirements from pipenv.patched.pip._internal.req.req_install import InstallRequirement from pipenv.patched.pip._internal.utils.temp_dir import global_tempdir_manager -from pipenv.patched.pip._vendor import rich from pipenv.patched.pip._vendor.packaging.utils import canonicalize_name from pipenv.project import Project +from pipenv.utils import console, err from pipenv.utils.fileutils import create_tracked_tempdir from pipenv.utils.requirements import normalize_name -from pipenv.vendor import click - -try: - # this is only in Python3.8 and later - from 
functools import cached_property -except ImportError: - # eventually distlib will remove cached property when they drop Python3.7 - from pipenv.patched.pip._vendor.distlib.util import cached_property from .dependencies import ( HackedPythonVersion, @@ -58,9 +50,6 @@ else: import importlib.metadata as importlib_metadata -console = rich.console.Console() -err = rich.console.Console(stderr=True) - def get_package_finder( install_cmd=None, @@ -92,7 +81,6 @@ def get_package_finder( class HashCacheMixin: - """Caches hashes of PyPI artifacts so we do not need to re-download them. Hashes are only cached when the URL appears to contain a hash in it and the @@ -539,11 +527,10 @@ def resolve_constraints(self): if result.req: result.req.marker = marker except TypeError as e: - click.echo( + err.print( f"Error generating python marker for {candidate}. " f"Is the specifier {requires_python} incorrectly quoted or otherwise wrong?" f"Full error: {e}", - err=True, ) new_tree.add(result) @@ -714,15 +701,14 @@ def actually_resolve_deps( def resolve(cmd, st, project): from pipenv.cmdparse import Script - from pipenv.vendor.click import echo c = subprocess_run(Script.parse(cmd).cmd_args, block=False, env=os.environ.copy()) is_verbose = project.s.is_verbose() - err = "" + errors = "" for line in iter(c.stderr.readline, ""): if not line.rstrip(): continue - err += line + errors += line if is_verbose: st.console.print(line.rstrip()) @@ -731,13 +717,13 @@ def resolve(cmd, st, project): out = c.stdout.read() if returncode != 0: st.console.print(environments.PIPENV_SPINNER_FAIL_TEXT.format("Locking Failed!")) - echo(out.strip(), err=True) + err.print(out.strip()) if not is_verbose: - echo(err, err=True) + err.print(err) raise RuntimeError("Failed to lock Pipfile.lock!") if is_verbose: - echo(out.strip(), err=True) - return subprocess.CompletedProcess(c.args, returncode, out, err) + err.print(out.strip()) + return subprocess.CompletedProcess(c.args, returncode, out, errors) def venv_resolve_deps( @@ -876,8 +862,8 @@ def venv_resolve_deps( with open(target_file.name) as fh: results = json.load(fh) except (IndexError, json.JSONDecodeError): - click.echo(c.stdout.strip(), err=True) - click.echo(c.stderr.strip(), err=True) + err.print(c.stdout.strip()) + err.print(c.stderr.strip()) if os.path.exists(target_file.name): os.unlink(target_file.name) raise RuntimeError("There was a problem with locking.") @@ -887,13 +873,15 @@ def venv_resolve_deps( environments.PIPENV_SPINNER_OK_TEXT.format("Success!") ) if not project.s.is_verbose() and c.stderr.strip(): - click.echo(click.style(f"Warning: {c.stderr.strip()}"), err=True) + err.print( + f"Warning: {c.stderr.strip()}", overflow="ignore", crop=False + ) else: st.console.print( environments.PIPENV_SPINNER_FAIL_TEXT.format("Locking Failed!") ) - click.echo(f"Output: {c.stdout.strip()}", err=True) - click.echo(f"Error: {c.stderr.strip()}", err=True) + err.print(f"Output: {c.stdout.strip()}") + err.print(f"Error: {c.stderr.strip()}") if lockfile_section not in lockfile: lockfile[lockfile_section] = {} return prepare_lockfile( diff --git a/pipenv/utils/toml.py b/pipenv/utils/toml.py index 1eec3fed6..b97d32d3b 100644 --- a/pipenv/utils/toml.py +++ b/pipenv/utils/toml.py @@ -1,8 +1,9 @@ from typing import Union from pipenv.vendor.plette.models import Package, PackageCollection -from pipenv.vendor.tomlkit.container import Container +from pipenv.vendor.tomlkit.container import Container, OutOfOrderTableProxy from pipenv.vendor.tomlkit.items import AoT, Array, Bool, InlineTable, Item, 
String, Table +from pipenv.vendor.tomlkit.toml_document import TOMLDocument try: import tomllib as toml @@ -33,26 +34,32 @@ def cleanup_toml(tml): return toml -def convert_toml_outline_tables(parsed, project): +def convert_toml_outline_tables(parsed: TOMLDocument, project) -> TOMLDocument: """Converts all outline tables to inline tables.""" def convert_tomlkit_table(section): - result = section.copy() - if isinstance(section, tomlkit.items.Table): + result: Table = tomlkit.table() + if isinstance(section, Table): body = section.value._body - elif isinstance(section, tomlkit.container.OutOfOrderTableProxy): + elif isinstance(section, OutOfOrderTableProxy): body = section._internal_container._body else: - body = section._body + assert not hasattr(section, "_body") + body = section + + index: int = 0 for key, value in body: if not key: continue - if hasattr(value, "keys") and not isinstance( - value, tomlkit.items.InlineTable - ): + if hasattr(value, "keys") and not isinstance(value, InlineTable): table = tomlkit.inline_table() table.update(value.value) - result[key.key] = table + key.sep = " = " # add separator because it did not exist before + result.append(key, table) + else: + result.append(key, value) + index += 1 + return result def convert_toml_table(section): @@ -66,10 +73,10 @@ def convert_toml_table(section): result[package] = table return result - is_tomlkit_parsed = isinstance(parsed, tomlkit.container.Container) + is_tomlkit_parsed = isinstance(parsed, Container) for section in project.get_package_categories(): table_data = parsed.get(section, {}) - if not table_data: + if table_data is None: continue if is_tomlkit_parsed: result = convert_tomlkit_table(table_data) diff --git a/pipenv/utils/virtualenv.py b/pipenv/utils/virtualenv.py index 573834aba..1084c143f 100644 --- a/pipenv/utils/virtualenv.py +++ b/pipenv/utils/virtualenv.py @@ -185,7 +185,7 @@ def ensure_virtualenv(project, python=None, site_packages=None, pypi_mirror=None def cleanup_virtualenv(project, bare=True): """Removes the virtualenv directory from the system.""" if not bare: - console.pritn("[red]Environment creation aborted.[/red]") + console.print("[red]Environment creation aborted.[/red]") try: # Delete the virtualenv. shutil.rmtree(project.virtualenv_location) @@ -205,7 +205,7 @@ def abort(msg=""): err.print(f"[red]{msg}[/red]") err.print("You can specify specific versions of Python with:") err.print( - f"[yellow]$ pipenv --python {os.sep.join('path', 'to', 'python')}[/yellow]" + f"[yellow]$ pipenv --python {os.sep.join(['path', 'to', 'python'])}[/yellow]" ) sys.exit(1) @@ -272,7 +272,7 @@ def abort(msg=""): else: # Tell the user we're installing Python. console.print( - f"[bold]Installing [green]CPython[/green] {version} with {installer.cmd}[/bold]" + "f[bold]Installing [green]CPython[/green] {version} with {installer.cmd}[/bold]" ) console.print("(this may take a few minutes)[bold]...[/bold]") with console.status( diff --git a/pipenv/vendor/importlib_metadata/__init__.py b/pipenv/vendor/importlib_metadata/__init__.py index 1ab98225a..a3fd94ecf 100644 --- a/pipenv/vendor/importlib_metadata/__init__.py +++ b/pipenv/vendor/importlib_metadata/__init__.py @@ -8,25 +8,23 @@ import pipenv.vendor.zipp as zipp import email import types -import inspect import pathlib import operator import textwrap -import warnings import functools import itertools import posixpath import collections -from . import _adapters, _meta -from .compat import py39 +from . 
import _meta +from .compat import py39, py311 from ._collections import FreezableDefaultDict, Pair from ._compat import ( NullFinder, install, ) from ._functools import method_cache, pass_none -from ._itertools import always_iterable, unique_everseen +from ._itertools import always_iterable, bucket, unique_everseen from ._meta import PackageMetadata, SimplePath from contextlib import suppress @@ -40,6 +38,7 @@ 'DistributionFinder', 'PackageMetadata', 'PackageNotFoundError', + 'SimplePath', 'distribution', 'distributions', 'entry_points', @@ -227,9 +226,26 @@ def matches(self, **params): >>> ep.matches(attr='bong') True """ + self._disallow_dist(params) attrs = (getattr(self, param) for param in params) return all(map(operator.eq, params.values(), attrs)) + @staticmethod + def _disallow_dist(params): + """ + Querying by dist is not allowed (dist objects are not comparable). + >>> EntryPoint(name='fan', value='fav', group='fag').matches(dist='foo') + Traceback (most recent call last): + ... + ValueError: "dist" is not suitable for matching... + """ + if "dist" in params: + raise ValueError( + '"dist" is not suitable for matching. ' + "Instead, use Distribution.entry_points.select() on a " + "located distribution." + ) + def _key(self): return self.name, self.value, self.group @@ -334,27 +350,7 @@ def __repr__(self) -> str: return f'' -class DeprecatedNonAbstract: - # Required until Python 3.14 - def __new__(cls, *args, **kwargs): - all_names = { - name for subclass in inspect.getmro(cls) for name in vars(subclass) - } - abstract = { - name - for name in all_names - if getattr(getattr(cls, name), '__isabstractmethod__', False) - } - if abstract: - warnings.warn( - f"Unimplemented abstract methods {abstract}", - DeprecationWarning, - stacklevel=2, - ) - return super().__new__(cls) - - -class Distribution(DeprecatedNonAbstract): +class Distribution(metaclass=abc.ABCMeta): """ An abstract Python distribution package. @@ -393,6 +389,17 @@ def locate_file(self, path: str | os.PathLike[str]) -> SimplePath: """ Given a path to a file in this distribution, return a SimplePath to it. + + This method is used by callers of ``Distribution.files()`` to + locate files within the distribution. If it's possible for a + Distribution to represent files in the distribution as + ``SimplePath`` objects, it should implement this method + to resolve such objects. + + Some Distribution providers may elect not to resolve SimplePath + objects within the distribution by raising a + NotImplementedError, but consumers of such a Distribution would + be unable to invoke ``Distribution.files()``. """ @classmethod @@ -409,7 +416,7 @@ def from_name(cls, name: str) -> Distribution: if not name: raise ValueError("A distribution name is required.") try: - return next(iter(cls.discover(name=name))) + return next(iter(cls._prefer_valid(cls.discover(name=name)))) except StopIteration: raise PackageNotFoundError(name) @@ -433,6 +440,16 @@ def discover( resolver(context) for resolver in cls._discover_resolvers() ) + @staticmethod + def _prefer_valid(dists: Iterable[Distribution]) -> Iterable[Distribution]: + """ + Prefer (move to the front) distributions that have metadata. + + Ref python/importlib_resources#489. + """ + buckets = bucket(dists, lambda dist: bool(dist.metadata)) + return itertools.chain(buckets[True], buckets[False]) + @staticmethod def at(path: str | os.PathLike[str]) -> Distribution: """Return a Distribution for the indicated metadata path. 
@@ -461,6 +478,9 @@ def metadata(self) -> _meta.PackageMetadata: Custom providers may provide the METADATA file or override this property. """ + # deferred for performance (python/cpython#109829) + from . import _adapters + opt_text = ( self.read_text('METADATA') or self.read_text('PKG-INFO') @@ -567,9 +587,8 @@ def _read_files_egginfo_installed(self): return paths = ( - (subdir / name) - .resolve() - .relative_to(self.locate_file('').resolve()) + py311.relative_fix((subdir / name).resolve()) + .relative_to(self.locate_file('').resolve(), walk_up=True) .as_posix() for name in text.splitlines() ) @@ -1086,11 +1105,10 @@ def _get_toplevel_name(name: PackagePath) -> str: >>> _get_toplevel_name(PackagePath('foo.dist-info')) 'foo.dist-info' """ - return _topmost(name) or ( - # python/typeshed#10328 - inspect.getmodulename(name) # type: ignore - or str(name) - ) + # Defer import of inspect for performance (python/cpython#118761) + import inspect + + return _topmost(name) or (inspect.getmodulename(name) or str(name)) def _top_level_inferred(dist): diff --git a/pipenv/vendor/importlib_metadata/_adapters.py b/pipenv/vendor/importlib_metadata/_adapters.py index 120e43a04..6223263ed 100644 --- a/pipenv/vendor/importlib_metadata/_adapters.py +++ b/pipenv/vendor/importlib_metadata/_adapters.py @@ -1,20 +1,8 @@ -import functools -import warnings import re import textwrap import email.message from ._text import FoldedCase -from ._compat import pypy_partial - - -# Do not remove prior to 2024-01-01 or Python 3.14 -_warn = functools.partial( - warnings.warn, - "Implicit None on return values is deprecated and will raise KeyErrors.", - DeprecationWarning, - stacklevel=pypy_partial(2), -) class Message(email.message.Message): @@ -53,12 +41,17 @@ def __iter__(self): def __getitem__(self, item): """ - Warn users that a ``KeyError`` can be expected when a - missing key is supplied. Ref python/importlib_metadata#371. + Override parent behavior to typical dict behavior. + + ``email.message.Message`` will emit None values for missing + keys. Typical mappings, including this ``Message``, will raise + a key error for missing keys. + + Ref python/importlib_metadata#371. """ res = super().__getitem__(item) if res is None: - _warn() + raise KeyError(item) return res def _repair_headers(self): diff --git a/pipenv/vendor/importlib_metadata/_itertools.py b/pipenv/vendor/importlib_metadata/_itertools.py index d4ca9b914..79d37198c 100644 --- a/pipenv/vendor/importlib_metadata/_itertools.py +++ b/pipenv/vendor/importlib_metadata/_itertools.py @@ -1,3 +1,4 @@ +from collections import defaultdict, deque from itertools import filterfalse @@ -71,3 +72,100 @@ def always_iterable(obj, base_type=(str, bytes)): return iter(obj) except TypeError: return iter((obj,)) + + +# Copied from more_itertools 10.3 +class bucket: + """Wrap *iterable* and return an object that buckets the iterable into + child iterables based on a *key* function. + + >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3'] + >>> s = bucket(iterable, key=lambda x: x[0]) # Bucket by 1st character + >>> sorted(list(s)) # Get the keys + ['a', 'b', 'c'] + >>> a_iterable = s['a'] + >>> next(a_iterable) + 'a1' + >>> next(a_iterable) + 'a2' + >>> list(s['b']) + ['b1', 'b2', 'b3'] + + The original iterable will be advanced and its items will be cached until + they are used by the child iterables. This may require significant storage. + + By default, attempting to select a bucket to which no items belong will + exhaust the iterable and cache all values. 
+ If you specify a *validator* function, selected buckets will instead be + checked against it. + + >>> from itertools import count + >>> it = count(1, 2) # Infinite sequence of odd numbers + >>> key = lambda x: x % 10 # Bucket by last digit + >>> validator = lambda x: x in {1, 3, 5, 7, 9} # Odd digits only + >>> s = bucket(it, key=key, validator=validator) + >>> 2 in s + False + >>> list(s[2]) + [] + + """ + + def __init__(self, iterable, key, validator=None): + self._it = iter(iterable) + self._key = key + self._cache = defaultdict(deque) + self._validator = validator or (lambda x: True) + + def __contains__(self, value): + if not self._validator(value): + return False + + try: + item = next(self[value]) + except StopIteration: + return False + else: + self._cache[value].appendleft(item) + + return True + + def _get_values(self, value): + """ + Helper to yield items from the parent iterator that match *value*. + Items that don't match are stored in the local cache as they + are encountered. + """ + while True: + # If we've cached some items that match the target value, emit + # the first one and evict it from the cache. + if self._cache[value]: + yield self._cache[value].popleft() + # Otherwise we need to advance the parent iterator to search for + # a matching item, caching the rest. + else: + while True: + try: + item = next(self._it) + except StopIteration: + return + item_value = self._key(item) + if item_value == value: + yield item + break + elif self._validator(item_value): + self._cache[item_value].append(item) + + def __iter__(self): + for item in self._it: + item_value = self._key(item) + if self._validator(item_value): + self._cache[item_value].append(item) + + yield from self._cache.keys() + + def __getitem__(self, value): + if not self._validator(value): + return iter(()) + + return self._get_values(value) diff --git a/pipenv/vendor/importlib_metadata/compat/py311.py b/pipenv/vendor/importlib_metadata/compat/py311.py new file mode 100644 index 000000000..3a5327436 --- /dev/null +++ b/pipenv/vendor/importlib_metadata/compat/py311.py @@ -0,0 +1,22 @@ +import os +import pathlib +import sys +import types + + +def wrap(path): # pragma: no cover + """ + Workaround for https://github.com/python/cpython/issues/84538 + to add backward compatibility for walk_up=True. + An example affected package is dask-labextension, which uses + jupyter-packaging to install JupyterLab javascript files outside + of site-packages. + """ + + def relative_to(root, *, walk_up=False): + return pathlib.Path(os.path.relpath(path, root)) + + return types.SimpleNamespace(relative_to=relative_to) + + +relative_fix = wrap if sys.version_info < (3, 12) else lambda x: x diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 9a74e5228..862a8f753 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -2,11 +2,11 @@ click-didyoumean==0.3.1 click==8.1.7 colorama==0.4.6 dparse==0.6.3 -importlib-metadata==7.1.0 +importlib-metadata==8.4.0 + zipp==3.18.1 packaging==24.0 pexpect==4.9.0 pipdeptree==2.18.1 - zipp==3.18.1 plette==2.1.0 ptyprocess==0.7.0 python-dotenv==1.0.1 diff --git a/pyproject.toml b/pyproject.toml index 9430c6175..2b11a9a14 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,12 +8,12 @@ requires = [ name = "pipenv" description = "Python Development Workflow for Humans." 
readme = "README.md" -license = {file = "LICENSE"} +license = { file = "LICENSE" } authors = [ - {name = "Pipenv maintainer team", email = "distutils-sig@python.org"}, + { name = "Pipenv maintainer team", email = "distutils-sig@python.org" }, ] requires-python = ">=3.8" -classifiers=[ +classifiers = [ "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", @@ -22,6 +22,7 @@ classifiers=[ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", ] @@ -30,56 +31,53 @@ dynamic = [ ] dependencies = [ "certifi", + "packaging>=22", "setuptools>=67", "virtualenv>=20.24.2", ] -[project.optional-dependencies] -dev = [ +optional-dependencies.dev = [ "beautifulsoup4", "black==23.3", - "flake8<4.0,>=3.3", + "flake8<4,>=3.3", "invoke", "parver", "sphinx", "towncrier", ] -tests = [ +optional-dependencies.tests = [ "flaky", "mock", "pytest>=5", "pytest-timeout", "pytest-xdist", ] -[project.urls] -Documentation = "https://pipenv.pypa.io/en/latest/" -Homepage = "https://github.com/pypa/pipenv" -Source = "https://github.com/pypa/pipenv.git" -[project.scripts] -pipenv = "pipenv:cli" -pipenv-resolver = "pipenv.resolver:main" +urls.Documentation = "https://pipenv.pypa.io/en/latest/" +urls.Homepage = "https://github.com/pypa/pipenv" +urls.Source = "https://github.com/pypa/pipenv.git" +scripts.pipenv = "pipenv:cli" +scripts.pipenv-resolver = "pipenv.resolver:main" [tool.setuptools.packages.find] -where = ["."] -exclude = ["tests*", "tests.*", "tasks*", "tasks.*"] - +where = [ "." 
] +exclude = [ "tests*", "tests.*", "tasks*", "tasks.*" ] [tool.setuptools.package-data] -"*" = ["LICENSE", "NOTICES"] -"pipenv.patched.safety" = ["VERSION", "safety-policy-template.yml"] -"pipenv.patched.pip._vendor.certifi" = ["*.pem"] -"pipenv.patched.pip._vendor.requests" = ["*.pem"] +"*" = [ "LICENSE", "NOTICES" ] +"pipenv.patched.safety" = [ "VERSION", "safety-policy-template.yml" ] +"pipenv.patched.pip._vendor.certifi" = [ "*.pem" ] +"pipenv.patched.pip._vendor.requests" = [ "*.pem" ] "pipenv.patched.pip._vendor.distlib" = [ "t32.exe", "t64.exe", + "t64-arm.exe", "w32.exe", "w64.exe", + "w64-arm.exe", ] -"pipenv.vendor.ruamel" = ["yaml"] - +"pipenv.vendor.ruamel" = [ "yaml" ] [tool.setuptools.dynamic] -version = {attr = "pipenv.__version__"} - +version = { attr = "pipenv.__version__" } ## TESTING AND DEVELOPER CONFIGURATION BELOW ## @@ -110,6 +108,9 @@ exclude = ''' ''' [tool.ruff] +target-version = "py37" + +line-length = 137 exclude = [ "pipenv/patched/*", "pipenv/vendor/*", @@ -131,7 +132,7 @@ select = [ "TID", "UP", "W", - "YTT" + "YTT", ] ignore = [ "B904", @@ -139,40 +140,39 @@ ignore = [ "PLR5501", "PLW2901", ] -line-length = 137 -target-version = "py37" - - -[tool.ruff.mccabe] -max-complexity = 44 - +pylint.allow-magic-value-types = [ "int", "str" ] +pylint.max-args = 20 +pylint.max-branches = 38 +pylint.max-returns = 9 +pylint.max-statements = 155 +mccabe.max-complexity = 44 +per-file-ignores."docs/conf.py" = [ "E402", "E501" ] +per-file-ignores."get-pipenv.py" = [ "E402" ] +per-file-ignores."pipenv/__init__.py" = [ "E401" ] +per-file-ignores."pipenv/cli/command.py" = [ "TID252" ] +per-file-ignores."pipenv/utils/internet.py" = [ "PLW0603" ] +per-file-ignores."pipenv/utils/resolver.py" = [ "B018" ] +per-file-ignores."tests/*" = [ "E501", "F401", "I", "PLC1901", "S101" ] +per-file-ignores."tests/integration/conftest.py" = [ "B003", "PIE800", "PLW0603" ] +per-file-ignores."tests/integration/test_pipenv.py" = [ "E741" ] +per-file-ignores."tests/integration/test_requirements.py" = [ "E741" ] +per-file-ignores."tests/unit/test_funktools.py" = [ "B015" ] +per-file-ignores."tests/unit/test_utils.py" = [ "F811" ] -[tool.ruff.pylint] -allow-magic-value-types = ["int", "str"] -max-args = 20 -max-branches = 38 -max-returns = 9 -max-statements = 155 - - -[tool.ruff.per-file-ignores] -"docs/conf.py" = ["E402", "E501"] -"get-pipenv.py" = ["E402"] -"pipenv/__init__.py" = ["E401"] -"pipenv/cli/command.py" = ["TID252"] -"pipenv/utils/internet.py" = ["PLW0603"] -"pipenv/utils/resolver.py" = ["B018"] -"tests/*" = ["E501", "F401", "I", "PLC1901", "S101"] -"tests/integration/conftest.py" = ["B003", "PIE800", "PLW0603"] -"tests/integration/test_pipenv.py" = ["E741"] -"tests/integration/test_requirements.py" = ["E741"] -"tests/unit/test_funktools.py" = ["B015"] -"tests/unit/test_utils.py" = ["F811"] +[tool.pyproject-fmt] +# after how many column width split arrays/dicts into multiple lines, 1 will force always +column_width = 120 +# how many spaces use for indentation +indent = 2 +# if false will remove unnecessary trailing ``.0``'s from version specifiers +keep_full_version = true +# maximum Python version to use when generating version specifiers +max_supported_python = "3.13" [tool.pytest.ini_options] addopts = "-ra" plugins = "xdist" -testpaths = ["tests"] +testpaths = [ "tests" ] # Add vendor and patched in addition to the default list of ignored dirs # Additionally, ignore tasks, news, test subdirectories and peeps directory norecursedirs = [ @@ -192,7 +192,7 @@ norecursedirs = [ 
"tests/pypi", "peeps", ] -filterwarnings = [] +filterwarnings = [ ] # These are not all the custom markers, but most of the ones with repeat uses # `pipenv run pytest --markers` will list all markers including these markers = [ @@ -225,13 +225,6 @@ markers = [ [tool.coverage.run] parallel = true -[tool.mypy] -ignore_missing_imports = true -follow_imports = "skip" -html_report = "mypyhtml" -python_version = "3.7" -mypy_path = "typeshed/pyi:typeshed/imports" - [tool.towncrier] package = "pipenv" filename = "CHANGELOG.md" @@ -240,42 +233,49 @@ directory = "news/" title_format = "{version} ({project_date})" template = "news/towncrier_template.rst" - [[tool.towncrier.type]] - directory = "feature" - name = "Features & Improvements" - showcontent = true +[[tool.towncrier.type]] +directory = "feature" +name = "Features & Improvements" +showcontent = true + +[[tool.towncrier.type]] +directory = "behavior" +name = "Behavior Changes" +showcontent = true - [[tool.towncrier.type]] - directory = "behavior" - name = "Behavior Changes" - showcontent = true +[[tool.towncrier.type]] +directory = "bugfix" +name = "Bug Fixes" +showcontent = true - [[tool.towncrier.type]] - directory = "bugfix" - name = "Bug Fixes" - showcontent = true +[[tool.towncrier.type]] +directory = "vendor" +name = "Vendored Libraries" +showcontent = true - [[tool.towncrier.type]] - directory = "vendor" - name = "Vendored Libraries" - showcontent = true +[[tool.towncrier.type]] +directory = "doc" +name = "Improved Documentation" +showcontent = true - [[tool.towncrier.type]] - directory = "doc" - name = "Improved Documentation" - showcontent = true +[[tool.towncrier.type]] +directory = "trivial" +name = "Trivial Changes" +showcontent = false - [[tool.towncrier.type]] - directory = "trivial" - name = "Trivial Changes" - showcontent = false +[[tool.towncrier.type]] +directory = "removal" +name = "Removals and Deprecations" +showcontent = true - [[tool.towncrier.type]] - directory = "removal" - name = "Removals and Deprecations" - showcontent = true +[[tool.towncrier.type]] +directory = "process" +name = "Relates to dev process changes" +showcontent = true - [[tool.towncrier.type]] - directory = "process" - name = "Relates to dev process changes" - showcontent = true +[tool.mypy] +ignore_missing_imports = true +follow_imports = "skip" +html_report = "mypyhtml" +python_version = "3.7" +mypy_path = "typeshed/pyi:typeshed/imports" diff --git a/tests/conftest.py b/tests/conftest.py index fc75b6b57..b8d46e68c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,4 +4,5 @@ @pytest.fixture() def project(): from pipenv.project import Project + return Project() diff --git a/tests/fixtures/cython-import-package/setup.py b/tests/fixtures/cython-import-package/setup.py index 3a816a94a..826b80b74 100644 --- a/tests/fixtures/cython-import-package/setup.py +++ b/tests/fixtures/cython-import-package/setup.py @@ -10,34 +10,31 @@ import Cython.Distutils - ROOT = os.path.dirname(__file__) -PACKAGE_NAME = 'cython_import_package' +PACKAGE_NAME = "cython_import_package" VERSION = None -with open(os.path.join(ROOT, 'src', PACKAGE_NAME.replace("-", "_"), '__init__.py')) as f: +with open(os.path.join(ROOT, "src", PACKAGE_NAME.replace("-", "_"), "__init__.py")) as f: for line in f: - if line.startswith('__version__ = '): - VERSION = ast.literal_eval(line[len('__version__ = '):].strip()) + if line.startswith("__version__ = "): + VERSION = ast.literal_eval(line[len("__version__ = ") :].strip()) break if VERSION is None: - raise OSError('failed to read 
version') + raise OSError("failed to read version") # Put everything in setup.cfg, except those that don't actually work? setup( # These really don't work. - package_dir={'': 'src'}, - packages=find_packages('src'), - + package_dir={"": "src"}, + packages=find_packages("src"), # I don't know how to specify an empty key in setup.cfg. package_data={ - '': ['LICENSE*', 'README*'], + "": ["LICENSE*", "README*"], }, setup_requires=["setuptools_scm", "cython"], - # I need this to be dynamic. version=VERSION, ) diff --git a/tests/fixtures/fake-package/docs/conf.py b/tests/fixtures/fake-package/docs/conf.py index 5f59ed523..c50a4ea0b 100644 --- a/tests/fixtures/fake-package/docs/conf.py +++ b/tests/fixtures/fake-package/docs/conf.py @@ -13,6 +13,7 @@ # import os import sys + ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) PACKAGE_DIR = os.path.join(ROOT, "src/fake_package") sys.path.insert(0, PACKAGE_DIR) @@ -20,14 +21,14 @@ # -- Project information ----------------------------------------------------- -project = 'fake_package' -copyright = '2019, Dan Ryan ' -author = 'Dan Ryan ' +project = "fake_package" +copyright = "2019, Dan Ryan " +author = "Dan Ryan " # The short X.Y version -version = '0.0' +version = "0.0" # The full version, including alpha/beta/rc tags -release = '0.0.0.dev0' +release = "0.0.0.dev0" # -- General configuration --------------------------------------------------- @@ -40,39 +41,39 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.viewcode', - 'sphinx.ext.todo', - 'sphinx.ext.intersphinx', - 'sphinx.ext.autosummary' + "sphinx.ext.autodoc", + "sphinx.ext.viewcode", + "sphinx.ext.todo", + "sphinx.ext.intersphinx", + "sphinx.ext.autosummary", ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The master toctree document. -master_doc = 'index' +master_doc = "index" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = 'en' +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path . -exclude_patterns = ['_build', '_man', 'Thumbs.db', '.DS_Store'] +exclude_patterns = ["_build", "_man", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" autosummary_generate = True @@ -81,7 +82,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -92,7 +93,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
-html_static_path = ['_static'] +html_static_path = ["_static"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -108,22 +109,22 @@ # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. -htmlhelp_basename = 'fake_packagedoc' +htmlhelp_basename = "fake_packagedoc" extlinks = { - 'issue': ('https://github.com/sarugaku/fake_package/issues/%s', '#'), - 'pull': ('https://github.com/sarugaku/fake_package/pull/%s', 'PR #'), + "issue": ("https://github.com/sarugaku/fake_package/issues/%s", "#"), + "pull": ("https://github.com/sarugaku/fake_package/pull/%s", "PR #"), } html_theme_options = { - 'display_version': True, - 'prev_next_buttons_location': 'bottom', - 'style_external_links': True, - 'vcs_pageview_mode': '', + "display_version": True, + "prev_next_buttons_location": "bottom", + "style_external_links": True, + "vcs_pageview_mode": "", # Toc options - 'collapse_navigation': True, - 'sticky_navigation': True, - 'navigation_depth': 4, - 'includehidden': True, - 'titles_only': False + "collapse_navigation": True, + "sticky_navigation": True, + "navigation_depth": 4, + "includehidden": True, + "titles_only": False, } # -- Options for LaTeX output ------------------------------------------------ @@ -132,15 +133,12 @@ # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # # 'preamble': '', - # Latex figure (float) alignment # # 'figure_align': 'htbp', @@ -150,8 +148,13 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'fake_package.tex', 'fake_package Documentation', - 'Dan Ryan \\textless{}dan@danryan.co\\textgreater{}', 'manual'), + ( + master_doc, + "fake_package.tex", + "fake_package Documentation", + "Dan Ryan \\textless{}dan@danryan.co\\textgreater{}", + "manual", + ), ] @@ -159,10 +162,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'fake_package', 'fake_package Documentation', - [author], 1) -] +man_pages = [(master_doc, "fake_package", "fake_package Documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- @@ -171,9 +171,15 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'fake_package', 'fake_package Documentation', - author, 'fake_package', 'A fake python package.', - 'Miscellaneous'), + ( + master_doc, + "fake_package", + "fake_package Documentation", + author, + "fake_package", + "A fake python package.", + "Miscellaneous", + ), ] @@ -195,7 +201,7 @@ # epub_uid = '' # A list of files that should not be packed into the epub file. -epub_exclude_files = ['search.html'] +epub_exclude_files = ["search.html"] # -- Extension configuration ------------------------------------------------- @@ -204,4 +210,4 @@ # If true, `todo` and `todoList` produce output, else they produce nothing. 
todo_include_todos = True -intersphinx_mapping = {'python': ('https://docs.python.org/3', None)} +intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} diff --git a/tests/fixtures/fake-package/setup.py b/tests/fixtures/fake-package/setup.py index 3048d5060..7a23b480a 100644 --- a/tests/fixtures/fake-package/setup.py +++ b/tests/fixtures/fake-package/setup.py @@ -6,30 +6,28 @@ ROOT = os.path.dirname(__file__) -PACKAGE_NAME = 'fake_package' +PACKAGE_NAME = "fake_package" VERSION = None -with open(os.path.join(ROOT, 'src', PACKAGE_NAME.replace("-", "_"), '__init__.py')) as f: +with open(os.path.join(ROOT, "src", PACKAGE_NAME.replace("-", "_"), "__init__.py")) as f: for line in f: - if line.startswith('__version__ = '): - VERSION = ast.literal_eval(line[len('__version__ = '):].strip()) + if line.startswith("__version__ = "): + VERSION = ast.literal_eval(line[len("__version__ = ") :].strip()) break if VERSION is None: - raise OSError('failed to read version') + raise OSError("failed to read version") # Put everything in setup.cfg, except those that don't actually work? setup( # These really don't work. - package_dir={'': 'src'}, - packages=find_packages('src'), - + package_dir={"": "src"}, + packages=find_packages("src"), # I don't know how to specify an empty key in setup.cfg. package_data={ - '': ['LICENSE*', 'README*'], + "": ["LICENSE*", "README*"], }, - # I need this to be dynamic. version=VERSION, ) diff --git a/tests/fixtures/fake-package/src/fake_package/__init__.py b/tests/fixtures/fake-package/src/fake_package/__init__.py index b8023d8bc..f102a9cad 100644 --- a/tests/fixtures/fake-package/src/fake_package/__init__.py +++ b/tests/fixtures/fake-package/src/fake_package/__init__.py @@ -1 +1 @@ -__version__ = '0.0.1' +__version__ = "0.0.1" diff --git a/tests/fixtures/fake-package/tasks/__init__.py b/tests/fixtures/fake-package/tasks/__init__.py index 8652dcaa6..8e9f02bd6 100644 --- a/tests/fixtures/fake-package/tasks/__init__.py +++ b/tests/fixtures/fake-package/tasks/__init__.py @@ -7,16 +7,18 @@ import parver from towncrier._builder import ( - find_fragments, render_fragments, split_fragments, + find_fragments, + render_fragments, + split_fragments, ) from towncrier._settings import load_config ROOT = pathlib.Path(__file__).resolve().parent.parent -PACKAGE_NAME = 'fake_package' +PACKAGE_NAME = "fake_package" -INIT_PY = ROOT.joinpath('src', PACKAGE_NAME, '__init__.py') +INIT_PY = ROOT.joinpath("src", PACKAGE_NAME, "__init__.py") @invoke.task() @@ -30,23 +32,24 @@ def typecheck(ctx): @invoke.task() def clean(ctx): - """Clean previously built package artifacts. 
- """ - ctx.run('python setup.py clean') - dist = ROOT.joinpath('dist') - print(f'[clean] Removing {dist}') + """Clean previously built package artifacts.""" + ctx.run("python setup.py clean") + dist = ROOT.joinpath("dist") + print(f"[clean] Removing {dist}") if dist.exists(): shutil.rmtree(str(dist)) def _read_version(): - out = subprocess.check_output(['git', 'tag'], encoding='ascii') + out = subprocess.check_output(["git", "tag"], encoding="ascii") try: - version = max(parver.Version.parse(v).normalize() for v in ( - line.strip() for line in out.split('\n') - ) if v) + version = max( + parver.Version.parse(v).normalize() + for v in (line.strip() for line in out.split("\n")) + if v + ) except ValueError: - version = parver.Version.parse('0.0.0') + version = parver.Version.parse("0.0.0") return version @@ -62,8 +65,7 @@ def _write_version(v): def _render_log(): - """Totally tap into Towncrier internals to get an in-memory result. - """ + """Totally tap into Towncrier internals to get an in-memory result.""" config = load_config(ROOT) definitions = config["types"] fragments, fragment_filenames = find_fragments( @@ -101,15 +103,14 @@ def _prebump(version, prebump): return next_version -PREBUMP = 'patch' +PREBUMP = "patch" @invoke.task(pre=[clean]) def release(ctx, type_, repo, prebump=PREBUMP): - """Make a new release. - """ + """Make a new release.""" if prebump not in REL_TYPES: - raise ValueError(f'{type_} not in {REL_TYPES}') + raise ValueError(f"{type_} not in {REL_TYPES}") prebump = REL_TYPES.index(prebump) version = _read_version() @@ -119,26 +120,26 @@ def release(ctx, type_, repo, prebump=PREBUMP): # Needs to happen before Towncrier deletes fragment files. tag_content = _render_log() - ctx.run('towncrier') + ctx.run("towncrier") ctx.run(f'git commit -am "Release {version}"') tag_content = tag_content.replace('"', '\\"') ctx.run(f'git tag -a {version} -m "Version {version}\n\n{tag_content}"') - ctx.run('python setup.py sdist bdist_wheel') + ctx.run("python setup.py sdist bdist_wheel") dist_pattern = f'{PACKAGE_NAME.replace("-", "[-_]")}-*' - artifacts = list(ROOT.joinpath('dist').glob(dist_pattern)) - filename_display = '\n'.join(f' {a}' for a in artifacts) - print(f'[release] Will upload:\n{filename_display}') + artifacts = list(ROOT.joinpath("dist").glob(dist_pattern)) + filename_display = "\n".join(f" {a}" for a in artifacts) + print(f"[release] Will upload:\n{filename_display}") try: - input('[release] Release ready. ENTER to upload, CTRL-C to abort: ') + input("[release] Release ready. 
ENTER to upload, CTRL-C to abort: ") except KeyboardInterrupt: - print('\nAborted!') + print("\nAborted!") return - arg_display = ' '.join(f'"{n}"' for n in artifacts) + arg_display = " ".join(f'"{n}"' for n in artifacts) ctx.run(f'twine upload --repository="{repo}" {arg_display}') version = _prebump(version, prebump) @@ -151,9 +152,9 @@ def release(ctx, type_, repo, prebump=PREBUMP): def build_docs(ctx): _current_version = _read_version() minor = [str(i) for i in _current_version.release[:2]] - docs_folder = (ROOT / 'docs').as_posix() - if not docs_folder.endswith('/'): - docs_folder = f'{docs_folder}/' + docs_folder = (ROOT / "docs").as_posix() + if not docs_folder.endswith("/"): + docs_folder = f"{docs_folder}/" args = ["--ext-autodoc", "--ext-viewcode", "-o", docs_folder] args.extend(["-A", "'Dan Ryan '"]) args.extend(["-R", str(_current_version)]) diff --git a/tests/fixtures/legacy-backend-package/setup.py b/tests/fixtures/legacy-backend-package/setup.py index bb43fcaa1..b73c90a70 100644 --- a/tests/fixtures/legacy-backend-package/setup.py +++ b/tests/fixtures/legacy-backend-package/setup.py @@ -6,30 +6,28 @@ ROOT = os.path.dirname(__file__) -PACKAGE_NAME = 'legacy_backend_package' +PACKAGE_NAME = "legacy_backend_package" VERSION = None -with open(os.path.join(ROOT, 'src', PACKAGE_NAME.replace("-", "_"), '__init__.py')) as f: +with open(os.path.join(ROOT, "src", PACKAGE_NAME.replace("-", "_"), "__init__.py")) as f: for line in f: - if line.startswith('__version__ = '): - VERSION = ast.literal_eval(line[len('__version__ = '):].strip()) + if line.startswith("__version__ = "): + VERSION = ast.literal_eval(line[len("__version__ = ") :].strip()) break if VERSION is None: - raise OSError('failed to read version') + raise OSError("failed to read version") # Put everything in setup.cfg, except those that don't actually work? setup( # These really don't work. - package_dir={'': 'src'}, - packages=find_packages('src'), - + package_dir={"": "src"}, + packages=find_packages("src"), # I don't know how to specify an empty key in setup.cfg. package_data={ - '': ['LICENSE*', 'README*'], + "": ["LICENSE*", "README*"], }, - # I need this to be dynamic. version=VERSION, ) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 171774767..4175c21c0 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -26,7 +26,9 @@ HAS_WARNED_GITHUB = False -DEFAULT_PRIVATE_PYPI_SERVER = os.environ.get("PIPENV_PYPI_SERVER", "http://localhost:8080/simple") +DEFAULT_PRIVATE_PYPI_SERVER = os.environ.get( + "PIPENV_PYPI_SERVER", "http://localhost:8080/simple" +) def try_internet(url="http://httpbin.org/ip", timeout=1.5): @@ -61,21 +63,25 @@ def check_github_ssh(): # GitHub does not provide shell access.' if ssh keys are available and # registered with GitHub. Otherwise, the command will fail with # return_code=255 and say 'Permission denied (publickey).' 
- c = subprocess_run('ssh -o StrictHostKeyChecking=no -o CheckHostIP=no -T git@github.com', timeout=30, shell=True) + c = subprocess_run( + "ssh -o StrictHostKeyChecking=no -o CheckHostIP=no -T git@github.com", + timeout=30, + shell=True, + ) res = c.returncode == 1 except KeyboardInterrupt: warnings.warn( - "KeyboardInterrupt while checking GitHub ssh access", RuntimeWarning, stacklevel=1 + "KeyboardInterrupt while checking GitHub ssh access", + RuntimeWarning, + stacklevel=1, ) except Exception: pass global HAS_WARNED_GITHUB if not res and not HAS_WARNED_GITHUB: + warnings.warn("Cannot connect to GitHub via SSH", RuntimeWarning, stacklevel=1) warnings.warn( - 'Cannot connect to GitHub via SSH', RuntimeWarning, stacklevel=1 - ) - warnings.warn( - 'Will skip tests requiring SSH access to GitHub', RuntimeWarning, stacklevel=1 + "Will skip tests requiring SSH access to GitHub", RuntimeWarning, stacklevel=1 ) HAS_WARNED_GITHUB = True return res @@ -87,25 +93,28 @@ def check_for_mercurial(): TESTS_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -PYPI_VENDOR_DIR = os.path.join(TESTS_ROOT, 'pypi') +PYPI_VENDOR_DIR = os.path.join(TESTS_ROOT, "pypi") WE_HAVE_HG = check_for_mercurial() def pytest_runtest_setup(item): - if item.get_closest_marker('needs_internet') is not None and not WE_HAVE_INTERNET: - pytest.skip('requires internet') - if item.get_closest_marker('needs_github_ssh') is not None and not WE_HAVE_GITHUB_SSH_KEYS: - pytest.skip('requires github ssh') - if item.get_closest_marker('needs_hg') is not None and not WE_HAVE_HG: - pytest.skip('requires mercurial') - if item.get_closest_marker('skip_py38') is not None and ( + if item.get_closest_marker("needs_internet") is not None and not WE_HAVE_INTERNET: + pytest.skip("requires internet") + if ( + item.get_closest_marker("needs_github_ssh") is not None + and not WE_HAVE_GITHUB_SSH_KEYS + ): + pytest.skip("requires github ssh") + if item.get_closest_marker("needs_hg") is not None and not WE_HAVE_HG: + pytest.skip("requires mercurial") + if item.get_closest_marker("skip_py38") is not None and ( sys.version_info[:2] == (3, 8) ): - pytest.skip('test not applicable on python 3.8') - if item.get_closest_marker('skip_osx') is not None and sys.platform == 'darwin': - pytest.skip('test does not apply on OSX') - if item.get_closest_marker('skip_windows') is not None and (os.name == 'nt'): - pytest.skip('test does not run on windows') + pytest.skip("test not applicable on python 3.8") + if item.get_closest_marker("skip_osx") is not None and sys.platform == "darwin": + pytest.skip("test does not apply on OSX") + if item.get_closest_marker("skip_windows") is not None and (os.name == "nt"): + pytest.skip("test does not run on windows") WE_HAVE_INTERNET = check_internet() @@ -138,7 +147,11 @@ def install(self, package, value, dev=False): def remove(self, package, dev=False): section = "packages" if not dev else "dev-packages" - if not dev and package not in self.document[section] and package in self.document["dev-packages"]: + if ( + not dev + and package not in self.document[section] + and package in self.document["dev-packages"] + ): section = "dev-packages" del self.document[section][package] self.write() @@ -171,6 +184,7 @@ def get_fixture_path(cls, path, fixtures="test_artifacts"): class _PipenvInstance: """An instance of a Pipenv Project...""" + def __init__(self, pipfile=True, capfd=None, index_url=None): self.index_url = index_url self.pypi = None @@ -182,7 +196,7 @@ def __init__(self, pipfile=True, capfd=None, index_url=None): 
os.environ.pop("PIPENV_CUSTOM_VENV_NAME", None) self.original_dir = Path(__file__).parent.parent.parent - self._path = TemporaryDirectory(prefix='pipenv-', suffix="-tests") + self._path = TemporaryDirectory(prefix="pipenv-", suffix="-tests") path = Path(self._path.name) try: self.path = str(path.resolve()) @@ -192,14 +206,14 @@ def __init__(self, pipfile=True, capfd=None, index_url=None): # set file creation perms self.pipfile_path = None - p_path = os.sep.join([self.path, 'Pipfile']) + p_path = os.sep.join([self.path, "Pipfile"]) self.pipfile_path = p_path if pipfile: with contextlib.suppress(FileNotFoundError): os.remove(p_path) - with open(p_path, 'a'): + with open(p_path, "a"): os.utime(p_path, None) self._pipfile = _Pipfile(Path(p_path), index=self.index_url) @@ -210,7 +224,7 @@ def __enter__(self): return self def __exit__(self, *args): - warn_msg = 'Failed to remove resource: {!r}' + warn_msg = "Failed to remove resource: {!r}" if self.pipfile_path: with contextlib.suppress(OSError): os.remove(self.pipfile_path) @@ -249,7 +263,7 @@ def pipenv(self, cmd, block=True): if err: r.stderr_bytes = r.stderr_bytes + err if block: - print(f'$ pipenv {cmd}') + print(f"$ pipenv {cmd}") print(r.stdout) print(r.stderr, file=sys.stderr) if r.returncode != 0: @@ -260,7 +274,7 @@ def pipenv(self, cmd, block=True): @property def pipfile(self): - p_path = os.sep.join([self.path, 'Pipfile']) + p_path = os.sep.join([self.path, "Pipfile"]) with open(p_path) as f: return tomlkit.loads(f.read()) @@ -272,7 +286,7 @@ def lockfile(self): @property def lockfile_path(self): - return os.sep.join([self.path, 'Pipfile.lock']) + return os.sep.join([self.path, "Pipfile.lock"]) if sys.version_info[:2] <= (3, 8): @@ -287,6 +301,7 @@ def _rmtree_func(path, ignore_errors=True, onerror=None): # Ignore removal failures where the file doesn't exist if exc.errno != errno.ENOENT: raise + else: _rmtree_func = _rmtree @@ -300,9 +315,13 @@ def pipenv_instance_pypi(capfdbinary, monkeypatch): os.environ["CI"] = "1" os.environ["PIPENV_DONT_USE_PYENV"] = "1" warnings.simplefilter("ignore", category=ResourceWarning) - warnings.filterwarnings("ignore", category=ResourceWarning, message="unclosed.*") + warnings.filterwarnings( + "ignore", category=ResourceWarning, message="unclosed.*" + ) try: - yield functools.partial(_PipenvInstance, capfd=capfdbinary, index_url="https://pypi.org/simple") + yield functools.partial( + _PipenvInstance, capfd=capfdbinary, index_url="https://pypi.org/simple" + ) finally: os.umask(original_umask) @@ -316,9 +335,13 @@ def pipenv_instance_private_pypi(capfdbinary, monkeypatch): os.environ["CI"] = "1" os.environ["PIPENV_DONT_USE_PYENV"] = "1" warnings.simplefilter("ignore", category=ResourceWarning) - warnings.filterwarnings("ignore", category=ResourceWarning, message="unclosed.*") + warnings.filterwarnings( + "ignore", category=ResourceWarning, message="unclosed.*" + ) try: - yield functools.partial(_PipenvInstance, capfd=capfdbinary, index_url=DEFAULT_PRIVATE_PYPI_SERVER) + yield functools.partial( + _PipenvInstance, capfd=capfdbinary, index_url=DEFAULT_PRIVATE_PYPI_SERVER + ) finally: os.umask(original_umask) diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py index be206ea30..09ac867f9 100644 --- a/tests/integration/test_cli.py +++ b/tests/integration/test_cli.py @@ -20,45 +20,55 @@ def test_pipenv_where(pipenv_instance_pypi): @pytest.mark.cli def test_pipenv_venv(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - c = p.pipenv('install dataclasses-json') + c = 
p.pipenv("install dataclasses-json") assert c.returncode == 0 - c = p.pipenv('--venv') + c = p.pipenv("--venv") assert c.returncode == 0 venv_path = c.stdout.strip() assert os.path.isdir(venv_path) @pytest.mark.cli -@pytest.mark.skipif(sys.version_info[:2] == (3, 8) and os.name == "nt", reason="Python 3.8 on Windows is not supported") +@pytest.mark.skipif( + sys.version_info[:2] == (3, 8) and os.name == "nt", + reason="Python 3.8 on Windows is not supported", +) def test_pipenv_py(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - c = p.pipenv('--python python') + c = p.pipenv("--python python") assert c.returncode == 0 - c = p.pipenv('--py') + c = p.pipenv("--py") assert c.returncode == 0 python = c.stdout.strip() - assert os.path.basename(python).startswith('python') + assert os.path.basename(python).startswith("python") @pytest.mark.cli -@pytest.mark.skipif(os.name == 'nt' and sys.version_info[:2] == (3, 8), reason='Test issue with windows 3.8 CIs') +@pytest.mark.skipif( + os.name == "nt" and sys.version_info[:2] == (3, 8), + reason="Test issue with windows 3.8 CIs", +) def test_pipenv_site_packages(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - c = p.pipenv('--python python --site-packages') + c = p.pipenv("--python python --site-packages") assert c.returncode == 0 - assert 'Making site-packages available' in c.stderr + assert "Making site-packages available" in c.stderr # no-global-site-packages.txt under stdlib dir should not exist. - c = p.pipenv('run python -c "import sysconfig; print(sysconfig.get_path(\'stdlib\'))"') + c = p.pipenv( + "run python -c \"import sysconfig; print(sysconfig.get_path('stdlib'))\"" + ) assert c.returncode == 0 stdlib_path = c.stdout.strip() - assert not os.path.isfile(os.path.join(stdlib_path, 'no-global-site-packages.txt')) + assert not os.path.isfile( + os.path.join(stdlib_path, "no-global-site-packages.txt") + ) @pytest.mark.cli def test_pipenv_support(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - c = p.pipenv('--support') + c = p.pipenv("--support") assert c.returncode == 0 assert c.stdout @@ -66,14 +76,14 @@ def test_pipenv_support(pipenv_instance_pypi): @pytest.mark.cli def test_pipenv_rm(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - c = p.pipenv('--python python') + c = p.pipenv("--python python") assert c.returncode == 0 - c = p.pipenv('--venv') + c = p.pipenv("--venv") assert c.returncode == 0 venv_path = c.stdout.strip() assert os.path.isdir(venv_path) - c = p.pipenv('--rm') + c = p.pipenv("--rm") assert c.returncode == 0 assert c.stdout assert not os.path.isdir(venv_path) @@ -82,7 +92,7 @@ def test_pipenv_rm(pipenv_instance_pypi): @pytest.mark.cli def test_pipenv_graph(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - c = p.pipenv('install tablib') + c = p.pipenv("install tablib") assert c.returncode == 0 graph = p.pipenv("graph") assert graph.returncode == 0 @@ -98,10 +108,12 @@ def test_pipenv_graph(pipenv_instance_pypi): @pytest.mark.cli def test_pipenv_graph_reverse(pipenv_instance_private_pypi): from pipenv.cli import cli - from pipenv.vendor.click.testing import CliRunner # not thread safe but graph is a tricky test + from pipenv.vendor.click.testing import ( + CliRunner, + ) # not thread safe but graph is a tricky test with pipenv_instance_private_pypi() as p: - c = p.pipenv('install tablib==0.13.0') + c = p.pipenv("install tablib==0.13.0") assert c.returncode == 0 cli_runner = CliRunner(mix_stderr=False) c = cli_runner.invoke(cli, "graph --reverse") @@ -109,58 +121,64 @@ def 
test_pipenv_graph_reverse(pipenv_instance_private_pypi): output = c.stdout requests_dependency = [ - ('backports.csv', 'backports.csv'), - ('odfpy', 'odfpy'), - ('openpyxl', 'openpyxl>=2.4.0'), - ('pyyaml', 'pyyaml'), - ('xlrd', 'xlrd'), - ('xlwt', 'xlwt'), + ("backports.csv", "backports.csv"), + ("odfpy", "odfpy"), + ("openpyxl", "openpyxl>=2.4.0"), + ("pyyaml", "pyyaml"), + ("xlrd", "xlrd"), + ("xlwt", "xlwt"), ] for dep_name, dep_constraint in requests_dependency: - pat = fr'{dep_name}==[\d.]+' - dep_match = re.search(pat, - output, - flags=re.MULTILINE | re.IGNORECASE) - assert dep_match is not None, f'{pat} not found in {output}' + pat = rf"{dep_name}==[\d.]+" + dep_match = re.search(pat, output, flags=re.MULTILINE | re.IGNORECASE) + assert dep_match is not None, f"{pat} not found in {output}" # openpyxl should be indented - if dep_name == 'openpyxl': - openpyxl_dep = re.search(r'^openpyxl', - output, - flags=re.MULTILINE | re.IGNORECASE) - assert openpyxl_dep is None, f'openpyxl should not appear at beginning of lines in {output}' - - if sys.version_info[:2] == (3, 12): - assert 'openpyxl==2.5.4 [requires: et_xmlfile]' in output - else: - assert 'openpyxl==2.5.4 [requires: et-xmlfile]' in output + if dep_name == "openpyxl": + openpyxl_dep = re.search( + r"^openpyxl", output, flags=re.MULTILINE | re.IGNORECASE + ) + assert ( + openpyxl_dep is None + ), f"openpyxl should not appear at beginning of lines in {output}" + assert "openpyxl==2.5.4 [requires: et_xmlfile]" in output else: - dep_match = re.search(fr'^[ -]*{dep_name}==[\d.]+$', - output, - flags=re.MULTILINE | re.IGNORECASE) - assert dep_match is not None, f'{dep_name} not found at beginning of line in {output}' - - dep_requests_match = re.search(fr'└── tablib==0.13.0 \[requires: {dep_constraint}', - output, - flags=re.MULTILINE | re.IGNORECASE) - assert dep_requests_match is not None, f'constraint {dep_constraint} not found in {output}' + dep_match = re.search( + rf"^[ -]*{dep_name}==[\d.]+$", + output, + flags=re.MULTILINE | re.IGNORECASE, + ) + assert ( + dep_match is not None + ), f"{dep_name} not found at beginning of line in {output}" + + dep_requests_match = re.search( + rf"└── tablib==0.13.0 \[requires: {dep_constraint}", + output, + flags=re.MULTILINE | re.IGNORECASE, + ) + assert ( + dep_requests_match is not None + ), f"constraint {dep_constraint} not found in {output}" assert dep_requests_match.start() > dep_match.start() -@pytest.mark.skip(reason="There is a disputed vulnerability about pip 24.0 messing up this test.") +@pytest.mark.skip( + reason="There is a disputed vulnerability about pip 24.0 messing up this test." +) @pytest.mark.cli -@pytest.mark.needs_internet(reason='required by check') +@pytest.mark.needs_internet(reason="required by check") def test_pipenv_check(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: - c = p.pipenv('install pyyaml') + c = p.pipenv("install pyyaml") assert c.returncode == 0 - c = p.pipenv('check --use-installed') + c = p.pipenv("check --use-installed") assert c.returncode != 0 - assert 'pyyaml' in c.stdout - c = p.pipenv('uninstall pyyaml') + assert "pyyaml" in c.stdout + c = p.pipenv("uninstall pyyaml") assert c.returncode == 0 - c = p.pipenv('install six') + c = p.pipenv("install six") assert c.returncode == 0 c = p.pipenv("run python -m pip install --upgrade pip") assert c.returncode == 0 @@ -171,48 +189,51 @@ def test_pipenv_check(pipenv_instance_private_pypi): # the issue above is still not resolved. 
# added also 51499 # https://github.com/pypa/wheel/issues/481 - c = p.pipenv('check --use-installed --ignore 35015 -i 51457 -i 51499') + c = p.pipenv("check --use-installed --ignore 35015 -i 51457 -i 51499") assert c.returncode == 0 - assert 'Ignoring' in c.stderr + assert "Ignoring" in c.stderr @pytest.mark.cli -@pytest.mark.needs_internet(reason='required by check') +@pytest.mark.needs_internet(reason="required by check") @pytest.mark.parametrize("category", ["CVE", "packages"]) def test_pipenv_check_check_lockfile_categories(pipenv_instance_pypi, category): with pipenv_instance_pypi() as p: - c = p.pipenv(f'install wheel==0.37.1 --categories={category}') + c = p.pipenv(f"install wheel==0.37.1 --categories={category}") assert c.returncode == 0 - c = p.pipenv(f'check --categories={category}') + c = p.pipenv(f"check --categories={category}") assert c.returncode != 0 - assert 'wheel' in c.stdout + assert "wheel" in c.stdout @pytest.mark.cli -@pytest.mark.skipif(sys.version_info[:2] == (3, 8) and os.name == "nt", reason="This test is not working om Windows Python 3. 8") +@pytest.mark.skipif( + sys.version_info[:2] == (3, 8) and os.name == "nt", + reason="This test is not working om Windows Python 3. 8", +) def test_pipenv_clean(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: - with open('setup.py', 'w') as f: + with open("setup.py", "w") as f: f.write('from setuptools import setup; setup(name="empty")') - c = p.pipenv('install -e .') + c = p.pipenv("install -e .") assert c.returncode == 0 - c = p.pipenv(f'run pip install -i {p.index_url} six') + c = p.pipenv(f"run pip install -i {p.index_url} six") assert c.returncode == 0 - c = p.pipenv('clean') + c = p.pipenv("clean") assert c.returncode == 0 - assert 'six' in c.stdout, f"{c.stdout} -- STDERR: {c.stderr}" + assert "six" in c.stdout, f"{c.stdout} -- STDERR: {c.stderr}" @pytest.mark.cli def test_venv_envs(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - assert p.pipenv('--envs').stdout + assert p.pipenv("--envs").stdout @pytest.mark.cli def test_bare_output(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - assert p.pipenv('').stdout + assert p.pipenv("").stdout @pytest.mark.cli @@ -224,15 +245,15 @@ def test_scripts(pipenv_instance_pypi): pyver = "which python" """.strip() f.write(contents) - c = p.pipenv('scripts') - assert 'pyver' in c.stdout - assert 'which python' in c.stdout + c = p.pipenv("scripts") + assert "pyver" in c.stdout + assert "which python" in c.stdout @pytest.mark.cli def test_help(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - assert p.pipenv('--help').stdout + assert p.pipenv("--help").stdout @pytest.mark.cli @@ -245,28 +266,27 @@ def test_man(pipenv_instance_pypi): @pytest.mark.cli def test_install_parse_error(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: - # Make sure unparsable packages don't wind up in the pipfile # Escape $ for shell input - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [packages] [dev-packages] """.strip() f.write(contents) - c = p.pipenv('install requests u/\\/p@r\\$34b13+pkg') + c = p.pipenv("install requests u/\\/p@r\\$34b13+pkg") assert c.returncode != 0 - assert 'u/\\/p@r$34b13+pkg' not in p.pipfile['packages'] + assert "u/\\/p@r$34b13+pkg" not in p.pipfile["packages"] @pytest.mark.skip(reason="This test clears the cache that other tests may be using.") @pytest.mark.cli def test_pipenv_clear(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - c = 
p.pipenv('--clear') + c = p.pipenv("--clear") assert c.returncode == 0 - assert 'Clearing caches' in c.stdout + assert "Clearing caches" in c.stdout @pytest.mark.outdated @@ -278,46 +298,46 @@ def test_pipenv_outdated_prerelease(pipenv_instance_pypi): sqlalchemy = "<=1.2.3" """.strip() f.write(contents) - c = p.pipenv('update --pre --outdated') + c = p.pipenv("update --pre --outdated") assert c.returncode == 0 @pytest.mark.cli def test_pipenv_verify_without_pipfile(pipenv_instance_pypi): with pipenv_instance_pypi(pipfile=False) as p: - c = p.pipenv('verify') + c = p.pipenv("verify") assert c.returncode == 1 - assert 'No Pipfile present at project home.' in c.stderr + assert "No Pipfile present at project home." in c.stderr @pytest.mark.cli def test_pipenv_verify_without_pipfile_lock(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - c = p.pipenv('verify') + c = p.pipenv("verify") assert c.returncode == 1 - assert 'Pipfile.lock is out-of-date.' in c.stderr + assert "Pipfile.lock is out-of-date." in c.stderr @pytest.mark.cli def test_pipenv_verify_locked_passing(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - p.pipenv('lock') - c = p.pipenv('verify') + p.pipenv("lock") + c = p.pipenv("verify") assert c.returncode == 0 - assert 'Pipfile.lock is up-to-date.' in c.stdout + assert "Pipfile.lock is up-to-date." in c.stdout @pytest.mark.cli def test_pipenv_verify_locked_outdated_failing(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: - p.pipenv('lock') + p.pipenv("lock") # modify the Pipfile - pf = Path(p.path).joinpath('Pipfile') + pf = Path(p.path).joinpath("Pipfile") pf_data = pf.read_text() - pf_new = re.sub(r'\[packages\]', '[packages]\nrequests = "*"', pf_data) + pf_new = re.sub(r"\[packages\]", '[packages]\nrequests = "*"', pf_data) pf.write_text(pf_new) - c = p.pipenv('verify') + c = p.pipenv("verify") assert c.returncode == 1 - assert 'Pipfile.lock is out-of-date.' in c.stderr + assert "Pipfile.lock is out-of-date." 
in c.stderr diff --git a/tests/integration/test_dot_venv.py b/tests/integration/test_dot_venv.py index 23f42aeea..bf4c729ba 100644 --- a/tests/integration/test_dot_venv.py +++ b/tests/integration/test_dot_venv.py @@ -1,4 +1,3 @@ - import os from pathlib import Path from tempfile import TemporaryDirectory @@ -13,11 +12,11 @@ @pytest.mark.parametrize("true_value", TRUE_VALUES) def test_venv_in_project(true_value, pipenv_instance_pypi): with temp_environ(): - os.environ['PIPENV_VENV_IN_PROJECT'] = true_value + os.environ["PIPENV_VENV_IN_PROJECT"] = true_value with pipenv_instance_pypi() as p: - c = p.pipenv('install dataclasses-json') + c = p.pipenv("install dataclasses-json") assert c.returncode == 0 - assert p.path in p.pipenv('--venv').stdout + assert p.path in p.pipenv("--venv").stdout @pytest.mark.dotvenv @@ -25,23 +24,23 @@ def test_venv_in_project(true_value, pipenv_instance_pypi): def test_venv_in_project_disabled_ignores_venv(false_value, pipenv_instance_pypi): venv_name = "my_project" with temp_environ(): - os.environ['PIPENV_VENV_IN_PROJECT'] = false_value + os.environ["PIPENV_VENV_IN_PROJECT"] = false_value with pipenv_instance_pypi() as p: - file_path = os.path.join(p.path, '.venv') - with open(file_path, 'w') as f: + file_path = os.path.join(p.path, ".venv") + with open(file_path, "w") as f: f.write(venv_name) with temp_environ(), TemporaryDirectory( - prefix='pipenv-', suffix='temp_workon_home' + prefix="pipenv-", suffix="temp_workon_home" ) as workon_home: - os.environ['WORKON_HOME'] = workon_home - c = p.pipenv('install dataclasses-json') + os.environ["WORKON_HOME"] = workon_home + c = p.pipenv("install dataclasses-json") assert c.returncode == 0 - c = p.pipenv('--venv') + c = p.pipenv("--venv") assert c.returncode == 0 venv_loc = Path(c.stdout.strip()).resolve() assert venv_loc.exists() - assert venv_loc.joinpath('.project').exists() + assert venv_loc.joinpath(".project").exists() venv_path = Path(venv_loc).resolve() venv_expected_path = Path(workon_home).joinpath(venv_name).resolve() assert os.path.samefile(venv_path, venv_expected_path) @@ -51,35 +50,37 @@ def test_venv_in_project_disabled_ignores_venv(false_value, pipenv_instance_pypi @pytest.mark.parametrize("true_value", TRUE_VALUES) def test_venv_at_project_root(true_value, pipenv_instance_pypi): with temp_environ(), pipenv_instance_pypi() as p: - os.environ['PIPENV_VENV_IN_PROJECT'] = true_value - c = p.pipenv('install') + os.environ["PIPENV_VENV_IN_PROJECT"] = true_value + c = p.pipenv("install") assert c.returncode == 0 - assert p.path in p.pipenv('--venv').stdout - del os.environ['PIPENV_VENV_IN_PROJECT'] - os.mkdir('subdir') - os.chdir('subdir') + assert p.path in p.pipenv("--venv").stdout + del os.environ["PIPENV_VENV_IN_PROJECT"] + os.mkdir("subdir") + os.chdir("subdir") # should still detect installed - assert p.path in p.pipenv('--venv').stdout + assert p.path in p.pipenv("--venv").stdout @pytest.mark.dotvenv @pytest.mark.parametrize("false_value", FALSE_VALUES) -def test_venv_in_project_disabled_with_existing_venv_dir(false_value, pipenv_instance_pypi): +def test_venv_in_project_disabled_with_existing_venv_dir( + false_value, pipenv_instance_pypi +): venv_name = "my_project" with temp_environ(), pipenv_instance_pypi() as p, TemporaryDirectory( prefix="pipenv-", suffix="temp_workon_home" ) as workon_home: - os.environ['PIPENV_VENV_IN_PROJECT'] = false_value - os.environ['PIPENV_CUSTOM_VENV_NAME'] = venv_name + os.environ["PIPENV_VENV_IN_PROJECT"] = false_value + os.environ["PIPENV_CUSTOM_VENV_NAME"] = 
venv_name os.environ["WORKON_HOME"] = workon_home - os.mkdir('.venv') - c = p.pipenv('install') + os.mkdir(".venv") + c = p.pipenv("install") assert c.returncode == 0 - c = p.pipenv('--venv') + c = p.pipenv("--venv") assert c.returncode == 0 venv_loc = Path(c.stdout.strip()).resolve() assert venv_loc.exists() - assert venv_loc.joinpath('.project').exists() + assert venv_loc.joinpath(".project").exists() venv_path = Path(venv_loc).resolve() venv_expected_path = Path(workon_home).joinpath(venv_name).resolve() assert os.path.samefile(venv_path, venv_expected_path) @@ -88,36 +89,36 @@ def test_venv_in_project_disabled_with_existing_venv_dir(false_value, pipenv_ins @pytest.mark.dotvenv def test_reuse_previous_venv(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - os.mkdir('.venv') - c = p.pipenv('install dataclasses-json') + os.mkdir(".venv") + c = p.pipenv("install dataclasses-json") assert c.returncode == 0 - assert p.path in p.pipenv('--venv').stdout + assert p.path in p.pipenv("--venv").stdout @pytest.mark.dotvenv -@pytest.mark.parametrize('venv_name', ('test-venv', os.path.join('foo', 'test-venv'))) +@pytest.mark.parametrize("venv_name", ("test-venv", os.path.join("foo", "test-venv"))) def test_venv_file(venv_name, pipenv_instance_pypi): """Tests virtualenv creation when a .venv file exists at the project root and contains a venv name. """ with pipenv_instance_pypi() as p: - file_path = os.path.join(p.path, '.venv') - with open(file_path, 'w') as f: + file_path = os.path.join(p.path, ".venv") + with open(file_path, "w") as f: f.write(venv_name) with temp_environ(), TemporaryDirectory( - prefix='pipenv-', suffix='temp_workon_home' + prefix="pipenv-", suffix="temp_workon_home" ) as workon_home: - os.environ['WORKON_HOME'] = workon_home + os.environ["WORKON_HOME"] = workon_home - c = p.pipenv('install') + c = p.pipenv("install") assert c.returncode == 0 - c = p.pipenv('--venv') + c = p.pipenv("--venv") assert c.returncode == 0 venv_loc = Path(c.stdout.strip()).resolve() assert venv_loc.exists() - assert venv_loc.joinpath('.project').exists() + assert venv_loc.joinpath(".project").exists() venv_path = Path(venv_loc).resolve() if os.path.sep in venv_name: venv_expected_path = Path(p.path).joinpath(venv_name) @@ -128,26 +129,25 @@ def test_venv_file(venv_name, pipenv_instance_pypi): @pytest.mark.dotvenv def test_empty_venv_file(pipenv_instance_pypi): - """Tests virtualenv creation when an empty .venv file exists at the project root - """ + """Tests virtualenv creation when an empty .venv file exists at the project root""" with pipenv_instance_pypi() as p: - file_path = os.path.join(p.path, '.venv') - with open(file_path, 'w'): + file_path = os.path.join(p.path, ".venv") + with open(file_path, "w"): pass with temp_environ(), TemporaryDirectory( - prefix='pipenv-', suffix='temp_workon_home' + prefix="pipenv-", suffix="temp_workon_home" ) as workon_home: - os.environ['WORKON_HOME'] = workon_home + os.environ["WORKON_HOME"] = workon_home - c = p.pipenv('install') + c = p.pipenv("install") assert c.returncode == 0 - c = p.pipenv('--venv') + c = p.pipenv("--venv") assert c.returncode == 0 venv_loc = Path(c.stdout.strip()).absolute() assert venv_loc.exists() - assert venv_loc.joinpath('.project').exists() + assert venv_loc.joinpath(".project").exists() venv_path = Path(venv_loc) venv_path_parent = Path(venv_path.parent) assert venv_path_parent == Path(workon_home) @@ -155,36 +155,34 @@ def test_empty_venv_file(pipenv_instance_pypi): @pytest.mark.dotvenv def 
test_venv_in_project_default_when_venv_exists(pipenv_instance_pypi): - """Tests virtualenv creation when a .venv file exists at the project root. - """ + """Tests virtualenv creation when a .venv file exists at the project root.""" with temp_environ(), pipenv_instance_pypi() as p, TemporaryDirectory( - prefix='pipenv-', suffix='-test_venv' + prefix="pipenv-", suffix="-test_venv" ) as venv_path: - file_path = os.path.join(p.path, '.venv') - with open(file_path, 'w') as f: + file_path = os.path.join(p.path, ".venv") + with open(file_path, "w") as f: f.write(venv_path) - c = p.pipenv('install') + c = p.pipenv("install") assert c.returncode == 0 - c = p.pipenv('--venv') + c = p.pipenv("--venv") assert c.returncode == 0 venv_loc = Path(c.stdout.strip()) - assert venv_loc.joinpath('.project').exists() + assert venv_loc.joinpath(".project").exists() assert venv_loc == Path(venv_path) @pytest.mark.dotenv def test_venv_name_accepts_custom_name_environment_variable(pipenv_instance_pypi): - """Tests that virtualenv reads PIPENV_CUSTOM_VENV_NAME and accepts it as a name - """ + """Tests that virtualenv reads PIPENV_CUSTOM_VENV_NAME and accepts it as a name""" with pipenv_instance_pypi() as p: test_name = "sensible_custom_venv_name" with temp_environ(): - os.environ['PIPENV_CUSTOM_VENV_NAME'] = test_name - c = p.pipenv('install') + os.environ["PIPENV_CUSTOM_VENV_NAME"] = test_name + c = p.pipenv("install") assert c.returncode == 0 - c = p.pipenv('--venv') + c = p.pipenv("--venv") assert c.returncode == 0 venv_path = c.stdout.strip() assert test_name == Path(venv_path).parts[-1] diff --git a/tests/integration/test_import_requirements.py b/tests/integration/test_import_requirements.py index 07080fbb7..0f87bac31 100644 --- a/tests/integration/test_import_requirements.py +++ b/tests/integration/test_import_requirements.py @@ -13,69 +13,98 @@ @pytest.mark.cli @pytest.mark.deploy @pytest.mark.system -@mock.patch("pipenv.utils.dependencies.unpack_url", mock.MagicMock(return_value=File("/some/path/to/project", content_type=None))) +@mock.patch( + "pipenv.utils.dependencies.unpack_url", + mock.MagicMock(return_value=File("/some/path/to/project", content_type=None)), +) @mock.patch("pipenv.utils.dependencies.find_package_name_from_directory") -def test_auth_with_pw_redacted(mock_find_package_name_from_directory, pipenv_instance_pypi): +def test_auth_with_pw_redacted( + mock_find_package_name_from_directory, pipenv_instance_pypi +): mock_find_package_name_from_directory.return_value = "myproject" with pipenv_instance_pypi() as p: p.pipenv("run shell") project = Project() requirements_file = tempfile.NamedTemporaryFile(mode="w+", delete=False) - requirements_file.write("""git+https://${AUTH_USER}:mypw1@github.com/user/myproject.git@main#egg=myproject""") + requirements_file.write( + """git+https://${AUTH_USER}:mypw1@github.com/user/myproject.git@main#egg=myproject""" + ) requirements_file.close() import_requirements(project, r=requirements_file.name) os.unlink(requirements_file.name) - assert p.pipfile["packages"]["myproject"] == {'git': 'git+https://${AUTH_USER}:****@github.com/user/myproject.git', 'ref': 'main'} + assert p.pipfile["packages"]["myproject"] == {'git': 'https://${AUTH_USER}:****@github.com/user/myproject.git', 'ref': 'main'} @pytest.mark.cli @pytest.mark.deploy @pytest.mark.system -@mock.patch("pipenv.utils.dependencies.unpack_url", mock.MagicMock(return_value=File("/some/path/to/project", content_type=None))) +@mock.patch( + "pipenv.utils.dependencies.unpack_url", + 
mock.MagicMock(return_value=File("/some/path/to/project", content_type=None)), +) @mock.patch("pipenv.utils.dependencies.find_package_name_from_directory") -def test_auth_with_username_redacted(mock_find_package_name_from_directory, pipenv_instance_pypi): +def test_auth_with_username_redacted( + mock_find_package_name_from_directory, pipenv_instance_pypi +): mock_find_package_name_from_directory.return_value = "myproject" with pipenv_instance_pypi() as p: p.pipenv("run shell") project = Project() requirements_file = tempfile.NamedTemporaryFile(mode="w+", delete=False) - requirements_file.write("""git+https://username@github.com/user/myproject.git@main#egg=myproject""") + requirements_file.write( + """git+https://username@github.com/user/myproject.git@main#egg=myproject""" + ) requirements_file.close() import_requirements(project, r=requirements_file.name) os.unlink(requirements_file.name) - assert p.pipfile["packages"]["myproject"] == {'git': 'git+https://****@github.com/user/myproject.git', 'ref': 'main'} + assert p.pipfile["packages"]["myproject"] == {'git': 'https://****@github.com/user/myproject.git', 'ref': 'main'} + @pytest.mark.cli @pytest.mark.deploy @pytest.mark.system -@mock.patch("pipenv.utils.dependencies.unpack_url", mock.MagicMock(return_value=File("/some/path/to/project", content_type=None))) +@mock.patch( + "pipenv.utils.dependencies.unpack_url", + mock.MagicMock(return_value=File("/some/path/to/project", content_type=None)), +) @mock.patch("pipenv.utils.dependencies.find_package_name_from_directory") -def test_auth_with_pw_are_variables_passed_to_pipfile(mock_find_package_name_from_directory, pipenv_instance_pypi): +def test_auth_with_pw_are_variables_passed_to_pipfile( + mock_find_package_name_from_directory, pipenv_instance_pypi +): mock_find_package_name_from_directory.return_value = "myproject" with pipenv_instance_pypi() as p: p.pipenv("run shell") project = Project() requirements_file = tempfile.NamedTemporaryFile(mode="w+", delete=False) - requirements_file.write("""git+https://${AUTH_USER}:${AUTH_PW}@github.com/user/myproject.git@main#egg=myproject""") + requirements_file.write( + """git+https://${AUTH_USER}:${AUTH_PW}@github.com/user/myproject.git@main#egg=myproject""" + ) requirements_file.close() import_requirements(project, r=requirements_file.name) os.unlink(requirements_file.name) - assert p.pipfile["packages"]["myproject"] == {'git': 'git+https://${AUTH_USER}:${AUTH_PW}@github.com/user/myproject.git', 'ref': 'main'} + assert p.pipfile["packages"]["myproject"] == {'git': 'https://${AUTH_USER}:${AUTH_PW}@github.com/user/myproject.git', 'ref': 'main'} @pytest.mark.cli @pytest.mark.deploy @pytest.mark.system -@mock.patch("pipenv.utils.dependencies.unpack_url", mock.MagicMock(return_value=File("/some/path/to/project", content_type=None))) +@mock.patch( + "pipenv.utils.dependencies.unpack_url", + mock.MagicMock(return_value=File("/some/path/to/project", content_type=None)), +) @mock.patch("pipenv.utils.dependencies.find_package_name_from_directory") -def test_auth_with_only_username_variable_passed_to_pipfile(mock_find_package_name_from_directory, pipenv_instance_pypi): +def test_auth_with_only_username_variable_passed_to_pipfile( + mock_find_package_name_from_directory, pipenv_instance_pypi +): mock_find_package_name_from_directory.return_value = "myproject" with pipenv_instance_pypi() as p: p.pipenv("run shell") project = Project() requirements_file = tempfile.NamedTemporaryFile(mode="w+", delete=False) - 
requirements_file.write("""git+https://${AUTH_USER}@github.com/user/myproject.git@main#egg=myproject""") + requirements_file.write( + """git+https://${AUTH_USER}@github.com/user/myproject.git@main#egg=myproject""" + ) requirements_file.close() import_requirements(project, r=requirements_file.name) os.unlink(requirements_file.name) - assert p.pipfile["packages"]["myproject"] == {'git': 'git+https://${AUTH_USER}@github.com/user/myproject.git', 'ref': 'main'} + assert p.pipfile["packages"]["myproject"] == {'git': 'https://${AUTH_USER}@github.com/user/myproject.git', 'ref': 'main'} diff --git a/tests/integration/test_install_basic.py b/tests/integration/test_install_basic.py index ee5f68785..cc16b5a6f 100644 --- a/tests/integration/test_install_basic.py +++ b/tests/integration/test_install_basic.py @@ -92,7 +92,7 @@ def test_install_without_dev(pipenv_instance_private_pypi): @pytest.mark.basic @pytest.mark.install def test_install_with_version_req_default_operator(pipenv_instance_private_pypi): - """Ensure that running `pipenv install` work when spec is package = "X.Y.Z". """ + """Ensure that running `pipenv install` work when spec is package = "X.Y.Z".""" with pipenv_instance_private_pypi() as p: with open(p.pipfile_path, "w") as f: contents = """ @@ -163,7 +163,9 @@ def test_pinned_pipfile(pipenv_instance_pypi): @pytest.mark.install @pytest.mark.resolver @pytest.mark.backup_resolver -@pytest.mark.skipif(sys.version_info >= (3, 12), reason="Package does not work with Python 3.12") +@pytest.mark.skipif( + sys.version_info >= (3, 12), reason="Package does not work with Python 3.12" +) def test_backup_resolver(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: with open(p.pipfile_path, "w") as f: @@ -232,15 +234,10 @@ def test_bad_packages(pipenv_instance_private_pypi): @pytest.mark.install @pytest.mark.requirements def test_requirements_to_pipfile(pipenv_instance_private_pypi): - with pipenv_instance_private_pypi(pipfile=False) as p: - # Write a requirements file with open("requirements.txt", "w") as f: - f.write( - f"-i {p.index_url}\n" - "requests[socks]==2.19.1\n" - ) + f.write(f"-i {p.index_url}\nrequests[socks]==2.19.1\n") c = p.pipenv("install") assert c.returncode == 0 @@ -251,8 +248,8 @@ def test_requirements_to_pipfile(pipenv_instance_private_pypi): assert "requests" in p.pipfile["packages"] assert "extras" in p.pipfile["packages"]["requests"] assert not any( - source['url'] == 'https://private.pypi.org/simple' - for source in p.pipfile['source'] + source["url"] == "https://private.pypi.org/simple" + for source in p.pipfile["source"] ) # assert stuff in lockfile assert "requests" in p.lockfile["default"] @@ -301,8 +298,7 @@ def test_clean_on_empty_venv(pipenv_instance_pypi): @pytest.mark.basic @pytest.mark.install def test_install_does_not_extrapolate_environ(pipenv_instance_private_pypi): - """Ensure environment variables are not expanded in lock file. - """ + """Ensure environment variables are not expanded in lock file.""" with temp_environ(), pipenv_instance_private_pypi() as p: os.environ["PYPI_URL"] = p.pypi @@ -344,8 +340,7 @@ def test_editable_no_args(pipenv_instance_pypi): @pytest.mark.install @pytest.mark.virtualenv def test_install_venv_project_directory(pipenv_instance_pypi): - """Test the project functionality during virtualenv creation. 
- """ + """Test the project functionality during virtualenv creation.""" with pipenv_instance_pypi() as p, temp_environ(), TemporaryDirectory( prefix="pipenv-", suffix="temp_workon_home" ) as workon_home: @@ -392,7 +387,7 @@ def test_install_creates_pipfile(pipenv_instance_pypi): assert c.returncode == 0 assert os.path.isfile(p.pipfile_path) python_version = str(sys.version_info.major) + "." + str(sys.version_info.minor) - assert p.pipfile["requires"] == {'python_version': python_version} + assert p.pipfile["requires"] == {"python_version": python_version} @pytest.mark.basic @@ -404,9 +399,10 @@ def test_create_pipfile_requires_python_full_version(pipenv_instance_private_pyp c = p.pipenv(f"--python {python_full_version}") assert c.returncode == 0 assert p.pipfile["requires"] == { - 'python_full_version': python_full_version, - 'python_version': python_version - } + "python_full_version": python_full_version, + "python_version": python_version, + } + @pytest.mark.basic @pytest.mark.install @@ -415,8 +411,9 @@ def test_install_with_pipfile_including_exact_python_version(pipenv_instance_pyp valid_version = f"{sys.version_info.major}.{sys.version_info.minor}" with pipenv_instance_pypi() as p: - with open(p.pipfile_path, 'w') as f: - f.write(f""" + with open(p.pipfile_path, "w") as f: + f.write( + f""" [[source]] url = "https://test.pypi.org/simple" verify_ssl = true @@ -427,7 +424,8 @@ def test_install_with_pipfile_including_exact_python_version(pipenv_instance_pyp [requires] python_version = "{valid_version}" -""") +""" + ) c = p.pipenv("install") assert c.returncode == 0 @@ -437,6 +435,7 @@ def test_install_with_pipfile_including_exact_python_version(pipenv_instance_pyp c = p.pipenv("--rm") assert c.returncode == 0 + @pytest.mark.basic @pytest.mark.install @pytest.mark.virtualenv @@ -450,8 +449,9 @@ def test_install_with_pipfile_including_invalid_python_version(pipenv_instance_p with pipenv_instance_pypi() as p: for version in invalid_versions: - with open(p.pipfile_path, 'w') as f: - f.write(f""" + with open(p.pipfile_path, "w") as f: + f.write( + f""" [[source]] url = "https://test.pypi.org/simple" verify_ssl = true @@ -462,7 +462,8 @@ def test_install_with_pipfile_including_invalid_python_version(pipenv_instance_p [requires] python_version = "{version}" -""") +""" + ) c = p.pipenv("install") assert c.returncode != 0 @@ -489,7 +490,7 @@ def test_install_package_with_dots(pipenv_instance_private_pypi): @pytest.mark.install def test_rewrite_outline_table(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [[source]] url = "{}" @@ -502,7 +503,9 @@ def test_rewrite_outline_table(pipenv_instance_private_pypi): [packages.requests] version = "*" extras = ["socks"] - """.format(p.index_url, "{version = \"*\"}").strip() + """.format( + p.index_url, '{version = "*"}' + ).strip() f.write(contents) c = p.pipenv("install colorama") assert c.returncode == 0 @@ -518,7 +521,7 @@ def test_rewrite_outline_table(pipenv_instance_private_pypi): @pytest.mark.install def test_rewrite_outline_table_ooo(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [[source]] url = "{}" @@ -535,7 +538,9 @@ def test_rewrite_outline_table_ooo(pipenv_instance_private_pypi): [packages.requests] version = "*" extras = ["socks"] - """.format(p.index_url, "{version = \"*\"}").strip() + """.format( + 
p.index_url, '{version = "*"}' + ).strip() f.write(contents) c = p.pipenv("install colorama") assert c.returncode == 0 @@ -553,7 +558,6 @@ def test_install_dev_use_default_constraints(pipenv_instance_private_pypi): # See https://github.com/pypa/pipenv/issues/4371 # See https://github.com/pypa/pipenv/issues/2987 with pipenv_instance_private_pypi() as p: - c = p.pipenv("install requests==2.14.0") assert c.returncode == 0 assert "requests" in p.lockfile["default"] @@ -578,21 +582,27 @@ def test_install_dev_use_default_constraints(pipenv_instance_private_pypi): @pytest.mark.install @pytest.mark.needs_internet def test_install_does_not_exclude_packaging(pipenv_instance_pypi): - """Ensure that running `pipenv install` doesn't exclude packaging when its required. """ + """Ensure that running `pipenv install` doesn't exclude packaging when its required.""" with pipenv_instance_pypi() as p: c = p.pipenv("install dataclasses-json") assert c.returncode == 0 - c = p.pipenv("""run python -c "from dataclasses_json import DataClassJsonMixin" """) + c = p.pipenv( + """run python -c "from dataclasses_json import DataClassJsonMixin" """ + ) assert c.returncode == 0 @pytest.mark.basic @pytest.mark.install @pytest.mark.needs_internet -@pytest.mark.skip(reason="pip 23.3 now vendors in truststore and so test assumptions invalid ") +@pytest.mark.skip( + reason="pip 23.3 now vendors in truststore and so test assumptions invalid " +) def test_install_will_supply_extra_pip_args(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - c = p.pipenv("""install -v dataclasses-json --extra-pip-args="--use-feature=truststore --proxy=test" """) + c = p.pipenv( + """install -v dataclasses-json --extra-pip-args="--use-feature=truststore --proxy=test" """ + ) assert c.returncode == 1 assert "truststore feature" in c.stdout @@ -601,7 +611,7 @@ def test_install_will_supply_extra_pip_args(pipenv_instance_pypi): @pytest.mark.install @pytest.mark.needs_internet def test_install_tarball_is_actually_installed(pipenv_instance_pypi): - """ Test case for Issue 5326""" + """Test case for Issue 5326""" with pipenv_instance_pypi() as p: with open(p.pipfile_path, "w") as f: contents = """ @@ -689,3 +699,30 @@ def test_category_not_sorted_without_directive(pipenv_instance_private_pypi): "colorama", "build", ] + + +@pytest.mark.basic +@pytest.mark.install +def test_category_sorted_with_directive_when_insalling_with_extras( + pipenv_instance_private_pypi, +): + with pipenv_instance_private_pypi() as p: + with open(p.pipfile_path, "w+") as f: + contents = """ +[pipenv] +sort_pipfile = true + +[packages] +atomicwrites = "*" +six = "*" + """.strip() + f.write(contents) + c = p.pipenv("install requests[socks]") + assert c.returncode == 0 + assert "requests" in p.pipfile["packages"] + assert "extras" in p.pipfile["packages"]["requests"] + assert list(p.pipfile["packages"].keys()) == [ + "atomicwrites", + "requests", + "six", + ] diff --git a/tests/integration/test_install_categories.py b/tests/integration/test_install_categories.py index 353b42a85..f96293c71 100644 --- a/tests/integration/test_install_categories.py +++ b/tests/integration/test_install_categories.py @@ -81,9 +81,10 @@ def test_multiple_category_install_from_requirements(pipenv_instance_private_pyp @pytest.mark.install @pytest.mark.local @pytest.mark.skipif(sys.version_info >= (3, 12), reason="test is not 3.12 compatible") -def test_multiple_category_install_proceeds_in_order_specified(pipenv_instance_private_pypi): - """Ensure -e .[extras] installs. 
- """ +def test_multiple_category_install_proceeds_in_order_specified( + pipenv_instance_private_pypi, +): + """Ensure -e .[extras] installs.""" with pipenv_instance_private_pypi() as p: setup_py = os.path.join(p.path, "setup.py") with open(setup_py, "w") as fh: @@ -103,18 +104,22 @@ def test_multiple_category_install_proceeds_in_order_specified(pipenv_instance_p ) """.strip() fh.write(contents) - with open(os.path.join(p.path, 'Pipfile'), 'w') as fh: - fh.write(""" + with open(os.path.join(p.path, "Pipfile"), "w") as fh: + fh.write( + """ [packages] testpipenv = {path = ".", editable = true, skip_resolver = true} [prereq] six = "*" - """.strip()) + """.strip() + ) c = p.pipenv("lock -v") assert c.returncode == 0 assert "testpipenv" in p.lockfile["default"] assert "testpipenv" not in p.lockfile["prereq"] assert "six" in p.lockfile["prereq"] - c = p.pipenv('sync --categories="prereq packages" --extra-pip-args="--no-build-isolation" -v') + c = p.pipenv( + 'sync --categories="prereq packages" --extra-pip-args="--no-build-isolation" -v' + ) assert c.returncode == 0 diff --git a/tests/integration/test_install_markers.py b/tests/integration/test_install_markers.py index 98c3ed9a9..8d4f4bd8b 100644 --- a/tests/integration/test_install_markers.py +++ b/tests/integration/test_install_markers.py @@ -10,9 +10,8 @@ @pytest.mark.markers def test_package_environment_markers(pipenv_instance_private_pypi): - with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [[source]] url = "{}" @@ -23,16 +22,22 @@ def test_package_environment_markers(pipenv_instance_private_pypi): fake_package = {} [dev-packages] - """.format(p.index_url, "{version = \"*\", markers=\"os_name=='splashwear'\", index=\"testindex\"}").strip() + """.format( + p.index_url, + '{version = "*", markers="os_name==\'splashwear\'", index="testindex"}', + ).strip() f.write(contents) - c = p.pipenv('install -v') + c = p.pipenv("install -v") assert c.returncode == 0 - assert 'markers' in p.lockfile['default']['fake_package'], p.lockfile["default"] - assert p.lockfile['default']['fake_package']['markers'] == "os_name == 'splashwear'" - assert p.lockfile['default']['fake_package']['hashes'] == [ - 'sha256:1531e01a7f306f496721f425c8404f3cfd8d4933ee6daf4668fcc70059b133f3', - 'sha256:cf83dc3f6c34050d3360fbdf655b2652c56532e3028b1c95202611ba1ebdd624'] + assert "markers" in p.lockfile["default"]["fake_package"], p.lockfile["default"] + assert ( + p.lockfile["default"]["fake_package"]["markers"] == "os_name == 'splashwear'" + ) + assert p.lockfile["default"]["fake_package"]["hashes"] == [ + "sha256:1531e01a7f306f496721f425c8404f3cfd8d4933ee6daf4668fcc70059b133f3", + "sha256:cf83dc3f6c34050d3360fbdf655b2652c56532e3028b1c95202611ba1ebdd624", + ] c = p.pipenv('run python -c "import fake_package;"') assert c.returncode == 1 @@ -45,15 +50,17 @@ def test_platform_python_implementation_marker(pipenv_instance_private_pypi): incorrectly. """ with pipenv_instance_private_pypi() as p: - c = p.pipenv('install depends-on-marked-package') + c = p.pipenv("install depends-on-marked-package") assert c.returncode == 0 # depends-on-marked-package has an install_requires of # 'pytz; platform_python_implementation=="CPython"' # Verify that that marker shows up in our lockfile unaltered. 
- assert 'pytz' in p.lockfile['default'] - assert p.lockfile['default']['pytz'].get('markers') == \ - "platform_python_implementation == 'CPython'" + assert "pytz" in p.lockfile["default"] + assert ( + p.lockfile["default"]["pytz"].get("markers") + == "platform_python_implementation == 'CPython'" + ) @flaky @@ -61,18 +68,17 @@ def test_platform_python_implementation_marker(pipenv_instance_private_pypi): @pytest.mark.markers @pytest.mark.install def test_specific_package_environment_markers(pipenv_instance_pypi): - with pipenv_instance_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [packages] six = {version = "*", os_name = "== 'splashwear'"} """.strip() f.write(contents) - c = p.pipenv('install') + c = p.pipenv("install") assert c.returncode == 0 - assert 'markers' in p.lockfile['default']['six'] + assert "markers" in p.lockfile["default"]["six"] c = p.pipenv('run python -c "import six;"') assert c.returncode == 1 @@ -81,10 +87,9 @@ def test_specific_package_environment_markers(pipenv_instance_pypi): @flaky @pytest.mark.markers def test_top_level_overrides_environment_markers(pipenv_instance_pypi): - """Top-level environment markers should take precedence. - """ + """Top-level environment markers should take precedence.""" with pipenv_instance_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [packages] apscheduler = "*" @@ -92,10 +97,14 @@ def test_top_level_overrides_environment_markers(pipenv_instance_pypi): """.strip() f.write(contents) - c = p.pipenv('install') + c = p.pipenv("install") assert c.returncode == 0 - assert "markers" in p.lockfile['default']['funcsigs'], p.lockfile['default']['funcsigs'] - assert p.lockfile['default']['funcsigs']['markers'] == "os_name == 'splashwear'", p.lockfile['default']['funcsigs'] + assert "markers" in p.lockfile["default"]["funcsigs"], p.lockfile["default"][ + "funcsigs" + ] + assert ( + p.lockfile["default"]["funcsigs"]["markers"] == "os_name == 'splashwear'" + ), p.lockfile["default"]["funcsigs"] @flaky @@ -109,7 +118,7 @@ def test_global_overrides_environment_markers(pipenv_instance_private_pypi): also specified as an unconditional dep, its markers should be empty. """ with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = f""" [[source]] url = "{p.index_url}" @@ -122,10 +131,10 @@ def test_global_overrides_environment_markers(pipenv_instance_private_pypi): """.strip() f.write(contents) - c = p.pipenv('install') + c = p.pipenv("install") assert c.returncode == 0 - assert p.lockfile['default']['funcsigs'].get('markers', '') == '' + assert p.lockfile["default"]["funcsigs"].get("markers", "") == "" @flaky @@ -140,13 +149,13 @@ def test_resolver_unique_markers(pipenv_instance_pypi): This verifies that we clean that successfully. 
""" with pipenv_instance_pypi() as p: - c = p.pipenv('install vcrpy==2.0.1') + c = p.pipenv("install vcrpy==2.0.1") assert c.returncode == 0 - assert 'yarl' in p.lockfile['default'] - yarl = p.lockfile['default']['yarl'] - assert 'markers' in yarl + assert "yarl" in p.lockfile["default"] + yarl = p.lockfile["default"]["yarl"] + assert "markers" in yarl # Two possible marker sets are ok here - assert yarl['markers'] in [ + assert yarl["markers"] in [ "python_version in '3.4, 3.5, 3.6'", "python_version >= '3.4'", "python_version >= '3.5'", # yarl 1.3.0 requires python 3.5.3 @@ -158,8 +167,9 @@ def test_resolver_unique_markers(pipenv_instance_pypi): @pytest.mark.needs_internet def test_environment_variable_value_does_not_change_hash(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p, temp_environ(): - with open(p.pipfile_path, 'w') as f: - f.write(""" + with open(p.pipfile_path, "w") as f: + f.write( + """ [[source]] url = 'https://${PYPI_USERNAME}:${PYPI_PASSWORD}@pypi.org/simple' verify_ssl = true @@ -167,14 +177,15 @@ def test_environment_variable_value_does_not_change_hash(pipenv_instance_private [packages] six = "*" -""") +""" + ) project = Project() - os.environ['PYPI_USERNAME'] = 'whatever' - os.environ['PYPI_PASSWORD'] = 'pass' + os.environ["PYPI_USERNAME"] = "whatever" + os.environ["PYPI_PASSWORD"] = "pass" assert project.get_lockfile_hash() is None - c = p.pipenv('install') + c = p.pipenv("install") assert c.returncode == 0 lock_hash = project.get_lockfile_hash() assert lock_hash is not None @@ -183,9 +194,9 @@ def test_environment_variable_value_does_not_change_hash(pipenv_instance_private assert c.returncode == 0 assert project.get_lockfile_hash() == project.calculate_pipfile_hash() - os.environ['PYPI_PASSWORD'] = 'pass2' + os.environ["PYPI_PASSWORD"] = "pass2" assert project.get_lockfile_hash() == project.calculate_pipfile_hash() - with open(p.pipfile_path, 'a') as f: + with open(p.pipfile_path, "a") as f: f.write('requests = "==2.14.0"\n') assert project.get_lockfile_hash() != project.calculate_pipfile_hash() diff --git a/tests/integration/test_install_misc.py b/tests/integration/test_install_misc.py index 1fb0ff50e..b7d532b89 100644 --- a/tests/integration/test_install_misc.py +++ b/tests/integration/test_install_misc.py @@ -10,7 +10,7 @@ def test_install_uri_with_extras(pipenv_instance_pypi): server = DEFAULT_PRIVATE_PYPI_SERVER.replace("/simple", "") file_uri = f"{server}/packages/plette/plette-0.2.2-py2.py3-none-any.whl" with pipenv_instance_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = f""" [[source]] url = "{p.index_url}" diff --git a/tests/integration/test_install_twists.py b/tests/integration/test_install_twists.py index b4db8555e..c6e30aff3 100644 --- a/tests/integration/test_install_twists.py +++ b/tests/integration/test_install_twists.py @@ -40,8 +40,7 @@ def test_local_path_issue_6016(pipenv_instance_pypi): @pytest.mark.install @pytest.mark.local def test_local_extras_install(pipenv_instance_pypi): - """Ensure -e .[extras] installs. 
- """ + """Ensure -e .[extras] installs.""" with pipenv_instance_pypi() as p: setup_py = os.path.join(p.path, "setup.py") with open(setup_py, "w") as fh: @@ -62,13 +61,15 @@ def test_local_extras_install(pipenv_instance_pypi): """.strip() fh.write(contents) line = "-e .[dev]" - with open(os.path.join(p.path, 'Pipfile'), 'w') as fh: - fh.write(""" + with open(os.path.join(p.path, "Pipfile"), "w") as fh: + fh.write( + """ [packages] testpipenv = {path = ".", editable = true, extras = ["dev"]} [dev-packages] - """.strip()) + """.strip() + ) # project.write_toml({"packages": pipfile, "dev-packages": {}}) c = p.pipenv("install") assert c.returncode == 0 @@ -115,20 +116,22 @@ def helper_dependency_links_install_make_setup(pipenv_instance, deplink): @staticmethod def helper_dependency_links_install_test(pipenv_instance, deplink): - TestDirectDependencies.helper_dependency_links_install_make_setup(pipenv_instance, deplink) + TestDirectDependencies.helper_dependency_links_install_make_setup( + pipenv_instance, deplink + ) c = pipenv_instance.pipenv("install -v -e .") assert c.returncode == 0 assert "six" in pipenv_instance.lockfile["default"] - @pytest.mark.skip(reason="This test modifies os.environment which has side effects on other tests") + @pytest.mark.skip( + reason="This test modifies os.environment which has side effects on other tests" + ) def test_https_dependency_links_install(self, pipenv_instance_pypi): - """Ensure dependency_links are parsed and installed (needed for private repo dependencies). - """ + """Ensure dependency_links are parsed and installed (needed for private repo dependencies).""" with temp_environ(), pipenv_instance_pypi() as p: - os.environ["PIP_NO_BUILD_ISOLATION"] = '1' + os.environ["PIP_NO_BUILD_ISOLATION"] = "1" TestDirectDependencies.helper_dependency_links_install_test( - p, - 'six@ git+https://github.com/benjaminp/six@1.11.0' + p, "six@ git+https://github.com/benjaminp/six@1.11.0" ) @@ -177,8 +180,8 @@ def test_local_package(pipenv_instance_private_pypi, testsroot): import tarfile with tarfile.open(copy_to, "r:gz") as tgz: - def is_within_directory(directory, target): + def is_within_directory(directory, target): abs_directory = os.path.abspath(directory) abs_target = os.path.abspath(target) @@ -187,7 +190,6 @@ def is_within_directory(directory, target): return prefix == abs_directory def safe_extract(tar, path=".", members=None, *, numeric_owner=False): - for member in tar.getmembers(): member_path = os.path.join(path, member.name) if not is_within_directory(path, member_path): @@ -195,7 +197,6 @@ def safe_extract(tar, path=".", members=None, *, numeric_owner=False): tar.extractall(path, members, numeric_owner) - safe_extract(tgz, path=p.path) c = p.pipenv(f"install -e {package}") assert c.returncode == 0 @@ -253,7 +254,9 @@ def test_install_local_uri_special_character(pipenv_instance_private_pypi, tests @pytest.mark.run @pytest.mark.files @pytest.mark.install -def test_multiple_editable_packages_should_not_race(pipenv_instance_private_pypi, testsroot): +def test_multiple_editable_packages_should_not_race( + pipenv_instance_private_pypi, testsroot +): """Test for a race condition that can occur when installing multiple 'editable' packages at once, and which causes some of them to not be importable. 
@@ -280,12 +283,14 @@ def test_multiple_editable_packages_should_not_race(pipenv_instance_private_pypi source_path = p._pipfile.get_fixture_path(f"git/{pkg_name}/") shutil.copytree(source_path, pkg_name) - pipfile_string += f'"{pkg_name}" = {{path = "./{pkg_name}", editable = true}}\n' + pipfile_string += ( + f'"{pkg_name}" = {{path = "./{pkg_name}", editable = true}}\n' + ) - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: f.write(pipfile_string.strip()) - c = p.pipenv('install') + c = p.pipenv("install") assert c.returncode == 0 c = p.pipenv('run python -c "import jinja2, six"') @@ -293,11 +298,13 @@ def test_multiple_editable_packages_should_not_race(pipenv_instance_private_pypi @pytest.mark.skipif( - os.name == 'nt' and sys.version_info[:2] == (3, 8), - reason="Seems to work on 3.8 but not via the CI" + os.name == "nt" and sys.version_info[:2] == (3, 8), + reason="Seems to work on 3.8 but not via the CI", ) @pytest.mark.outdated -def test_outdated_should_compare_postreleases_without_failing(pipenv_instance_private_pypi): +def test_outdated_should_compare_postreleases_without_failing( + pipenv_instance_private_pypi, +): with pipenv_instance_private_pypi() as p: c = p.pipenv("install ibm-db-sa-py3==0.3.0") assert c.returncode == 0 @@ -310,21 +317,26 @@ def test_outdated_should_compare_postreleases_without_failing(pipenv_instance_pr assert "out-of-date" in c.stdout -@pytest.mark.skipif(sys.version_info >= (3, 12), reason="Package does not work with Python 3.12") +@pytest.mark.skipif( + sys.version_info >= (3, 12), reason="Package does not work with Python 3.12" +) def test_install_remote_wheel_file_with_extras(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - c = p.pipenv("install fastapi[dev]@https://files.pythonhosted.org/packages/4e/1a/04887c641b67e6649bde845b9a631f73a7abfbe3afda83957e09b95d88eb/fastapi-0.95.2-py3-none-any.whl") + c = p.pipenv( + "install fastapi[dev]@https://files.pythonhosted.org/packages/4e/1a/04887c641b67e6649bde845b9a631f73a7abfbe3afda83957e09b95d88eb/fastapi-0.95.2-py3-none-any.whl" + ) assert c.returncode == 0 assert "ruff" in p.lockfile["default"] assert "pre-commit" in p.lockfile["default"] assert "uvicorn" in p.lockfile["default"] + @pytest.mark.install @pytest.mark.skip_lock @pytest.mark.needs_internet def test_install_skip_lock(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [[source]] url = "{}" @@ -332,9 +344,11 @@ def test_install_skip_lock(pipenv_instance_private_pypi): name = "pypi" [packages] six = {} - """.format(p.index_url, '{version = "*", index = "pypi"}').strip() + """.format( + p.index_url, '{version = "*", index = "pypi"}' + ).strip() f.write(contents) - c = p.pipenv('install --skip-lock') + c = p.pipenv("install --skip-lock") assert c.returncode == 0 c = p.pipenv('run python -c "import six"') assert c.returncode == 0 diff --git a/tests/integration/test_install_uri.py b/tests/integration/test_install_uri.py index 71f89f2e2..745138dd6 100644 --- a/tests/integration/test_install_uri.py +++ b/tests/integration/test_install_uri.py @@ -12,18 +12,22 @@ @pytest.mark.needs_internet def test_basic_vcs_install_with_env_var(pipenv_instance_pypi): from pipenv.cli import cli - from click.testing import CliRunner # not thread safe but macos and linux will expand the env var otherwise + from click.testing import ( + CliRunner, + ) # not thread safe but macos and linux will expand the env var otherwise with 
pipenv_instance_pypi() as p: # edge case where normal package starts with VCS name shouldn't be flagged as vcs os.environ["GIT_HOST"] = "github.com" cli_runner = CliRunner(mix_stderr=False) - c = cli_runner.invoke(cli, "install -v git+https://${GIT_HOST}/benjaminp/six.git@1.11.0 gitdb2") + c = cli_runner.invoke( + cli, "install -v git+https://${GIT_HOST}/benjaminp/six.git@1.11.0 gitdb2" + ) assert c.exit_code == 0 assert all(package in p.pipfile["packages"] for package in ["six", "gitdb2"]) assert "git" in p.pipfile["packages"]["six"] assert p.lockfile["default"]["six"] == { - "git": "git+https://${GIT_HOST}/benjaminp/six.git", + "git": "https://${GIT_HOST}/benjaminp/six.git", "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", "ref": "15e31431af97e5e64b80af0a3f598d382bcdd49a", } @@ -37,9 +41,7 @@ def test_urls_work(pipenv_instance_pypi): with pipenv_instance_pypi() as p: # the library this installs is "django-cms" url = "https://github.com/lidatong/dataclasses-json/archive/refs/tags/v0.5.7.zip" - c = p.pipenv( - f"install {url}" - ) + c = p.pipenv(f"install {url}") assert c.returncode == 0 dep = list(p.pipfile["packages"].values())[0] @@ -53,7 +55,12 @@ def test_urls_work(pipenv_instance_pypi): @pytest.mark.files def test_file_urls_work(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - whl = Path(Path(__file__).resolve().parent.parent / "pypi" / "six" / "six-1.11.0-py2.py3-none-any.whl") + whl = Path( + Path(__file__).resolve().parent.parent + / "pypi" + / "six" + / "six-1.11.0-py2.py3-none-any.whl" + ) try: whl = whl.resolve() @@ -64,12 +71,11 @@ def test_file_urls_work(pipenv_instance_pypi): assert c.returncode == 0 assert "six" in p.pipfile["packages"] assert "file" in p.pipfile["packages"]["six"] - assert 'six' in p.lockfile["default"] - assert 'file' in p.lockfile["default"]["six"] + assert "six" in p.lockfile["default"] + assert "file" in p.lockfile["default"]["six"] assert "six-1.11.0-py2.py3-none-any.whl" in p.lockfile["default"]["six"]["file"] - @pytest.mark.e @pytest.mark.vcs @pytest.mark.urls @@ -90,16 +96,14 @@ def test_vcs_install(pipenv_instance_pypi): @pytest.mark.needs_internet def test_install_git_tag(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: - c = p.pipenv( - "install git+https://github.com/benjaminp/six.git@1.11.0" - ) + c = p.pipenv("install git+https://github.com/benjaminp/six.git@1.11.0") assert c.returncode == 0 assert "six" in p.pipfile["packages"] assert "six" in p.lockfile["default"] assert "git" in p.lockfile["default"]["six"] assert ( p.lockfile["default"]["six"]["git"] - == "git+https://github.com/benjaminp/six.git" + == "https://github.com/benjaminp/six.git" ) assert "ref" in p.lockfile["default"]["six"] @@ -108,7 +112,9 @@ def test_install_git_tag(pipenv_instance_private_pypi): @pytest.mark.index @pytest.mark.install @pytest.mark.needs_internet -@pytest.mark.skipif(sys.version_info >= (3, 12), reason="Package does not work with Python 3.12") +@pytest.mark.skipif( + sys.version_info >= (3, 12), reason="Package does not work with Python 3.12" +) def test_install_named_index_alias(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: with open(p.pipfile_path, "w") as f: @@ -137,7 +143,9 @@ def test_install_named_index_alias(pipenv_instance_private_pypi): @pytest.mark.index @pytest.mark.install @pytest.mark.needs_internet -@pytest.mark.skipif(sys.version_info >= (3, 12), reason="Package does not work with Python 3.12") +@pytest.mark.skipif( + sys.version_info >= (3, 12), 
reason="Package does not work with Python 3.12" +) def test_install_specifying_index_url(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: with open(p.pipfile_path, "w") as f: @@ -156,7 +164,9 @@ def test_install_specifying_index_url(pipenv_instance_private_pypi): install_search_all_sources = true """.strip() f.write(contents) - c = p.pipenv("install pipenv-test-private-package --index https://test.pypi.org/simple") + c = p.pipenv( + "install pipenv-test-private-package --index https://test.pypi.org/simple" + ) assert c.returncode == 0 pipfile = p.pipfile assert pipfile["source"][1]["url"] == "https://test.pypi.org/simple" @@ -187,9 +197,7 @@ def test_install_local_vcs_not_in_lockfile(pipenv_instance_pypi): @pytest.mark.needs_internet def test_get_vcs_refs(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: - c = p.pipenv( - "install -e git+https://github.com/benjaminp/six.git@1.9.0#egg=six" - ) + c = p.pipenv("install -e git+https://github.com/benjaminp/six.git@1.9.0#egg=six") assert c.returncode == 0 assert "six" in p.pipfile["packages"] assert "six" in p.lockfile["default"] @@ -239,8 +247,13 @@ def test_vcs_entry_supersedes_non_vcs(pipenv_instance_pypi): installed_packages = ["Flask", "Jinja2"] assert all(k in p.pipfile["packages"] for k in installed_packages) assert all(k.lower() in p.lockfile["default"] for k in installed_packages) - assert all(k in p.lockfile["default"]["jinja2"] for k in ["ref", "git"]), str(p.lockfile["default"]) - assert p.lockfile["default"]["jinja2"].get("ref") == "bbdafe33ce9f47e3cbfb9415619e354349f11243" + assert all(k in p.lockfile["default"]["jinja2"] for k in ["ref", "git"]), str( + p.lockfile["default"] + ) + assert ( + p.lockfile["default"]["jinja2"].get("ref") + == "bbdafe33ce9f47e3cbfb9415619e354349f11243" + ) assert p.lockfile["default"]["jinja2"]["git"] == f"{jinja2_uri}" @@ -251,7 +264,14 @@ def test_vcs_entry_supersedes_non_vcs(pipenv_instance_pypi): def test_vcs_can_use_markers(pipenv_instance_pypi): with pipenv_instance_pypi() as p: path = p._pipfile.get_fixture_path("git/six/") - p._pipfile.install("six", {"git": f"{path.as_uri()}", "ref": "1.11.0", "markers": "sys_platform == 'linux'"}) + p._pipfile.install( + "six", + { + "git": f"{path.as_uri()}", + "ref": "1.11.0", + "markers": "sys_platform == 'linux'", + }, + ) assert "six" in p.pipfile["packages"] c = p.pipenv("install") assert c.returncode == 0 diff --git a/tests/integration/test_install_vcs.py b/tests/integration/test_install_vcs.py new file mode 100644 index 000000000..d32e079d9 --- /dev/null +++ b/tests/integration/test_install_vcs.py @@ -0,0 +1,10 @@ +import pytest + + +@pytest.mark.basic +@pytest.mark.install +def test_install_github_vcs(pipenv_instance_pypi): + with pipenv_instance_pypi() as p: + c = p.pipenv("install git+https://github.com/reagento/adaptix.git@2.16") + assert not c.returncode + assert "dataclass-factory" in p.pipfile["packages"] diff --git a/tests/integration/test_lock.py b/tests/integration/test_lock.py index 4d6fc3387..7f3ed3af2 100644 --- a/tests/integration/test_lock.py +++ b/tests/integration/test_lock.py @@ -11,11 +11,11 @@ @pytest.mark.lock @pytest.mark.requirements def test_lock_handle_eggs(pipenv_instance_private_pypi): - """Ensure locking works with packages providing egg formats. 
- """ + """Ensure locking works with packages providing egg formats.""" with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: - f.write(f""" + with open(p.pipfile_path, "w") as f: + f.write( + f""" [[source]] url = "{p.index_url}" verify_ssl = false @@ -23,11 +23,12 @@ def test_lock_handle_eggs(pipenv_instance_private_pypi): [packages] RandomWords = "*" - """) - c = p.pipenv('lock --verbose') + """ + ) + c = p.pipenv("lock --verbose") assert c.returncode == 0 - assert 'randomwords' in p.lockfile['default'] - assert p.lockfile['default']['randomwords']['version'] == '==0.2.1' + assert "randomwords" in p.lockfile["default"] + assert p.lockfile["default"]["randomwords"]["version"] == "==0.2.1" @pytest.mark.lock @@ -48,7 +49,9 @@ def test_lock_gathers_pyproject_dependencies(pipenv_instance_pypi): f.write(contents) # Write the pyproject.toml - pyproject_toml_path = os.path.join(os.path.dirname(p.pipfile_path), "pyproject.toml") + pyproject_toml_path = os.path.join( + os.path.dirname(p.pipfile_path), "pyproject.toml" + ) with open(pyproject_toml_path, "w") as f: contents = """ [build-system] @@ -69,13 +72,11 @@ def test_lock_gathers_pyproject_dependencies(pipenv_instance_pypi): assert "six" in p.lockfile["default"] - @pytest.mark.lock @pytest.mark.requirements def test_lock_requirements_file(pipenv_instance_private_pypi): - with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [packages] urllib3 = "==1.23" @@ -88,12 +89,12 @@ def test_lock_requirements_file(pipenv_instance_private_pypi): dev_req_list = ("colorama==0.3.9",) - c = p.pipenv('lock') + c = p.pipenv("lock") assert c.returncode == 0 - default = p.pipenv('requirements') + default = p.pipenv("requirements") assert default.returncode == 0 - dev = p.pipenv('requirements --dev-only') + dev = p.pipenv("requirements --dev-only") for req in req_list: assert req in default.stdout @@ -104,19 +105,20 @@ def test_lock_requirements_file(pipenv_instance_private_pypi): @pytest.mark.lock def test_lock_includes_hashes_for_all_platforms(pipenv_instance_private_pypi): - """ Locking should include hashes for *all* platforms, not just the - platform we're running lock on. 
""" + """Locking should include hashes for *all* platforms, not just the + platform we're running lock on.""" - #releases = pytest_pypi.app.packages['yarl'].releases + # releases = pytest_pypi.app.packages['yarl'].releases + releases = { + "yarl-1.3.0-cp35-cp35m-manylinux1_x86_64.whl": "3890ab952d508523ef4881457c4099056546593fa05e93da84c7250516e632eb", + "yarl-1.3.0-cp35-cp35m-win_amd64.whl": "b25de84a8c20540531526dfbb0e2d2b648c13fd5dd126728c496d7c3fea33310", + "yarl-1.3.0-cp36-cp36m-manylinux1_x86_64.whl": "5badb97dd0abf26623a9982cd448ff12cb39b8e4c94032ccdedf22ce01a64842", + "yarl-1.3.0-cp36-cp36m-win_amd64.whl": "c6e341f5a6562af74ba55205dbd56d248daf1b5748ec48a0200ba227bb9e33f4", + "yarl-1.3.0-cp37-cp37m-win_amd64.whl": "73f447d11b530d860ca1e6b582f947688286ad16ca42256413083d13f260b7a0", + "yarl-1.3.0.tar.gz": "024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9", + } - releases = {'yarl-1.3.0-cp35-cp35m-manylinux1_x86_64.whl': '3890ab952d508523ef4881457c4099056546593fa05e93da84c7250516e632eb', - 'yarl-1.3.0-cp35-cp35m-win_amd64.whl': 'b25de84a8c20540531526dfbb0e2d2b648c13fd5dd126728c496d7c3fea33310', - 'yarl-1.3.0-cp36-cp36m-manylinux1_x86_64.whl': '5badb97dd0abf26623a9982cd448ff12cb39b8e4c94032ccdedf22ce01a64842', - 'yarl-1.3.0-cp36-cp36m-win_amd64.whl': 'c6e341f5a6562af74ba55205dbd56d248daf1b5748ec48a0200ba227bb9e33f4', - 'yarl-1.3.0-cp37-cp37m-win_amd64.whl': '73f447d11b530d860ca1e6b582f947688286ad16ca42256413083d13f260b7a0', - 'yarl-1.3.0.tar.gz': '024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9', - } def get_hash(release_name): # Convert a specific filename to a hash like what would show up in a Pipfile.lock. # For example: @@ -124,7 +126,7 @@ def get_hash(release_name): return f"sha256:{releases[release_name]}" with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = f""" [[source]] url = "{p.index_url}" @@ -136,18 +138,18 @@ def get_hash(release_name): """.strip() f.write(contents) - c = p.pipenv('lock') + c = p.pipenv("lock") assert c.returncode == 0 lock = p.lockfile - assert 'yarl' in lock['default'] - assert set(lock['default']['yarl']['hashes']) == { - get_hash('yarl-1.3.0-cp35-cp35m-manylinux1_x86_64.whl'), - get_hash('yarl-1.3.0-cp35-cp35m-win_amd64.whl'), - get_hash('yarl-1.3.0-cp36-cp36m-manylinux1_x86_64.whl'), - get_hash('yarl-1.3.0-cp36-cp36m-win_amd64.whl'), - get_hash('yarl-1.3.0-cp37-cp37m-win_amd64.whl'), - get_hash('yarl-1.3.0.tar.gz'), + assert "yarl" in lock["default"] + assert set(lock["default"]["yarl"]["hashes"]) == { + get_hash("yarl-1.3.0-cp35-cp35m-manylinux1_x86_64.whl"), + get_hash("yarl-1.3.0-cp35-cp35m-win_amd64.whl"), + get_hash("yarl-1.3.0-cp36-cp36m-manylinux1_x86_64.whl"), + get_hash("yarl-1.3.0-cp36-cp36m-win_amd64.whl"), + get_hash("yarl-1.3.0-cp37-cp37m-win_amd64.whl"), + get_hash("yarl-1.3.0.tar.gz"), } @@ -157,7 +159,10 @@ def test_resolve_skip_unmatched_requirements(pipenv_instance_pypi): p._pipfile.add("missing-package", {"markers": "os_name=='FakeOS'"}) c = p.pipenv("lock") assert c.returncode == 0 - assert 'Could not find a matching version of missing-package; os_name == "FakeOS"' in c.stderr + assert ( + 'Could not find a matching version of missing-package; os_name == "FakeOS"' + in c.stderr + ) @pytest.mark.lock @@ -167,38 +172,40 @@ def test_complex_lock_with_vcs_deps(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: requests_uri = p._pipfile.get_fixture_path("git/requests").as_uri() dateutil_uri = 
p._pipfile.get_fixture_path("git/dateutil").as_uri() - with open(p.pipfile_path, 'w') as f: - contents = """ + with open(p.pipfile_path, "w") as f: + contents = ( + """ [packages] click = "==6.7" [dev-packages] requests = {git = "%s"} - """.strip() % requests_uri + """.strip() + % requests_uri + ) f.write(contents) - c = p.pipenv('install') + c = p.pipenv("install") assert c.returncode == 0 lock = p.lockfile - assert 'requests' in lock['develop'] - assert 'click' in lock['default'] + assert "requests" in lock["develop"] + assert "click" in lock["default"] - c = p.pipenv(f'run pip install -e git+{dateutil_uri}#egg=python_dateutil') + c = p.pipenv(f"run pip install -e git+{dateutil_uri}#egg=python_dateutil") assert c.returncode == 0 lock = p.lockfile - assert 'requests' in lock['develop'] - assert 'click' in lock['default'] - assert 'python_dateutil' not in lock['default'] - assert 'python_dateutil' not in lock['develop'] + assert "requests" in lock["develop"] + assert "click" in lock["default"] + assert "python_dateutil" not in lock["default"] + assert "python_dateutil" not in lock["develop"] @pytest.mark.lock @pytest.mark.requirements def test_lock_with_prereleases(pipenv_instance_private_pypi): - with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [packages] sqlalchemy = "==1.2.0b3" @@ -208,9 +215,9 @@ def test_lock_with_prereleases(pipenv_instance_private_pypi): """.strip() f.write(contents) - c = p.pipenv('lock') + c = p.pipenv("lock") assert c.returncode == 0 - assert p.lockfile['default']['sqlalchemy']['version'] == '==1.2.0b3' + assert p.lockfile["default"]["sqlalchemy"]["version"] == "==1.2.0b3" @pytest.mark.lock @@ -220,17 +227,17 @@ def test_lock_with_prereleases(pipenv_instance_private_pypi): @flaky def test_complex_deps_lock_and_install_properly(pipenv_instance_pypi): # This uses the real PyPI because Maya has too many dependencies... 
- with pipenv_instance_pypi() as p, open(p.pipfile_path, 'w') as f: + with pipenv_instance_pypi() as p, open(p.pipfile_path, "w") as f: contents = """ [packages] maya = "*" """.strip() f.write(contents) - c = p.pipenv('lock --verbose') + c = p.pipenv("lock --verbose") assert c.returncode == 0 - c = p.pipenv('install') + c = p.pipenv("install") assert c.returncode == 0 @@ -238,22 +245,22 @@ def test_complex_deps_lock_and_install_properly(pipenv_instance_pypi): @pytest.mark.extras def test_lock_extras_without_install(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [packages] requests = {version = "*", extras = ["socks"]} """.strip() f.write(contents) - c = p.pipenv('lock') + c = p.pipenv("lock") assert c.returncode == 0 assert "requests" in p.lockfile["default"] assert "pysocks" in p.lockfile["default"] - assert "markers" not in p.lockfile["default"]['pysocks'] + assert "markers" not in p.lockfile["default"]["pysocks"] - c = p.pipenv('lock') + c = p.pipenv("lock") assert c.returncode == 0 - c = p.pipenv('requirements') + c = p.pipenv("requirements") assert c.returncode == 0 assert "extra == 'socks'" not in c.stdout.strip() @@ -265,7 +272,7 @@ def test_lock_extras_without_install(pipenv_instance_private_pypi): @pytest.mark.needs_internet def test_private_index_lock_requirements(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [[source]] url = "https://pypi.org/simple" @@ -281,7 +288,7 @@ def test_private_index_lock_requirements(pipenv_instance_private_pypi): pipenv-test-private-package = {version = "*", index = "testpypi"} """.strip() f.write(contents) - c = p.pipenv('lock') + c = p.pipenv("lock") assert c.returncode == 0 @@ -290,9 +297,11 @@ def test_private_index_lock_requirements(pipenv_instance_private_pypi): @pytest.mark.install # private indexes need to be uncached for resolution @pytest.mark.requirements @pytest.mark.needs_internet -def test_private_index_lock_requirements_for_not_canonical_package(pipenv_instance_private_pypi): +def test_private_index_lock_requirements_for_not_canonical_package( + pipenv_instance_private_pypi, +): with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [[source]] url = "https://pypi.org/simple" @@ -308,16 +317,15 @@ def test_private_index_lock_requirements_for_not_canonical_package(pipenv_instan pipenv_test_private_package = {version = "*", index = "testpypi"} """.strip() f.write(contents) - c = p.pipenv('lock') + c = p.pipenv("lock") assert c.returncode == 0 @pytest.mark.index @pytest.mark.install def test_lock_updated_source(pipenv_instance_private_pypi): - with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [[source]] url = "{url}/${{MY_ENV_VAR}}" @@ -326,16 +334,18 @@ def test_lock_updated_source(pipenv_instance_private_pypi): [packages] requests = "==2.14.0" - """.strip().format(url=p.pypi) + """.strip().format( + url=p.pypi + ) f.write(contents) with temp_environ(): - os.environ['MY_ENV_VAR'] = 'simple' - c = p.pipenv('lock') + os.environ["MY_ENV_VAR"] = "simple" + c = p.pipenv("lock") assert c.returncode == 0 - assert 'requests' in p.lockfile['default'] + assert "requests" in p.lockfile["default"] - with open(p.pipfile_path, 'w') as f: + with 
open(p.pipfile_path, "w") as f: contents = """ [[source]] url = "{url}/simple" @@ -344,12 +354,14 @@ def test_lock_updated_source(pipenv_instance_private_pypi): [packages] requests = "==2.14.0" - """.strip().format(url=p.pypi) + """.strip().format( + url=p.pypi + ) f.write(contents) - c = p.pipenv('lock') + c = p.pipenv("lock") assert c.returncode == 0 - assert 'requests' in p.lockfile['default'] + assert "requests" in p.lockfile["default"] @pytest.mark.vcs @@ -358,14 +370,17 @@ def test_lock_updated_source(pipenv_instance_private_pypi): def test_lock_editable_vcs_without_install(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: requests_uri = p._pipfile.get_fixture_path("git/six").as_uri() - with open(p.pipfile_path, 'w') as f: - f.write(""" + with open(p.pipfile_path, "w") as f: + f.write( + """ [packages] six = {git = "%s", editable = true} - """.strip() % requests_uri) - c = p.pipenv('lock') + """.strip() + % requests_uri + ) + c = p.pipenv("lock") assert c.returncode == 0 - assert 'six' in p.lockfile['default'] + assert "six" in p.lockfile["default"] @pytest.mark.vcs @@ -374,15 +389,21 @@ def test_lock_editable_vcs_without_install(pipenv_instance_private_pypi): def test_lock_editable_vcs_with_ref_in_git(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: requests_uri = p._pipfile.get_fixture_path("git/requests").as_uri() - with open(p.pipfile_path, 'w') as f: - f.write(""" + with open(p.pipfile_path, "w") as f: + f.write( + """ [packages] requests = {git = "%s@883caaf", editable = true} - """.strip() % requests_uri) - c = p.pipenv('lock') + """.strip() + % requests_uri + ) + c = p.pipenv("lock") assert c.returncode == 0 - assert requests_uri in p.lockfile['default']['requests']['git'] - assert p.lockfile['default']['requests']['ref'] == '883caaf145fbe93bd0d208a6b864de9146087312' + assert requests_uri in p.lockfile["default"]["requests"]["git"] + assert ( + p.lockfile["default"]["requests"]["ref"] + == "883caaf145fbe93bd0d208a6b864de9146087312" + ) @pytest.mark.vcs @@ -392,16 +413,19 @@ def test_lock_editable_vcs_with_ref_in_git(pipenv_instance_private_pypi): def test_lock_editable_vcs_with_extras_without_install(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: requests_uri = p._pipfile.get_fixture_path("git/requests").as_uri() - with open(p.pipfile_path, 'w') as f: - f.write(""" + with open(p.pipfile_path, "w") as f: + f.write( + """ [packages] requests = {git = "%s", editable = true, extras = ["socks"]} - """.strip() % requests_uri) - c = p.pipenv('lock') + """.strip() + % requests_uri + ) + c = p.pipenv("lock") assert c.returncode == 0 - assert 'requests' in p.lockfile['default'] - assert 'idna' in p.lockfile['default'] - assert 'certifi' in p.lockfile['default'] + assert "requests" in p.lockfile["default"] + assert "idna" in p.lockfile["default"] + assert "certifi" in p.lockfile["default"] assert "socks" in p.lockfile["default"]["requests"]["extras"] assert "version" not in p.lockfile["default"]["requests"] @@ -412,16 +436,19 @@ def test_lock_editable_vcs_with_extras_without_install(pipenv_instance_private_p def test_lock_editable_vcs_with_markers_without_install(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: requests_uri = p._pipfile.get_fixture_path("git/requests").as_uri() - with open(p.pipfile_path, 'w') as f: - f.write(""" + with open(p.pipfile_path, "w") as f: + f.write( + """ [packages] requests = {git = "%s", editable = true, markers = "python_version >= '2.6'"} - """.strip() % 
requests_uri) - c = p.pipenv('lock') + """.strip() + % requests_uri + ) + c = p.pipenv("lock") assert c.returncode == 0 - assert 'requests' in p.lockfile['default'] - assert 'idna' in p.lockfile['default'] - assert 'certifi' in p.lockfile['default'] + assert "requests" in p.lockfile["default"] + assert "idna" in p.lockfile["default"] + assert "certifi" in p.lockfile["default"] assert c.returncode == 0 @@ -429,22 +456,22 @@ def test_lock_editable_vcs_with_markers_without_install(pipenv_instance_private_ @pytest.mark.install def test_lockfile_corrupted(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - with open(p.lockfile_path, 'w') as f: - f.write('{corrupted}') - c = p.pipenv('install') + with open(p.lockfile_path, "w") as f: + f.write("{corrupted}") + c = p.pipenv("install") assert c.returncode == 0 - assert 'Pipfile.lock is corrupted' in c.stderr - assert p.lockfile['_meta'] + assert "Pipfile.lock is corrupted" in c.stderr + assert p.lockfile["_meta"] @pytest.mark.lock def test_lockfile_with_empty_dict(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - with open(p.lockfile_path, 'w') as f: - f.write('{}') - c = p.pipenv('install') + with open(p.lockfile_path, "w") as f: + f.write("{}") + c = p.pipenv("install") assert c.returncode == 0 - assert p.lockfile['_meta'] + assert p.lockfile["_meta"] @pytest.mark.vcs @@ -454,10 +481,9 @@ def test_vcs_lock_respects_top_level_pins(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: requests_uri = p._pipfile.get_fixture_path("git/requests").as_uri() - p._pipfile.add("requests", { - "editable": True, "git": f"{requests_uri}", - "ref": "v2.18.4" - }) + p._pipfile.add( + "requests", {"editable": True, "git": f"{requests_uri}", "ref": "v2.18.4"} + ) p._pipfile.add("urllib3", "==1.21.1") c = p.pipenv("lock") assert c.returncode == 0 @@ -479,17 +505,19 @@ def test_lock_after_update_source_name(pipenv_instance_pypi): [packages] six = "*" """.strip() - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: f.write(contents) c = p.pipenv("lock") assert c.returncode == 0 assert p.lockfile["default"]["six"]["index"] == "test" - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: f.write(contents.replace('name = "test"', 'name = "custom"')) c = p.pipenv("lock --clear") assert c.returncode == 0 assert "index" in p.lockfile["default"]["six"] - assert p.lockfile["default"]["six"]["index"] == "custom", Path(p.lockfile_path).read_text() + assert p.lockfile["default"]["six"]["index"] == "custom", Path( + p.lockfile_path + ).read_text() @pytest.mark.lock @@ -509,7 +537,7 @@ def test_lock_nested_direct_url(pipenv_instance_private_pypi): [packages] test_package = "*" """.strip() - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: f.write(contents) c = p.pipenv("lock") assert c.returncode == 0 @@ -522,18 +550,25 @@ def test_lock_nested_direct_url(pipenv_instance_private_pypi): @pytest.mark.needs_internet def test_lock_nested_vcs_direct_url(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - p._pipfile.add("pep508_package", { - "git": "https://github.com/techalchemy/test-project.git", - "editable": True, "ref": "master", - "subdirectory": "parent_folder/pep508-package" - }) + p._pipfile.add( + "pep508_package", + { + "git": "https://github.com/techalchemy/test-project.git", + "editable": True, + "ref": "master", + "subdirectory": "parent_folder/pep508-package", + }, + ) c = p.pipenv("lock") assert c.returncode == 0 assert "git" in 
p.lockfile["default"]["pep508-package"] assert "sibling-package" in p.lockfile["default"] assert "git" in p.lockfile["default"]["sibling-package"] assert "subdirectory" in p.lockfile["default"]["sibling-package"] - assert p.lockfile["default"]["sibling-package"]["subdirectory"] == "parent_folder/sibling_package" + assert ( + p.lockfile["default"]["sibling-package"]["subdirectory"] + == "parent_folder/sibling_package" + ) assert "version" not in p.lockfile["default"]["sibling-package"] @@ -568,7 +603,7 @@ def test_default_lock_overwrite_dev_lock(pipenv_instance_pypi): @pytest.mark.needs_internet def test_pipenv_respects_package_index_restrictions(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [[source]] url = "https://pypi.org/simple" @@ -582,21 +617,28 @@ def test_pipenv_respects_package_index_restrictions(pipenv_instance_private_pypi [packages] requests = {requirement} - """.strip().format(url=p.index_url, requirement='{version="*", index="local"}') + """.strip().format( + url=p.index_url, requirement='{version="*", index="local"}' + ) f.write(contents) - c = p.pipenv('lock') + c = p.pipenv("lock") assert c.returncode == 0 - assert 'requests' in p.lockfile['default'] - assert 'idna' in p.lockfile['default'] - assert 'certifi' in p.lockfile['default'] - assert 'urllib3' in p.lockfile['default'] - assert 'chardet' in p.lockfile['default'] + assert "requests" in p.lockfile["default"] + assert "idna" in p.lockfile["default"] + assert "certifi" in p.lockfile["default"] + assert "urllib3" in p.lockfile["default"] + assert "chardet" in p.lockfile["default"] # this is the newest version we have in our private pypi (but pypi.org has 2.27.1 at present) - expected_result = {'hashes': ['sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1', - 'sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a'], - 'index': 'local', 'version': '==2.19.1'} - assert p.lockfile['default']['requests'] == expected_result + expected_result = { + "hashes": [ + "sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1", + "sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a", + ], + "index": "local", + "version": "==2.19.1", + } + assert p.lockfile["default"]["requests"] == expected_result @pytest.mark.dev @@ -606,7 +648,7 @@ def test_dev_lock_use_default_packages_as_constraint(pipenv_instance_private_pyp # See https://github.com/pypa/pipenv/issues/4371 # See https://github.com/pypa/pipenv/issues/2987 with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = f""" [[source]] url = "{p.index_url}" @@ -650,7 +692,7 @@ def test_lock_specific_named_category(pipenv_instance_private_pypi): [prereq] six = "*" """.strip() - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: f.write(contents) c = p.pipenv("lock --categories prereq") assert c.returncode == 0 @@ -661,6 +703,7 @@ def test_lock_specific_named_category(pipenv_instance_private_pypi): assert p.lockfile["prereq"]["six"]["index"] == "test" assert p.lockfile["default"]["requests"]["index"] == "test" + def test_pinned_pipfile_no_null_markers_when_extras(pipenv_instance_pypi): with pipenv_instance_pypi() as p: with open(p.pipfile_path, "w") as f: @@ -675,13 +718,14 @@ def test_pinned_pipfile_no_null_markers_when_extras(pipenv_instance_pypi): assert "dataclasses-json" in p.lockfile["default"] 
assert p.lockfile["default"]["dataclasses-json"].get("markers", "") is not None + @pytest.mark.index @pytest.mark.install # private indexes need to be uncached for resolution @pytest.mark.skip_lock @pytest.mark.needs_internet def test_private_index_skip_lock(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [[source]] url = "https://pypi.org/simple" @@ -700,5 +744,5 @@ def test_private_index_skip_lock(pipenv_instance_private_pypi): install_search_all_sources = true """.strip() f.write(contents) - c = p.pipenv('install --skip-lock') + c = p.pipenv("install --skip-lock") assert c.returncode == 0 diff --git a/tests/integration/test_lockfile.py b/tests/integration/test_lockfile.py index 2617fa883..145c291d1 100644 --- a/tests/integration/test_lockfile.py +++ b/tests/integration/test_lockfile.py @@ -21,13 +21,11 @@ def pypi_lockfile(): yield lockfile -def test_git_branch_contains_slashes( - pipenv_instance_pypi, pypi_lockfile -): +def test_git_branch_contains_slashes(pipenv_instance_pypi, pypi_lockfile): pypi_lockfile["default"]["google-api-python-client"] = { "git": "https://github.com/thehesiod/google-api-python-client.git@thehesiod/batch-retries2", "markers": "python_version >= '3.7'", - "ref": "03803c21fc13a345e978f32775b2f2fa23c8e706" + "ref": "03803c21fc13a345e978f32775b2f2fa23c8e706", } with pipenv_instance_pypi() as p: @@ -40,14 +38,16 @@ def test_git_branch_contains_slashes( pip_installable_lines = requirements.requirements_from_lockfile( deps, include_hashes=False, include_markers=True ) - assert pip_installable_lines == ["google-api-python-client@ git+https://github.com/thehesiod/google-api-python-client.git@03803c21fc13a345e978f32775b2f2fa23c8e706"] + assert pip_installable_lines == [ + "google-api-python-client@ git+https://github.com/thehesiod/google-api-python-client.git@03803c21fc13a345e978f32775b2f2fa23c8e706" + ] def test_git_branch_contains_subdirectory_fragment(pipenv_instance_pypi, pypi_lockfile): pypi_lockfile["default"]["pep508_package"] = { "git": "https://github.com/techalchemy/test-project.git@master", "subdirectory": "parent_folder/pep508-package", - "ref": "03803c21fc13a345e978f32775b2f2fa23c8e706" + "ref": "03803c21fc13a345e978f32775b2f2fa23c8e706", } with pipenv_instance_pypi() as p: @@ -60,4 +60,6 @@ def test_git_branch_contains_subdirectory_fragment(pipenv_instance_pypi, pypi_lo pip_installable_lines = requirements.requirements_from_lockfile( deps, include_hashes=False, include_markers=True ) - assert pip_installable_lines == ['pep508_package@ git+https://github.com/techalchemy/test-project.git@03803c21fc13a345e978f32775b2f2fa23c8e706#subdirectory=parent_folder/pep508-package'] + assert pip_installable_lines == [ + "pep508_package@ git+https://github.com/techalchemy/test-project.git@03803c21fc13a345e978f32775b2f2fa23c8e706#subdirectory=parent_folder/pep508-package" + ] diff --git a/tests/integration/test_pipenv.py b/tests/integration/test_pipenv.py index 3fb69abda..e0ec23b1a 100644 --- a/tests/integration/test_pipenv.py +++ b/tests/integration/test_pipenv.py @@ -15,9 +15,8 @@ @pytest.mark.lock @pytest.mark.deploy def test_deploy_works(pipenv_instance_pypi): - with pipenv_instance_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [packages] dataclasses-json = "==0.5.7" @@ -27,16 +26,16 @@ def test_deploy_works(pipenv_instance_pypi): pytest = "==4.6.9" """.strip() f.write(contents) - c = 
p.pipenv('lock') + c = p.pipenv("lock") assert c.returncode == 0 - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [packages] dataclasses-json = "==0.5.7" """.strip() f.write(contents) - c = p.pipenv('install --deploy') + c = p.pipenv("install --deploy") assert c.returncode > 0 @@ -44,29 +43,29 @@ def test_deploy_works(pipenv_instance_pypi): @pytest.mark.lock def test_update_locks(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: - c = p.pipenv('install jdcal==1.3') + c = p.pipenv("install jdcal==1.3") assert c.returncode == 0 - assert p.lockfile['default']['jdcal']['version'] == '==1.3' + assert p.lockfile["default"]["jdcal"]["version"] == "==1.3" with open(p.pipfile_path) as fh: pipfile_contents = fh.read() - assert '==1.3' in pipfile_contents - pipfile_contents = pipfile_contents.replace('==1.3', '*') - with open(p.pipfile_path, 'w') as fh: + assert "==1.3" in pipfile_contents + pipfile_contents = pipfile_contents.replace("==1.3", "*") + with open(p.pipfile_path, "w") as fh: fh.write(pipfile_contents) - c = p.pipenv('update jdcal') + c = p.pipenv("update jdcal") assert c.returncode == 0 - assert p.lockfile['default']['jdcal']['version'] == '==1.4' - c = p.pipenv('run pip freeze') + assert p.lockfile["default"]["jdcal"]["version"] == "==1.4" + c = p.pipenv("run pip freeze") assert c.returncode == 0 lines = c.stdout.splitlines() - assert 'jdcal==1.4' in [l.strip() for l in lines] + assert "jdcal==1.4" in [l.strip() for l in lines] @pytest.mark.project @pytest.mark.proper_names def test_proper_names_unmanaged_virtualenv(pipenv_instance_pypi): with pipenv_instance_pypi(): - c = subprocess_run(['python', '-m', 'virtualenv', '.venv']) + c = subprocess_run(["python", "-m", "virtualenv", ".venv"]) assert c.returncode == 0 project = Project() assert project.proper_names == [] @@ -75,9 +74,9 @@ def test_proper_names_unmanaged_virtualenv(pipenv_instance_pypi): @pytest.mark.cli def test_directory_with_leading_dash(pipenv_instance_pypi): with temp_environ(), pipenv_instance_pypi() as p: - c = p.pipenv('run pip freeze') + c = p.pipenv("run pip freeze") assert c.returncode == 0 - c = p.pipenv('--venv') + c = p.pipenv("--venv") assert c.returncode == 0 venv_path = c.stdout.strip() assert os.path.isdir(venv_path) diff --git a/tests/integration/test_project.py b/tests/integration/test_project.py index 12ac6ca0d..eee575294 100644 --- a/tests/integration/test_project.py +++ b/tests/integration/test_project.py @@ -14,8 +14,9 @@ @pytest.mark.environ def test_pipfile_envvar_expansion(pipenv_instance_pypi): with pipenv_instance_pypi() as p, temp_environ(): - with open(p.pipfile_path, 'w') as f: - f.write(""" + with open(p.pipfile_path, "w") as f: + f.write( + """ [[source]] url = 'https://${TEST_HOST}/simple' verify_ssl = false @@ -23,20 +24,21 @@ def test_pipfile_envvar_expansion(pipenv_instance_pypi): [packages] pytz = "*" - """.strip()) - os.environ['TEST_HOST'] = 'localhost:5000' + """.strip() + ) + os.environ["TEST_HOST"] = "localhost:5000" project = Project() - assert project.sources[0]['url'] == 'https://localhost:5000/simple' - assert 'localhost:5000' not in str(Pipfile.load(open(p.pipfile_path))) + assert project.sources[0]["url"] == "https://localhost:5000/simple" + assert "localhost:5000" not in str(Pipfile.load(open(p.pipfile_path))) print(str(Pipfile.load(open(p.pipfile_path)))) @pytest.mark.project @pytest.mark.sources -@pytest.mark.parametrize('lock_first', [True, False]) +@pytest.mark.parametrize("lock_first", [True, False]) def 
test_get_source(pipenv_instance_private_pypi, lock_first): with pipenv_instance_private_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = f""" [[source]] url = "{p.index_url}" @@ -58,20 +60,17 @@ def test_get_source(pipenv_instance_private_pypi, lock_first): if lock_first: # force source to be cached - c = p.pipenv('lock') + c = p.pipenv("lock") assert c.returncode == 0 project = Project() - sources = [ - ['pypi', 'https://pypi.org/simple'], - ['testindex', p.index_url] - ] + sources = [["pypi", "https://pypi.org/simple"], ["testindex", p.index_url]] for src in sources: name, url = src - source = [s for s in project.pipfile_sources() if s.get('name') == name] + source = [s for s in project.pipfile_sources() if s.get("name") == name] assert source source = source[0] - assert source['name'] == name - assert source['url'] == url + assert source["name"] == name + assert source["url"] == url assert sorted(source.items()) == sorted(project.get_source(name=name).items()) assert sorted(source.items()) == sorted(project.get_source(url=url).items()) assert sorted(source.items()) == sorted(project.find_source(name).items()) @@ -80,11 +79,11 @@ def test_get_source(pipenv_instance_private_pypi, lock_first): @pytest.mark.install @pytest.mark.project -@pytest.mark.parametrize('newlines', ['\n', '\r\n']) +@pytest.mark.parametrize("newlines", ["\n", "\r\n"]) def test_maintain_file_line_endings(pipenv_instance_pypi, newlines): with pipenv_instance_pypi() as p: # Initial pipfile + lockfile generation - c = p.pipenv('install pytz') + c = p.pipenv("install pytz") assert c.returncode == 0 # Rewrite each file with parameterized newlines @@ -93,19 +92,21 @@ def test_maintain_file_line_endings(pipenv_instance_pypi, newlines): contents = f.read() # message because of https://github.com/pytest-dev/pytest/issues/3443 - with open(fn, 'w', newline=newlines) as f: + with open(fn, "w", newline=newlines) as f: f.write(contents) # Run pipenv install to programmatically rewrite - c = p.pipenv('install chardet') + c = p.pipenv("install chardet") assert c.returncode == 0 # Make sure we kept the right newlines for fn in [p.pipfile_path, p.lockfile_path]: with open(fn) as f: - f.read() # Consumes the content to detect newlines. + f.read() # Consumes the content to detect newlines. 
actual_newlines = f.newlines - assert actual_newlines == newlines, f'{actual_newlines!r} != {newlines!r} for {fn}' + assert ( + actual_newlines == newlines + ), f"{actual_newlines!r} != {newlines!r} for {fn}" @pytest.mark.project @@ -113,7 +114,7 @@ def test_maintain_file_line_endings(pipenv_instance_pypi, newlines): @pytest.mark.needs_internet def test_many_indexes(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = f""" [[source]] url = "{p.index_url}" @@ -137,21 +138,21 @@ def test_many_indexes(pipenv_instance_pypi): [dev-packages] """.strip() f.write(contents) - c = p.pipenv('install') + c = p.pipenv("install") assert c.returncode == 0 @pytest.mark.project @pytest.mark.virtualenv @pytest.mark.skipif( - os.name == 'nt' and sys.version_info[:2] == (3, 8), - reason="Seems to work on 3.8 but not via the CI" + os.name == "nt" and sys.version_info[:2] == (3, 8), + reason="Seems to work on 3.8 but not via the CI", ) def test_run_in_virtualenv(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - c = p.pipenv('run pip freeze') + c = p.pipenv("run pip freeze") assert c.returncode == 0 - assert 'Creating a virtualenv' in c.stderr + assert "Creating a virtualenv" in c.stderr project = Project() c = p.pipenv("run pip install click") assert c.returncode == 0 @@ -166,15 +167,16 @@ def test_run_in_virtualenv(pipenv_instance_pypi): assert c.returncode == 0 assert "click" in c.stdout + @pytest.mark.project @pytest.mark.sources def test_no_sources_in_pipfile(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [packages] pytest = "*" """.strip() f.write(contents) - c = p.pipenv('install --skip-lock') + c = p.pipenv("install --skip-lock") assert c.returncode == 0 diff --git a/tests/integration/test_requirements.py b/tests/integration/test_requirements.py index 4a64a0bbb..8be73b7ca 100644 --- a/tests/integration/test_requirements.py +++ b/tests/integration/test_requirements.py @@ -9,9 +9,9 @@ @pytest.mark.requirements def test_requirements_generates_requirements_from_lockfile(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - packages = ('requests', '2.14.0') - dev_packages = ('flask', '0.12.2') - with open(p.pipfile_path, 'w') as f: + packages = ("requests", "2.14.0") + dev_packages = ("flask", "0.12.2") + with open(p.pipfile_path, "w") as f: contents = f""" [packages] {packages[0]}= "=={packages[1]}" @@ -19,36 +19,38 @@ def test_requirements_generates_requirements_from_lockfile(pipenv_instance_pypi) {dev_packages[0]}= "=={dev_packages[1]}" """.strip() f.write(contents) - p.pipenv('lock') - c = p.pipenv('requirements') + p.pipenv("lock") + c = p.pipenv("requirements") assert c.returncode == 0 - assert f'{packages[0]}=={packages[1]}' in c.stdout - assert f'{dev_packages[0]}=={dev_packages[1]}' not in c.stdout + assert f"{packages[0]}=={packages[1]}" in c.stdout + assert f"{dev_packages[0]}=={dev_packages[1]}" not in c.stdout - d = p.pipenv('requirements --dev') + d = p.pipenv("requirements --dev") assert d.returncode == 0 - assert f'{packages[0]}=={packages[1]}' in d.stdout - assert f'{dev_packages[0]}=={dev_packages[1]}' in d.stdout + assert f"{packages[0]}=={packages[1]}" in d.stdout + assert f"{dev_packages[0]}=={dev_packages[1]}" in d.stdout - e = p.pipenv('requirements --dev-only') + e = p.pipenv("requirements --dev-only") assert e.returncode == 0 - assert f'{packages[0]}=={packages[1]}' not in 
e.stdout - assert f'{dev_packages[0]}=={dev_packages[1]}' in e.stdout + assert f"{packages[0]}=={packages[1]}" not in e.stdout + assert f"{dev_packages[0]}=={dev_packages[1]}" in e.stdout - e = p.pipenv('requirements --hash') + e = p.pipenv("requirements --hash") assert e.returncode == 0 - assert f'{packages[0]}=={packages[1]}' in e.stdout - for value in p.lockfile['default'].values(): - for hash in value['hashes']: - assert f' --hash={hash}' in e.stdout + assert f"{packages[0]}=={packages[1]}" in e.stdout + for value in p.lockfile["default"].values(): + for hash in value["hashes"]: + assert f" --hash={hash}" in e.stdout @pytest.mark.requirements -def test_requirements_generates_requirements_from_lockfile_multiple_sources(pipenv_instance_private_pypi): +def test_requirements_generates_requirements_from_lockfile_multiple_sources( + pipenv_instance_private_pypi, +): with pipenv_instance_private_pypi() as p: - packages = ('six', '1.12.0') - dev_packages = ('itsdangerous', '1.1.0') - with open(p.pipfile_path, 'w') as f: + packages = ("six", "1.12.0") + dev_packages = ("itsdangerous", "1.1.0") + with open(p.pipfile_path, "w") as f: contents = f""" [[source]] name = "pypi" @@ -64,24 +66,26 @@ def test_requirements_generates_requirements_from_lockfile_multiple_sources(pipe {dev_packages[0]}= "=={dev_packages[1]}" """.strip() f.write(contents) - l = p.pipenv('lock') + l = p.pipenv("lock") assert l.returncode == 0 - c = p.pipenv('requirements') + c = p.pipenv("requirements") assert c.returncode == 0 - assert '-i https://pypi.org/simple' in c.stdout - assert '--extra-index-url https://some_other_source.org' in c.stdout + assert "-i https://pypi.org/simple" in c.stdout + assert "--extra-index-url https://some_other_source.org" in c.stdout @pytest.mark.requirements -def test_requirements_generates_requirements_from_lockfile_from_categories(pipenv_instance_private_pypi): +def test_requirements_generates_requirements_from_lockfile_from_categories( + pipenv_instance_private_pypi, +): with pipenv_instance_private_pypi() as p: - packages = ('six', '1.12.0') - dev_packages = ('itsdangerous', '1.1.0') - test_packages = ('pytest', '7.1.3') - doc_packages = ('docutils', '0.19') + packages = ("six", "1.12.0") + dev_packages = ("itsdangerous", "1.1.0") + test_packages = ("pytest", "7.1.3") + doc_packages = ("docutils", "0.19") - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = f""" [[source]] name = "pypi" @@ -97,32 +101,35 @@ def test_requirements_generates_requirements_from_lockfile_from_categories(pipen {doc_packages[0]}= "=={doc_packages[1]}" """.strip() f.write(contents) - l = p.pipenv('lock') + l = p.pipenv("lock") assert l.returncode == 0 - c = p.pipenv('requirements --dev-only') + c = p.pipenv("requirements --dev-only") assert c.returncode == 0 - assert f'{packages[0]}=={packages[1]}' not in c.stdout - assert f'{test_packages[0]}=={test_packages[1]}' not in c.stdout - assert f'{doc_packages[0]}=={doc_packages[1]}' not in c.stdout - assert f'{dev_packages[0]}=={dev_packages[1]}' in c.stdout + assert f"{packages[0]}=={packages[1]}" not in c.stdout + assert f"{test_packages[0]}=={test_packages[1]}" not in c.stdout + assert f"{doc_packages[0]}=={doc_packages[1]}" not in c.stdout + assert f"{dev_packages[0]}=={dev_packages[1]}" in c.stdout d = p.pipenv('requirements --categories="test, doc"') assert d.returncode == 0 - assert f'{packages[0]}=={packages[1]}' not in d.stdout - assert f'{dev_packages[0]}=={dev_packages[1]}' not in d.stdout - assert 
f'{test_packages[0]}=={test_packages[1]}' in d.stdout - assert f'{doc_packages[0]}=={doc_packages[1]}' in d.stdout + assert f"{packages[0]}=={packages[1]}" not in d.stdout + assert f"{dev_packages[0]}=={dev_packages[1]}" not in d.stdout + assert f"{test_packages[0]}=={test_packages[1]}" in d.stdout + assert f"{doc_packages[0]}=={doc_packages[1]}" in d.stdout @pytest.mark.requirements def test_requirements_generates_requirements_with_from_pipfile(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - packages = ('requests', '2.31.0') - sub_packages = ('urllib3', '2.2.1') # subpackages not explicitly written in Pipfile. - dev_packages = ('flask', '0.12.2') - - with open(p.pipfile_path, 'w') as f: + packages = ("requests", "2.31.0") + sub_packages = ( + "urllib3", + "2.2.1", + ) # subpackages not explicitly written in Pipfile. + dev_packages = ("flask", "0.12.2") + + with open(p.pipfile_path, "w") as f: contents = f""" [packages] {packages[0]} = "=={packages[1]}" @@ -130,59 +137,59 @@ def test_requirements_generates_requirements_with_from_pipfile(pipenv_instance_p {dev_packages[0]} = "=={dev_packages[1]}" """.strip() f.write(contents) - p.pipenv('lock') + p.pipenv("lock") - c = p.pipenv('requirements --from-pipfile') + c = p.pipenv("requirements --from-pipfile") assert c.returncode == 0 - assert f'{packages[0]}=={packages[1]}' in c.stdout - assert f'{sub_packages[0]}=={sub_packages[1]}' not in c.stdout - assert f'{dev_packages[0]}=={dev_packages[1]}' not in c.stdout + assert f"{packages[0]}=={packages[1]}" in c.stdout + assert f"{sub_packages[0]}=={sub_packages[1]}" not in c.stdout + assert f"{dev_packages[0]}=={dev_packages[1]}" not in c.stdout - d = p.pipenv('requirements --dev --from-pipfile') + d = p.pipenv("requirements --dev --from-pipfile") assert d.returncode == 0 - assert f'{packages[0]}=={packages[1]}' in d.stdout - assert f'{sub_packages[0]}=={sub_packages[1]}' not in d.stdout - assert f'{dev_packages[0]}=={dev_packages[1]}' in d.stdout + assert f"{packages[0]}=={packages[1]}" in d.stdout + assert f"{sub_packages[0]}=={sub_packages[1]}" not in d.stdout + assert f"{dev_packages[0]}=={dev_packages[1]}" in d.stdout - e = p.pipenv('requirements --dev-only --from-pipfile') + e = p.pipenv("requirements --dev-only --from-pipfile") assert e.returncode == 0 - assert f'{packages[0]}=={packages[1]}' not in e.stdout - assert f'{sub_packages[0]}=={sub_packages[1]}' not in e.stdout - assert f'{dev_packages[0]}=={dev_packages[1]}' in e.stdout + assert f"{packages[0]}=={packages[1]}" not in e.stdout + assert f"{sub_packages[0]}=={sub_packages[1]}" not in e.stdout + assert f"{dev_packages[0]}=={dev_packages[1]}" in e.stdout - f = p.pipenv('requirements --categories=dev-packages --from-pipfile') + f = p.pipenv("requirements --categories=dev-packages --from-pipfile") assert f.returncode == 0 - assert f'{packages[0]}=={packages[1]}' not in f.stdout - assert f'{sub_packages[0]}=={sub_packages[1]}' not in f.stdout - assert f'{dev_packages[0]}=={dev_packages[1]}' in f.stdout + assert f"{packages[0]}=={packages[1]}" not in f.stdout + assert f"{sub_packages[0]}=={sub_packages[1]}" not in f.stdout + assert f"{dev_packages[0]}=={dev_packages[1]}" in f.stdout - g = p.pipenv('requirements --categories=packages,dev-packages --from-pipfile') + g = p.pipenv("requirements --categories=packages,dev-packages --from-pipfile") assert g.returncode == 0 - assert f'{packages[0]}=={packages[1]}' in g.stdout - assert f'{sub_packages[0]}=={sub_packages[1]}' not in g.stdout - assert 
f'{dev_packages[0]}=={dev_packages[1]}' in g.stdout + assert f"{packages[0]}=={packages[1]}" in g.stdout + assert f"{sub_packages[0]}=={sub_packages[1]}" not in g.stdout + assert f"{dev_packages[0]}=={dev_packages[1]}" in g.stdout @pytest.mark.requirements def test_requirements_with_git_requirements(pipenv_instance_pypi): - req_hash = '3264a0046e1aa3c0a813335286ebdbc651f58b13' + req_hash = "3264a0046e1aa3c0a813335286ebdbc651f58b13" lockfile = { "_meta": {"sources": []}, "default": { "dataclasses-json": { "editable": True, "git": "https://github.com/lidatong/dataclasses-json.git", - "ref": req_hash + "ref": req_hash, } }, - "develop": {} + "develop": {}, } with pipenv_instance_pypi() as p: - with open(p.lockfile_path, 'w') as f: + with open(p.lockfile_path, "w") as f: json.dump(lockfile, f) - c = p.pipenv('requirements') + c = p.pipenv("requirements") assert c.returncode == 0 assert "dataclasses-json" in c.stdout assert req_hash in c.stdout @@ -197,22 +204,22 @@ def test_requirements_markers_get_included(pipenv_instance_pypi): package: { "hashes": [ "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6", - "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255" + "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255", ], "markers": markers, - "version": version + "version": version, } }, - "develop": {} + "develop": {}, } with pipenv_instance_pypi() as p: - with open(p.lockfile_path, 'w') as f: + with open(p.lockfile_path, "w") as f: json.dump(lockfile, f) - c = p.pipenv('requirements') + c = p.pipenv("requirements") assert c.returncode == 0 - assert f'{package}{version}; {markers}' in c.stdout + assert f"{package}{version}; {markers}" in c.stdout @pytest.mark.requirements @@ -224,20 +231,20 @@ def test_requirements_markers_get_excluded(pipenv_instance_pypi): package: { "hashes": [ "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6", - "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255" + "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255", ], "markers": markers, - "version": version + "version": version, } }, - "develop": {} + "develop": {}, } with pipenv_instance_pypi() as p: - with open(p.lockfile_path, 'w') as f: + with open(p.lockfile_path, "w") as f: json.dump(lockfile, f) - c = p.pipenv('requirements --exclude-markers') + c = p.pipenv("requirements --exclude-markers") assert c.returncode == 0 assert markers not in c.stdout @@ -246,33 +253,35 @@ def test_requirements_markers_get_excluded(pipenv_instance_pypi): def test_requirements_hashes_get_included(pipenv_instance_pypi): package, version, markers = "werkzeug", "==2.1.2", "python_version >= '3.7'" first_hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6" - second_hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255" + second_hash = ( + "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255" + ) lockfile = { "_meta": {"sources": []}, "default": { package: { - "hashes": [ - first_hash, - second_hash - ], + "hashes": [first_hash, second_hash], "markers": markers, - "version": version + "version": version, } }, - "develop": {} + "develop": {}, } with pipenv_instance_pypi() as p: - with open(p.lockfile_path, 'w') as f: + with open(p.lockfile_path, "w") as f: json.dump(lockfile, f) - c = p.pipenv('requirements --hash') + c = p.pipenv("requirements --hash") assert c.returncode == 0 - assert f'{package}{version}; {markers} --hash={first_hash} 
--hash={second_hash}' in c.stdout + assert ( + f"{package}{version}; {markers} --hash={first_hash} --hash={second_hash}" + in c.stdout + ) def test_requirements_generates_requirements_from_lockfile_without_env_var_expansion( - pipenv_instance_pypi, + pipenv_instance_pypi, ): lockfile = { "_meta": { @@ -292,8 +301,8 @@ def test_requirements_generates_requirements_from_lockfile_without_env_var_expan json.dump(lockfile, f) with temp_environ(): - os.environ['redacted_user'] = "example_user" - os.environ['redacted_pwd'] = "example_pwd" + os.environ["redacted_user"] = "example_user" + os.environ["redacted_pwd"] = "example_pwd" c = p.pipenv("requirements") assert c.returncode == 0 @@ -308,15 +317,10 @@ def test_requirements_generates_requirements_from_lockfile_without_env_var_expan "deps, include_hashes, include_markers, expected", [ ( - { - "django-storages": { - "version": "==1.12.3", - "extras": ["azure"] - } - }, + {"django-storages": {"version": "==1.12.3", "extras": ["azure"]}}, True, True, - ["django-storages[azure]==1.12.3"] + ["django-storages[azure]==1.12.3"], ), ( { @@ -326,23 +330,26 @@ def test_requirements_generates_requirements_from_lockfile_without_env_var_expan }, True, True, - ["https://gitlab.com/eVotUM/Cripto-py/-/archive/develop/Cripto-py-develop.zip"] + [ + "https://gitlab.com/eVotUM/Cripto-py/-/archive/develop/Cripto-py-develop.zip" + ], ), ( { "pyjwt": { "git": "https://github.com/jpadilla/pyjwt.git", "ref": "7665aa625506a11bae50b56d3e04413a3dc6fdf8", - "extras": ["crypto"] + "extras": ["crypto"], } }, True, True, - ["pyjwt[crypto]@ git+https://github.com/jpadilla/pyjwt.git@7665aa625506a11bae50b56d3e04413a3dc6fdf8"] - ) - ] + [ + "pyjwt[crypto]@ git+https://github.com/jpadilla/pyjwt.git@7665aa625506a11bae50b56d3e04413a3dc6fdf8" + ], + ), + ], ) def test_requirements_from_deps(deps, include_hashes, include_markers, expected): result = requirements_from_lockfile(deps, include_hashes, include_markers) assert result == expected - diff --git a/tests/integration/test_run.py b/tests/integration/test_run.py index 2b938aa9b..f0f3b2125 100644 --- a/tests/integration/test_run.py +++ b/tests/integration/test_run.py @@ -9,53 +9,60 @@ @pytest.mark.run @pytest.mark.dotenv def test_env(pipenv_instance_pypi): - with pipenv_instance_pypi(pipfile=False, ) as p: + with pipenv_instance_pypi( + pipfile=False, + ) as p: with open(os.path.join(p.path, ".env"), "w") as f: f.write("HELLO=WORLD") - c = subprocess_run(['pipenv', 'run', 'python', '-c', "import os; print(os.environ['HELLO'])"], env=p.env) + c = subprocess_run( + ["pipenv", "run", "python", "-c", "import os; print(os.environ['HELLO'])"], + env=p.env, + ) assert c.returncode == 0 - assert 'WORLD' in c.stdout + assert "WORLD" in c.stdout @pytest.mark.run def test_scripts(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - with open(p.pipfile_path, 'w') as f: - f.write(r""" + with open(p.pipfile_path, "w") as f: + f.write( + r""" [scripts] printfoo = "python -c \"print('foo')\"" notfoundscript = "randomthingtotally" appendscript = "cmd arg1" multicommand = "bash -c \"cd docs && make html\"" - """) + """ + ) if os.name == "nt": f.write('scriptwithenv = "echo %HELLO%"\n') else: f.write('scriptwithenv = "echo $HELLO"\n') - c = p.pipenv('install') + c = p.pipenv("install") assert c.returncode == 0 - c = p.pipenv('run printfoo') + c = p.pipenv("run printfoo") assert c.returncode == 0 - assert c.stdout.strip() == 'foo' + assert c.stdout.strip() == "foo" - c = p.pipenv('run notfoundscript') + c = p.pipenv("run notfoundscript") assert 
c.returncode != 0 - assert c.stdout == '' - if os.name != 'nt': - assert 'could not be found' in c.stderr + assert c.stdout == "" + if os.name != "nt": + assert "could not be found" in c.stderr project = Project() - script = project.build_script('multicommand') - assert script.command == 'bash' - assert script.args == ['-c', 'cd docs && make html'] + script = project.build_script("multicommand") + assert script.command == "bash" + assert script.args == ["-c", "cd docs && make html"] - script = project.build_script('appendscript', ['a', 'b']) - assert script.command == 'cmd' - assert script.args == ['arg1', 'a', 'b'] + script = project.build_script("appendscript", ["a", "b"]) + assert script.command == "cmd" + assert script.args == ["arg1", "a", "b"] with temp_environ(): - os.environ['HELLO'] = 'WORLD' + os.environ["HELLO"] = "WORLD" c = p.pipenv("run scriptwithenv") assert c.returncode == 0 if os.name != "nt": # This doesn't work on CI windows. @@ -65,30 +72,34 @@ def test_scripts(pipenv_instance_pypi): @pytest.mark.run def test_scripts_with_package_functions(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - p.pipenv('install') + p.pipenv("install") pkg_path = os.path.join(p.path, "pkg") os.makedirs(pkg_path, exist_ok=True) file_path = os.path.join(pkg_path, "mod.py") with open(file_path, "w+") as f: - f.write(""" + f.write( + """ def test_func(): print("success") def arg_func(s, i): print(f"{s.upper()}. Easy as {i}") -""") +""" + ) - with open(p.pipfile_path, 'w') as f: - f.write(r""" + with open(p.pipfile_path, "w") as f: + f.write( + r""" [scripts] pkgfunc = {call = "pkg.mod:test_func"} argfunc = {call = "pkg.mod:arg_func('abc', 123)"} - """) + """ + ) - c = p.pipenv('run pkgfunc') + c = p.pipenv("run pkgfunc") assert c.stdout.strip() == "success" - c = p.pipenv('run argfunc') + c = p.pipenv("run argfunc") assert c.stdout.strip() == "ABC. 
Easy as 123" @@ -96,7 +107,7 @@ def arg_func(s, i): @pytest.mark.skip_windows def test_run_with_usr_env_shebang(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - p.pipenv('install') + p.pipenv("install") script_path = os.path.join(p.path, "test_script") with open(script_path, "w") as f: f.write( @@ -114,7 +125,7 @@ def test_run_with_usr_env_shebang(pipenv_instance_pypi): @pytest.mark.run -@pytest.mark.parametrize('quiet', [True, False]) +@pytest.mark.parametrize("quiet", [True, False]) def test_scripts_resolve_dot_env_vars(quiet, pipenv_instance_pypi): with pipenv_instance_pypi() as p: with open(".env", "w") as f: @@ -130,29 +141,29 @@ def test_scripts_resolve_dot_env_vars(quiet, pipenv_instance_pypi): """.strip() f.write(contents) if quiet: - c = p.pipenv('run --quiet hello') + c = p.pipenv("run --quiet hello") else: - c = p.pipenv('run hello') + c = p.pipenv("run hello") assert c.returncode == 0 - assert 'WORLD' in c.stdout + assert "WORLD" in c.stdout @pytest.mark.run -@pytest.mark.parametrize('quiet', [True, False]) +@pytest.mark.parametrize("quiet", [True, False]) def test_pipenv_run_pip_freeze_has_expected_output(quiet, pipenv_instance_pypi): with pipenv_instance_pypi() as p: - with open(p.pipfile_path, 'w') as f: + with open(p.pipfile_path, "w") as f: contents = """ [packages] requests = "==2.14.0" """.strip() f.write(contents) - c = p.pipenv('install') + c = p.pipenv("install") assert c.returncode == 0 if quiet: - c = p.pipenv('run --quiet pip freeze') + c = p.pipenv("run --quiet pip freeze") else: - c = p.pipenv('run pip freeze') + c = p.pipenv("run pip freeze") assert c.returncode == 0 - assert 'requests==2.14.0' in c.stdout + assert "requests==2.14.0" in c.stdout diff --git a/tests/integration/test_sync.py b/tests/integration/test_sync.py index 0161dbae3..0f891a4b1 100644 --- a/tests/integration/test_sync.py +++ b/tests/integration/test_sync.py @@ -7,14 +7,16 @@ @pytest.mark.sync def test_sync_error_without_lockfile(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - with open(p.pipfile_path, 'w') as f: - f.write(""" + with open(p.pipfile_path, "w") as f: + f.write( + """ [packages] - """.strip()) + """.strip() + ) - c = p.pipenv('sync') + c = p.pipenv("sync") assert c.returncode != 0 - assert 'Pipfile.lock not found!' in c.stderr + assert "Pipfile.lock not found!" in c.stderr @pytest.mark.sync @@ -22,9 +24,10 @@ def test_sync_error_without_lockfile(pipenv_instance_pypi): def test_mirror_lock_sync(pipenv_instance_private_pypi): with temp_environ(), pipenv_instance_private_pypi() as p: mirror_url = p.index_url - assert 'pypi.org' not in mirror_url - with open(p.pipfile_path, 'w') as f: - f.write(""" + assert "pypi.org" not in mirror_url + with open(p.pipfile_path, "w") as f: + f.write( + """ [[source]] name = "pypi" url = "https://pypi.org/simple" @@ -32,37 +35,40 @@ def test_mirror_lock_sync(pipenv_instance_private_pypi): [packages] six = "==1.12.0" - """.strip()) - c = p.pipenv(f'lock --pypi-mirror {mirror_url}') + """.strip() + ) + c = p.pipenv(f"lock --pypi-mirror {mirror_url}") assert c.returncode == 0 - c = p.pipenv(f'sync --pypi-mirror {mirror_url}') + c = p.pipenv(f"sync --pypi-mirror {mirror_url}") assert c.returncode == 0 @pytest.mark.sync @pytest.mark.lock def test_sync_should_not_lock(pipenv_instance_pypi): - """Sync should not touch the lock file, even if Pipfile is changed. 
- """ + """Sync should not touch the lock file, even if Pipfile is changed.""" with pipenv_instance_pypi() as p: - with open(p.pipfile_path, 'w') as f: - f.write(""" + with open(p.pipfile_path, "w") as f: + f.write( + """ [packages] - """.strip()) + """.strip() + ) # Perform initial lock. - c = p.pipenv('lock') + c = p.pipenv("lock") assert c.returncode == 0 lockfile_content = p.lockfile assert lockfile_content # Make sure sync does not trigger lockfile update. - with open(p.pipfile_path, 'w') as f: - f.write(""" + with open(p.pipfile_path, "w") as f: + f.write( + """ [packages] six = "*" - """.strip()) - c = p.pipenv('sync') + """.strip() + ) + c = p.pipenv("sync") assert c.returncode == 0 assert lockfile_content == p.lockfile - diff --git a/tests/integration/test_uninstall.py b/tests/integration/test_uninstall.py index e8a6bf539..04c338bd9 100644 --- a/tests/integration/test_uninstall.py +++ b/tests/integration/test_uninstall.py @@ -21,7 +21,9 @@ def test_uninstall_requests(pipenv_instance_pypi): @pytest.mark.uninstall -@pytest.mark.skipif(sys.version_info >= (3, 12), reason="Package does not work with Python 3.12") +@pytest.mark.skipif( + sys.version_info >= (3, 12), reason="Package does not work with Python 3.12" +) def test_uninstall_django(pipenv_instance_private_pypi): with pipenv_instance_private_pypi() as p: c = p.pipenv("install Django") @@ -45,10 +47,11 @@ def test_uninstall_django(pipenv_instance_private_pypi): @pytest.mark.install @pytest.mark.uninstall -@pytest.mark.skipif(sys.version_info >= (3, 12), reason="Package does not work with Python 3.12") +@pytest.mark.skipif( + sys.version_info >= (3, 12), reason="Package does not work with Python 3.12" +) def test_mirror_uninstall(pipenv_instance_pypi): with temp_environ(), pipenv_instance_pypi() as p: - mirror_url = DEFAULT_PRIVATE_PYPI_SERVER assert "pypi.org" not in mirror_url @@ -86,12 +89,17 @@ def test_mirror_uninstall(pipenv_instance_pypi): @pytest.mark.uninstall def test_uninstall_all_local_files(pipenv_instance_private_pypi, testsroot): with pipenv_instance_private_pypi() as p: - file_uri = p._pipfile.get_fixture_path("tablib/tablib-0.12.1.tar.gz", fixtures="pypi").as_uri() + file_uri = p._pipfile.get_fixture_path( + "tablib/tablib-0.12.1.tar.gz", fixtures="pypi" + ).as_uri() c = p.pipenv(f"install {file_uri}") assert c.returncode == 0 c = p.pipenv("uninstall --all") - assert "tablib" not in p.pipfile["packages"] - assert "tablib" not in p.lockfile["default"] + assert c.returncode == 0 + assert "tablib" in c.stdout + # Uninstall --all is not supposed to remove things from the pipfile + # Note that it didn't before, but that instead local filenames showed as hashes + assert "tablib" in p.pipfile["packages"] @pytest.mark.install @@ -302,6 +310,7 @@ def test_sorting_handles_str_values_and_dict_values(pipenv_instance_private_pypi "zipp", ] + @pytest.mark.install @pytest.mark.uninstall def test_category_not_sorted_without_directive(pipenv_instance_private_pypi): @@ -327,3 +336,23 @@ def test_category_not_sorted_without_directive(pipenv_instance_private_pypi): "colorama", "atomicwrites", ] + + +@pytest.mark.uninstall +def test_uninstall_without_venv(pipenv_instance_private_pypi): + with pipenv_instance_private_pypi() as p: + with open(p.pipfile_path, "w") as f: + contents = """ +[packages] +colorama = "*" +atomicwrites = "*" + """.strip() + f.write(contents) + + c = p.pipenv("install") + assert c.returncode == 0 + + c = p.pipenv("uninstall --all") + assert c.returncode == 0 + # uninstall --all shold not remove packages from 
Pipfile + assert list(p.pipfile["packages"].keys()) == ["colorama", "atomicwrites"] diff --git a/tests/integration/test_update.py b/tests/integration/test_update.py index 7b6acf5ea..51c133db9 100644 --- a/tests/integration/test_update.py +++ b/tests/integration/test_update.py @@ -6,7 +6,7 @@ @pytest.mark.update @pytest.mark.skipif( "os.name == 'nt' and sys.version_info[:2] == (3, 8)", - reason="Seems to work on 3.8 but not via the CI" + reason="Seems to work on 3.8 but not via the CI", ) def test_update_outdated_with_outdated_package(pipenv_instance_private_pypi, cmd_option): with pipenv_instance_private_pypi() as p: diff --git a/tests/integration/test_windows.py b/tests/integration/test_windows.py index 402ce81b4..46a9434f8 100644 --- a/tests/integration/test_windows.py +++ b/tests/integration/test_windows.py @@ -7,19 +7,18 @@ from pipenv.utils.processes import subprocess_run # This module is run only on Windows. -pytestmark = pytest.mark.skipif(os.name != 'nt', reason="only relevant on windows") +pytestmark = pytest.mark.skipif(os.name != "nt", reason="only relevant on windows") @pytest.mark.project def test_case_changes_windows(pipenv_instance_pypi): - """Test project matching for case changes on Windows. - """ + """Test project matching for case changes on Windows.""" with pipenv_instance_pypi() as p: - c = p.pipenv('install pytz') + c = p.pipenv("install pytz") assert c.returncode == 0 # Canonical venv location. - c = p.pipenv('--venv') + c = p.pipenv("--venv") assert c.returncode == 0 virtualenv_location = c.stdout.strip() @@ -27,12 +26,12 @@ def test_case_changes_windows(pipenv_instance_pypi): target = p.path.upper() if target == p.path: target = p.path.lower() - os.chdir('..') + os.chdir("..") os.chdir(target) assert os.path.abspath(os.curdir) != p.path # Ensure the incorrectly-cased project can find the correct venv. 
- c = p.pipenv('--venv') + c = p.pipenv("--venv") assert c.returncode == 0 assert c.stdout.strip().lower() == virtualenv_location.lower() @@ -40,9 +39,8 @@ def test_case_changes_windows(pipenv_instance_pypi): @pytest.mark.files @pytest.mark.local def test_local_path_windows(pipenv_instance_pypi): - whl = ( - Path(__file__).parent.parent - .joinpath('pypi', 'six', 'six-1.11.0-py2.py3-none-any.whl') + whl = Path(__file__).parent.parent.joinpath( + "pypi", "six", "six-1.11.0-py2.py3-none-any.whl" ) try: whl = whl.resolve() @@ -56,9 +54,8 @@ def test_local_path_windows(pipenv_instance_pypi): @pytest.mark.local @pytest.mark.files def test_local_path_windows_forward_slash(pipenv_instance_pypi): - whl = ( - Path(__file__).parent.parent - .joinpath('pypi', 'six', 'six-1.11.0-py2.py3-none-any.whl') + whl = Path(__file__).parent.parent.joinpath( + "pypi", "six", "six-1.11.0-py2.py3-none-any.whl" ) try: whl = whl.resolve() @@ -70,20 +67,20 @@ def test_local_path_windows_forward_slash(pipenv_instance_pypi): @pytest.mark.skipif( - os.name == 'nt' and sys.version_info[:2] == (3, 8), - reason="Seems to work on 3.8 but not via the CI" + os.name == "nt" and sys.version_info[:2] == (3, 8), + reason="Seems to work on 3.8 but not via the CI", ) @pytest.mark.cli def test_pipenv_clean_windows(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - c = p.pipenv('install dataclasses-json') + c = p.pipenv("install dataclasses-json") assert c.returncode == 0 - c = p.pipenv(f'run pip install -i {p.index_url} click') + c = p.pipenv(f"run pip install -i {p.index_url} click") assert c.returncode == 0 - c = p.pipenv('clean --dry-run') + c = p.pipenv("clean --dry-run") assert c.returncode == 0 - assert 'click' in c.stdout.strip() + assert "click" in c.stdout.strip() @pytest.mark.cli diff --git a/tests/unit/test_cmdparse.py b/tests/unit/test_cmdparse.py index 9afc0b09c..6c1436441 100644 --- a/tests/unit/test_cmdparse.py +++ b/tests/unit/test_cmdparse.py @@ -6,54 +6,59 @@ @pytest.mark.run @pytest.mark.script def test_parse(): - script = Script.parse(['python', '-c', "print('hello')"]) - assert script.command == 'python' - assert script.args == ['-c', "print('hello')"], script + script = Script.parse(["python", "-c", "print('hello')"]) + assert script.command == "python" + assert script.args == ["-c", "print('hello')"], script @pytest.mark.run @pytest.mark.script def test_parse_error(): with pytest.raises(ScriptEmptyError) as e: - Script.parse('') + Script.parse("") assert str(e.value) == "[]" @pytest.mark.run def test_extend(): - script = Script('python', ['-c', "print('hello')"]) - script.extend(['--verbose']) - assert script.command == 'python' - assert script.args == ['-c', "print('hello')", "--verbose"], script + script = Script("python", ["-c", "print('hello')"]) + script.extend(["--verbose"]) + assert script.command == "python" + assert script.args == ["-c", "print('hello')", "--verbose"], script @pytest.mark.run @pytest.mark.script def test_cmdify(): - script = Script('python', ['-c', "print('hello world')"]) + script = Script("python", ["-c", "print('hello world')"]) cmd = script.cmdify() - assert cmd == 'python -c "print(\'hello world\')"', script + assert cmd == "python -c \"print('hello world')\"", script @pytest.mark.run @pytest.mark.script def test_cmdify_complex(): - script = Script.parse(' '.join([ - '"C:\\Program Files\\Python36\\python.exe" -c', - """ "print(\'Double quote: \\\"\')" """.strip(), - ])) - assert script.cmdify() == ' '.join([ - '"C:\\Program Files\\Python36\\python.exe"', - '-c', - """ 
"print(\'Double quote: \\\"\')" """.strip(), - ]), script + script = Script.parse( + " ".join( + [ + '"C:\\Program Files\\Python36\\python.exe" -c', + """ "print(\'Double quote: \\\"\')" """.strip(), + ] + ) + ) + assert script.cmdify() == " ".join( + [ + '"C:\\Program Files\\Python36\\python.exe"', + "-c", + """ "print(\'Double quote: \\\"\')" """.strip(), + ] + ), script @pytest.mark.run @pytest.mark.script def test_cmdify_quote_if_paren_in_command(): - """Ensure ONLY the command is quoted if it contains parentheses. - """ + """Ensure ONLY the command is quoted if it contains parentheses.""" script = Script.parse('"C:\\Python36(x86)\\python.exe" -c print(123)') assert script.cmdify() == '"C:\\Python36(x86)\\python.exe" -c print(123)', script @@ -61,7 +66,6 @@ def test_cmdify_quote_if_paren_in_command(): @pytest.mark.run @pytest.mark.script def test_cmdify_quote_if_carets(): - """Ensure arguments are quoted if they contain carets. - """ - script = Script('foo^bar', ['baz^rex']) + """Ensure arguments are quoted if they contain carets.""" + script = Script("foo^bar", ["baz^rex"]) assert script.cmdify() == '"foo^bar" "baz^rex"', script diff --git a/tests/unit/test_core.py b/tests/unit/test_core.py index 2702ea05e..f85769745 100644 --- a/tests/unit/test_core.py +++ b/tests/unit/test_core.py @@ -12,24 +12,27 @@ def test_suppress_nested_venv_warning(capsys, project): # Capture the stderr of warn_in_virtualenv to test for the presence of the # courtesy notice. - project.s.PIPENV_VIRTUALENV = 'totallyrealenv' + project.s.PIPENV_VIRTUALENV = "totallyrealenv" project.s.PIPENV_VERBOSITY = -1 warn_in_virtualenv(project) output, err = capsys.readouterr() - assert 'Courtesy Notice' not in err + assert "Courtesy Notice" not in err @pytest.mark.core def test_load_dot_env_from_environment_variable_location(monkeypatch, capsys, project): - with temp_environ(), monkeypatch.context() as m, TemporaryDirectory(prefix='pipenv-', suffix='') as tempdir: + with temp_environ(), monkeypatch.context() as m, TemporaryDirectory( + prefix="pipenv-", suffix="" + ) as tempdir: if os.name == "nt": from pipenv.vendor import click + is_console = False m.setattr(click._winconsole, "_is_console", lambda x: is_console) - dotenv_path = os.path.join(tempdir, 'test.env') - key, val = 'SOME_KEY', 'some_value' - with open(dotenv_path, 'w') as f: - f.write(f'{key}={val}') + dotenv_path = os.path.join(tempdir, "test.env") + key, val = "SOME_KEY", "some_value" + with open(dotenv_path, "w") as f: + f.write(f"{key}={val}") project.s.PIPENV_DOTENV_LOCATION = str(dotenv_path) load_dot_env(project) @@ -38,15 +41,18 @@ def test_load_dot_env_from_environment_variable_location(monkeypatch, capsys, pr @pytest.mark.core def test_doesnt_load_dot_env_if_disabled(monkeypatch, capsys, project): - with temp_environ(), monkeypatch.context() as m, TemporaryDirectory(prefix='pipenv-', suffix='') as tempdir: + with temp_environ(), monkeypatch.context() as m, TemporaryDirectory( + prefix="pipenv-", suffix="" + ) as tempdir: if os.name == "nt": from pipenv.vendor import click + is_console = False m.setattr(click._winconsole, "_is_console", lambda x: is_console) - dotenv_path = os.path.join(tempdir, 'test.env') - key, val = 'SOME_KEY', 'some_value' - with open(dotenv_path, 'w') as f: - f.write(f'{key}={val}') + dotenv_path = os.path.join(tempdir, "test.env") + key, val = "SOME_KEY", "some_value" + with open(dotenv_path, "w") as f: + f.write(f"{key}={val}") project.s.PIPENV_DOTENV_LOCATION = str(dotenv_path) project.s.PIPENV_DONT_LOAD_ENV = True @@ -59,13 
+65,16 @@ def test_doesnt_load_dot_env_if_disabled(monkeypatch, capsys, project): @pytest.mark.core def test_load_dot_env_warns_if_file_doesnt_exist(monkeypatch, capsys, project): - with temp_environ(), monkeypatch.context() as m, TemporaryDirectory(prefix='pipenv-', suffix='') as tempdir: + with temp_environ(), monkeypatch.context() as m, TemporaryDirectory( + prefix="pipenv-", suffix="" + ) as tempdir: if os.name == "nt": from pipenv.vendor import click + is_console = False m.setattr(click._winconsole, "_is_console", lambda x: is_console) - dotenv_path = os.path.join(tempdir, 'does-not-exist.env') + dotenv_path = os.path.join(tempdir, "does-not-exist.env") project.s.PIPENV_DOTENV_LOCATION = str(dotenv_path) load_dot_env(project) output, err = capsys.readouterr() - assert 'Warning' in err + assert "Warning" in err diff --git a/tests/unit/test_funktools.py b/tests/unit/test_funktools.py index 39c7b3ec0..885a5274c 100644 --- a/tests/unit/test_funktools.py +++ b/tests/unit/test_funktools.py @@ -5,19 +5,24 @@ def test_unnest(): nested_iterable = ( - 1234, (3456, 4398345, (234234)), ( - 2396, ( - 928379, 29384, ( - 293759, 2347, ( - 2098, 7987, 27599 - ) - ) - ) - ) - ) - list(unnest(nested_iterable)) == [1234, 3456, 4398345, 234234, - 2396, 928379, 29384, 293759, - 2347, 2098, 7987, 27599] + 1234, + (3456, 4398345, (234234)), + (2396, (928379, 29384, (293759, 2347, (2098, 7987, 27599)))), + ) + list(unnest(nested_iterable)) == [ + 1234, + 3456, + 4398345, + 234234, + 2396, + 928379, + 29384, + 293759, + 2347, + 2098, + 7987, + 27599, + ] @pytest.mark.parametrize( diff --git a/tests/unit/test_help.py b/tests/unit/test_help.py index 812925f75..82893f996 100644 --- a/tests/unit/test_help.py +++ b/tests/unit/test_help.py @@ -9,7 +9,8 @@ @pytest.mark.help def test_help(): output = subprocess.check_output( - [sys.executable, '-m', 'pipenv.help'], - stderr=subprocess.STDOUT, env=os.environ.copy(), + [sys.executable, "-m", "pipenv.help"], + stderr=subprocess.STDOUT, + env=os.environ.copy(), ) assert output diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 22a96415a..73f5c6ec2 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -15,8 +15,14 @@ DEP_PIP_PAIRS = [ ({"django": ">1.10"}, {"django": "django>1.10"}), ({"Django": ">1.10"}, {"Django": "Django>1.10"}), - ({"requests": {"extras": ["socks"], "version": ">1.10"}}, {"requests": "requests[socks]>1.10"}), - ({"requests": {"extras": ["socks"], "version": "==1.10"}}, {"requests": "requests[socks]==1.10"}), + ( + {"requests": {"extras": ["socks"], "version": ">1.10"}}, + {"requests": "requests[socks]>1.10"}, + ), + ( + {"requests": {"extras": ["socks"], "version": "==1.10"}}, + {"requests": "requests[socks]==1.10"}, + ), ( { "dataclasses-json": { @@ -25,11 +31,20 @@ "editable": True, } }, - {"dataclasses-json": "dataclasses-json@ git+https://github.com/lidatong/dataclasses-json.git@v0.5.7"}, + { + "dataclasses-json": "dataclasses-json@ git+https://github.com/lidatong/dataclasses-json.git@v0.5.7" + }, ), ( - {"dataclasses-json": {"git": "https://github.com/lidatong/dataclasses-json.git", "ref": "v0.5.7"}}, - {"dataclasses-json": "dataclasses-json@ git+https://github.com/lidatong/dataclasses-json.git@v0.5.7"}, + { + "dataclasses-json": { + "git": "https://github.com/lidatong/dataclasses-json.git", + "ref": "v0.5.7", + } + }, + { + "dataclasses-json": "dataclasses-json@ git+https://github.com/lidatong/dataclasses-json.git@v0.5.7" + }, ), ( # Extras in url @@ -39,7 +54,9 @@ "extras": ["pipenv"], } }, - {"dparse": 
"dparse[pipenv] @ https://github.com/oz123/dparse/archive/refs/heads/master.zip"}, + { + "dparse": "dparse[pipenv] @ https://github.com/oz123/dparse/archive/refs/heads/master.zip" + }, ), ( { @@ -50,13 +67,22 @@ "editable": False, } }, - {"requests": "requests[security]@ git+https://github.com/requests/requests.git@main"}, + { + "requests": "requests[security]@ git+https://github.com/requests/requests.git@main" + }, ), ] -def mock_unpack(link, source_dir, download_dir, only_download=False, session=None, - hashes=None, progress_bar="off"): +def mock_unpack( + link, + source_dir, + download_dir, + only_download=False, + session=None, + hashes=None, + progress_bar="off", +): return @@ -95,7 +121,9 @@ def test_convert_deps_to_pip_extras_no_version(): "hash": "sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", } }, - {"FooProject": "FooProject==1.2 --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"}, + { + "FooProject": "FooProject==1.2 --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824" + }, ), ( { @@ -105,7 +133,9 @@ def test_convert_deps_to_pip_extras_no_version(): "hash": "sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824", } }, - {"FooProject": "FooProject[stuff]==1.2 --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"}, + { + "FooProject": "FooProject[stuff]==1.2 --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824" + }, ), ( { @@ -115,7 +145,9 @@ def test_convert_deps_to_pip_extras_no_version(): "extras": ["standard"], } }, - {"uvicorn": "git+https://github.com/encode/uvicorn.git@master#egg=uvicorn[standard]"}, + { + "uvicorn": "git+https://github.com/encode/uvicorn.git@master#egg=uvicorn[standard]" + }, ), ], ) @@ -146,23 +178,35 @@ def test_get_constraints_from_deps(deps, expected): assert dependencies.get_constraints_from_deps(deps) == expected -@pytest.mark.parametrize("line,result", [ - ("-i https://example.com/simple/", ("https://example.com/simple/", None, None, [])), - ("--extra-index-url=https://example.com/simple/", (None, "https://example.com/simple/", None, [])), - ("--trusted-host=example.com", (None, None, "example.com", [])), - ("# -i https://example.com/simple/", (None, None, None, [])), - ("requests # -i https://example.com/simple/", (None, None, None, ["requests"])), -]) +@pytest.mark.parametrize( + "line,result", + [ + ( + "-i https://example.com/simple/", + ("https://example.com/simple/", None, None, []), + ), + ( + "--extra-index-url=https://example.com/simple/", + (None, "https://example.com/simple/", None, []), + ), + ("--trusted-host=example.com", (None, None, "example.com", [])), + ("# -i https://example.com/simple/", (None, None, None, [])), + ("requests # -i https://example.com/simple/", (None, None, None, ["requests"])), + ], +) @pytest.mark.utils def test_parse_indexes(line, result): assert indexes.parse_indexes(line) == result -@pytest.mark.parametrize("line", [ - "-i https://example.com/simple/ --extra-index-url=https://extra.com/simple/", - "--extra-index-url https://example.com/simple/ --trusted-host=example.com", - "requests -i https://example.com/simple/", -]) +@pytest.mark.parametrize( + "line", + [ + "-i https://example.com/simple/ --extra-index-url=https://extra.com/simple/", + "--extra-index-url https://example.com/simple/ --trusted-host=example.com", + "requests -i https://example.com/simple/", + ], +) @pytest.mark.utils def test_parse_indexes_individual_lines(line): with 
pytest.raises(ValueError): @@ -207,6 +251,7 @@ def test_is_required_version(self, version, specified_ver, expected): @pytest.mark.vcs def test_is_vcs(self, entry, expected): from pipenv.utils.requirementslib import is_vcs + assert is_vcs(entry) is expected @pytest.mark.utils @@ -230,12 +275,13 @@ def test_python_version_from_non_python(self): ), ], ) - def test_python_version_output_variants( - self, monkeypatch, version_output, version - ): + def test_python_version_output_variants(self, monkeypatch, version_output, version): def mock_version(path): return version_output.split()[1] - monkeypatch.setattr("pipenv.vendor.pythonfinder.utils.get_python_version", mock_version) + + monkeypatch.setattr( + "pipenv.vendor.pythonfinder.utils.get_python_version", mock_version + ) assert dependencies.python_version("some/path") == version @pytest.mark.utils @@ -255,25 +301,28 @@ def test_download_file(self): os.remove(output) @pytest.mark.utils - @pytest.mark.parametrize('line, expected', [ - ("python", True), - ("python3.7", True), - ("python2.7", True), - ("python2", True), - ("python3", True), - ("pypy3", True), - ("anaconda3-5.3.0", True), - ("which", False), - ("vim", False), - ("miniconda", True), - ("micropython", True), - ("ironpython", True), - ("jython3.5", True), - ("2", True), - ("2.7", True), - ("3.7", True), - ("3", True) - ]) + @pytest.mark.parametrize( + "line, expected", + [ + ("python", True), + ("python3.7", True), + ("python2.7", True), + ("python2", True), + ("python3", True), + ("pypy3", True), + ("anaconda3-5.3.0", True), + ("which", False), + ("vim", False), + ("miniconda", True), + ("micropython", True), + ("ironpython", True), + ("jython3.5", True), + ("2", True), + ("2.7", True), + ("3.7", True), + ("3", True), + ], + ) def test_is_python_command(self, line, expected): assert shell.is_python_command(line) == expected @@ -387,7 +436,10 @@ def test_nix_normalize_drive(self, input_path, expected): ( [ {"url": "https://pypi.org/simple"}, - {"url": "https://custom.example.com:12345/simple", "verify_ssl": False}, + { + "url": "https://custom.example.com:12345/simple", + "verify_ssl": False, + }, ], [ "-i", @@ -444,10 +496,7 @@ def test_nix_normalize_drive(self, input_path, expected): ], ) def test_prepare_pip_source_args(self, sources, expected_args): - assert ( - indexes.prepare_pip_source_args(sources, pip_args=None) - == expected_args - ) + assert indexes.prepare_pip_source_args(sources, pip_args=None) == expected_args @pytest.mark.utils def test_invalid_prepare_pip_source_args(self): @@ -479,7 +528,7 @@ def mock_shutil_which(command, path=None): ("1", True), ("off", False), ("0", False), - ) + ), ) def test_env_to_bool(self, val, expected): actual = shell.env_to_bool(val) diff --git a/tests/unit/test_utils_windows_executable.py b/tests/unit/test_utils_windows_executable.py index 4236ff856..a4ffe71bb 100644 --- a/tests/unit/test_utils_windows_executable.py +++ b/tests/unit/test_utils_windows_executable.py @@ -8,45 +8,45 @@ # This module is run only on Windows. 
pytestmark = pytest.mark.skipif( - os.name != 'nt', + os.name != "nt", reason="only relevant on windows", ) @pytest.mark.utils @pytest.mark.skipif(os.name != "nt", reason="Windows test only") -@mock.patch('os.path.isfile') -@mock.patch('shutil.which') +@mock.patch("os.path.isfile") +@mock.patch("shutil.which") def test_find_windows_executable_when_not_found(mocked_which, mocked_isfile): mocked_isfile.return_value = False mocked_which.return_value = None - found = shell.find_windows_executable('fake/path', 'python') + found = shell.find_windows_executable("fake/path", "python") assert found is None assert mocked_isfile.call_count > 1 - calls = [mock.call('fake\\path\\python')] + [ - mock.call(f'fake\\path\\python{ext.lower()}') - for ext in os.environ['PATHEXT'].split(';') + calls = [mock.call("fake\\path\\python")] + [ + mock.call(f"fake\\path\\python{ext.lower()}") + for ext in os.environ["PATHEXT"].split(";") ] assert mocked_isfile.mock_calls == calls @pytest.mark.utils @pytest.mark.skipif(os.name != "nt", reason="Windows test only") -@mock.patch('os.path.isfile') -@mock.patch('shutil.which') +@mock.patch("os.path.isfile") +@mock.patch("shutil.which") def test_find_windows_executable_when_found(mocked_which, mocked_isfile): mocked_isfile.return_value = False - found_path = '/fake/known/system/path/pyenv' + found_path = "/fake/known/system/path/pyenv" mocked_which.return_value = found_path - found = shell.find_windows_executable('fake/path', 'pyenv') + found = shell.find_windows_executable("fake/path", "pyenv") assert found is found_path assert mocked_isfile.call_count > 1 - calls = [mock.call('fake\\path\\pyenv')] + [ - mock.call(f'fake\\path\\pyenv{ext.lower()}') - for ext in os.environ['PATHEXT'].split(';') + calls = [mock.call("fake\\path\\pyenv")] + [ + mock.call(f"fake\\path\\pyenv{ext.lower()}") + for ext in os.environ["PATHEXT"].split(";") ] assert mocked_isfile.mock_calls == calls diff --git a/tests/unit/test_vendor.py b/tests/unit/test_vendor.py index d6dc09e59..cd5fa7e2d 100644 --- a/tests/unit/test_vendor.py +++ b/tests/unit/test_vendor.py @@ -10,36 +10,39 @@ from pipenv.vendor import tomlkit -@pytest.mark.parametrize('dt, content', [ - ( # Date. - datetime.date(1992, 8, 19), - '1992-08-19', - ), - ( # Naive time. - datetime.time(15, 10), - '15:10:00', - ), - ( # Aware time in UTC. - datetime.time(15, 10, tzinfo=pytz.UTC), - '15:10:00+00:00', - ), - ( # Aware local time. - datetime.time(15, 10, tzinfo=pytz.FixedOffset(8 * 60)), - '15:10:00+08:00', - ), - ( # Naive datetime. - datetime.datetime(1992, 8, 19, 15, 10), - '1992-08-19T15:10:00', - ), - ( # Aware datetime in UTC. - datetime.datetime(1992, 8, 19, 15, 10, tzinfo=pytz.UTC), - '1992-08-19T15:10:00Z', - ), - ( # Aware local datetime. - datetime.datetime(1992, 8, 19, 15, 10, tzinfo=pytz.FixedOffset(8 * 60)), - '1992-08-19T15:10:00+08:00', - ), -]) +@pytest.mark.parametrize( + "dt, content", + [ + ( # Date. + datetime.date(1992, 8, 19), + "1992-08-19", + ), + ( # Naive time. + datetime.time(15, 10), + "15:10:00", + ), + ( # Aware time in UTC. + datetime.time(15, 10, tzinfo=pytz.UTC), + "15:10:00+00:00", + ), + ( # Aware local time. + datetime.time(15, 10, tzinfo=pytz.FixedOffset(8 * 60)), + "15:10:00+08:00", + ), + ( # Naive datetime. + datetime.datetime(1992, 8, 19, 15, 10), + "1992-08-19T15:10:00", + ), + ( # Aware datetime in UTC. + datetime.datetime(1992, 8, 19, 15, 10, tzinfo=pytz.UTC), + "1992-08-19T15:10:00Z", + ), + ( # Aware local datetime. 
+ datetime.datetime(1992, 8, 19, 15, 10, tzinfo=pytz.FixedOffset(8 * 60)), + "1992-08-19T15:10:00+08:00", + ), + ], +) def test_token_date(dt, content): item = tomlkit.item(dt) assert item.as_string() == content