diff --git a/.github/workflows/ruff.yml b/.github/workflows/ruff.yml
index ee3478d2de..07169ae8e4 100644
--- a/.github/workflows/ruff.yml
+++ b/.github/workflows/ruff.yml
@@ -12,4 +12,4 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - run: pip install --user ruff
-      - run: ruff --format=github .
+      - run: ruff --output-format=github .
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 86fffcd92b..4ff2b77b35 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,8 +1,40 @@
+2023.10.20 (2023-10-20)
+=======================
+
+Features & Improvements
+-----------------------
+
+- Add quiet option to pipenv shell, hiding "Launching subshell in virtual environment..." `#5966 `_
+- Vendor in pip==23.3, which includes updates to certifi and urllib3 and adds truststore, among other improvements. `#5979 `_
+
+Behavior Changes
+----------------
+
+- Change ``--py`` to use ``print``, preventing insertion of newline characters `#5969 `_
+
+Vendored Libraries
+------------------
+
+- Drop pep517, as it is no longer used. `#5970 `_
+
+Removals and Deprecations
+-------------------------
+
+- Drop support for Python 3.7 `#5879 `_
+
+
+2023.10.3 (2023-10-03)
+======================
+
+Bug Fixes
+---------
+
+- Even better handling of vcs branch references that contain special characters. `#5934 `_
+- Bump certifi from 2023.5.7 to 2023.7.22 in /examples to address a security vulnerability `#5941 `_
+
+
 2023.9.8 (2023-09-08)
 =====================
 
-Pipenv 2023.9.8 (2023-09-08)
-============================
-
 Bug Fixes
 ---------
 
@@ -12,9 +44,6 @@ Bug Fixes
 2023.9.7 (2023-09-07)
 =====================
 
-Pipenv 2023.9.7 (2023-09-07)
-============================
-
 Features & Improvements
 -----------------------
diff --git a/Makefile b/Makefile
index fe5f493213..e518d9421d 100644
--- a/Makefile
+++ b/Makefile
@@ -4,7 +4,7 @@ venv_dir := $(get_venv_dir)/pipenv_venv
 venv_file := $(CURDIR)/.test_venv
 get_venv_path =$(file < $(venv_file))
 # This is how we will build tag-specific wheels, e.g. py36 or py37
-PY_VERSIONS:= 3.7 3.8 3.9 3.10 3.11
+PY_VERSIONS:= 3.8 3.9 3.10 3.11
 BACKSLASH = '\\'
 # This is how we will build generic wheels, e.g. py2 or py3
 INSTALL_TARGETS := $(addprefix install-py,$(PY_VERSIONS))
@@ -96,7 +96,7 @@ retest: virtualenv submodules test-install
 .PHONY: build
 build: install-virtualenvs.stamp install.stamp
-	PIPENV_PYTHON=3.7 pipenv run python -m build
+	PIPENV_PYTHON=3.8 pipenv run python -m build
 
 .PHONY: update-version
 update-version:
diff --git a/README.md b/README.md
index 1710bb251b..ce90155097 100644
--- a/README.md
+++ b/README.md
@@ -327,3 +327,13 @@ Documentation
 ---------------
 Documentation resides over at [pipenv.pypa.io](https://pipenv.pypa.io/en/latest/).
+
+## Star History
+
+Star History Chart
diff --git a/docs/advanced.md b/docs/advanced.md
index 4c10ce803a..827977ed4b 100644
--- a/docs/advanced.md
+++ b/docs/advanced.md
@@ -234,7 +234,7 @@ Example:
 Note
 
-Each month, [PyUp.io](https://pyup.io>)updates the `safety` database of insecure Python packages and [makes it available to the open source community for free](https://pyup.io/safety/). Each time you run `pipenv check` to show you vulnerable dependencies,
+Each month, [PyUp.io](https://pyup.io) updates the `safety` database of insecure Python packages and [makes it available to the open source community for free](https://pyup.io/safety/). Each time you run `pipenv check` to show you vulnerable dependencies,
 Pipenv makes an API call to retrieve and use those results.
 For more up-to-date vulnerability data, you may also use your own safety API key by setting the environment variable `PIPENV_PYUP_API_KEY`.
 
@@ -269,6 +269,8 @@ Note
 The standard `EDITOR` environment variable is used for this. If you're using VS Code, for example, you'll want to `export EDITOR=code` (if you're on macOS you will want to [install the command](https://code.visualstudio.com/docs/setup/mac#_launching-from-the-command-line) on to your `PATH` first).
 
+## ☤ Automatic Python Installation
+
 This is a very fancy feature, and we're very proud of it:
 
     $ cat Pipfile
diff --git a/docs/pipfile.md b/docs/pipfile.md
index b17a59c3e3..e7f231b6bf 100644
--- a/docs/pipfile.md
+++ b/docs/pipfile.md
@@ -2,7 +2,7 @@
 `Pipfile` contains the specification for the project top-level requirements and any desired specifiers. This file is managed by the developers invoking pipenv commands.
 
-The `Pipfile` uses inline tables and the [TOML Spec](https://github.com/toml-lang/toml#user-content-spec>).
+The `Pipfile` uses inline tables and the [TOML Spec](https://toml.io/en/latest#spec).
 
 `Pipfile.lock` replaces the `requirements.txt` file used in most Python projects and adds security benefits of tracking the packages hashes that were last locked.
@@ -10,6 +10,16 @@
 This file is managed automatically through locking actions.
 
 You should add both `Pipfile` and `Pipfile.lock` to the project's source control.
 
+## `[pipenv]` Directives
+
+`Pipfile` may contain a `[pipenv]` section to control the behaviour of pipenv itself. Some available settings include:
+
+* `allow_prereleases` - Tell pipenv to install pre-release versions of a package, i.e. a version with an alpha/beta/etc. suffix, such as _1.0b1_. Equivalent to passing the `--pre` flag on the command line.
+* `disable_pip_input` - Prevent pipenv from asking for input. Equivalent to the `--no-input` flag.
+* `install_search_all_sources` - Allow installation of packages from an existing `Pipfile.lock` to search all defined indexes for the constrained package version and hash signatures. See [Specifying Package Indexes](indexes.md).
+* `sort_pipfile` - Sort package names alphabetically inside each category. Categories will be sorted and updated on `install` and `uninstall`. This is purely cosmetic to make reading easier for humans, and has no effect on installation order or dependency resolution. Note that `Pipfile.lock` packages are always sorted alphabetically.
+
+
 ## Example Pipfile
 
 Here is a simple example of a `Pipfile` and the resulting `Pipfile.lock`.
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 7079f46f1e..899fc02ad6 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -18,6 +18,6 @@ Sphinx==4.5.0
 sphinx-click==4.4.0
 sphinxcontrib-spelling==7.7.0
 sphinxcontrib-websupport==1.2.4
-urllib3==1.26.14
+urllib3==1.26.18
 virtualenv>=20.20.0
 virtualenv-clone==0.5.7
diff --git a/docs/shell.md b/docs/shell.md
index 4f5dcebccd..743cf89afc 100644
--- a/docs/shell.md
+++ b/docs/shell.md
@@ -43,7 +43,7 @@ To prevent pipenv from loading the `.env` file, set the `PIPENV_DONT_LOAD_ENV` environment variable:
 
     $ PIPENV_DONT_LOAD_ENV=1 pipenv shell
 
-See [theskumar/python-dotenv](https://github.com/theskumar/python-dotenv>) for more information on `.env` files.
+See [theskumar/python-dotenv](https://github.com/theskumar/python-dotenv) for more information on `.env` files.
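Since `.env` loading comes up in both `docs/shell.md` hunks above, a minimal sketch of what the automatic loading amounts to may help. `load_env` is a hypothetical helper for illustration only; pipenv itself delegates to python-dotenv, which also handles quoting, variable interpolation, and multiline values:

```python
import os
from pathlib import Path


def load_env(path: str = ".env") -> None:
    """Hypothetical helper: read simple KEY=VALUE pairs into os.environ."""
    env_file = Path(path)
    if not env_file.is_file():
        return
    for line in env_file.read_text().splitlines():
        line = line.strip()
        # Skip blank lines, comments, and lines without an assignment.
        if not line or line.startswith("#") or "=" not in line:
            continue
        key, _, value = line.partition("=")
        # Existing environment variables keep precedence, matching the
        # default override=False behaviour of python-dotenv.
        os.environ.setdefault(key.strip(), value.strip().strip("'\""))
```

Setting `PIPENV_DONT_LOAD_ENV`, as documented above, simply skips this step.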
## Shell Completion diff --git a/docs/workflows.md b/docs/workflows.md index ddbb69506f..5d790a6d88 100644 --- a/docs/workflows.md +++ b/docs/workflows.md @@ -13,7 +13,7 @@ Add a package to your project, recalibrating entire lock file using the Pipfile $ pipenv install - Note: This will create a `Pipfile` if one doesn't exist. If one does exist, it will automatically be edited with the new package you provided, the lock file updated and the new dependencies installed. -- `pipenv install` is fully compatible with `pip install` [package specifiers](https://pip.pypa.io/en/stable/user_guide/#installing-packages>). +- `pipenv install` is fully compatible with `pip install` [package specifiers](https://pip.pypa.io/en/stable/user_guide/#installing-packages). - Additional arguments may be supplied to `pip` by supplying `pipenv` with `--extra-pip-args`. Update everything (equivalent to `pipenv lock && pipenv sync`): diff --git a/examples/Pipfile.lock b/examples/Pipfile.lock index cc811cc69d..659792494a 100644 --- a/examples/Pipfile.lock +++ b/examples/Pipfile.lock @@ -16,92 +16,107 @@ "default": { "certifi": { "hashes": [ - "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7", - "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716" + "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082", + "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9" ], "markers": "python_version >= '3.6'", - "version": "==2023.5.7" + "version": "==2023.7.22" }, "charset-normalizer": { "hashes": [ - "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6", - "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1", - "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e", - "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373", - "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62", - "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230", - "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be", - "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c", - "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0", - "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448", - "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f", - "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649", - "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d", - "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0", - "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706", - "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a", - "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59", - "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23", - "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5", - "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb", - "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e", - "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e", - "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c", - "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28", - "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d", - 
"sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41", - "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974", - "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce", - "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f", - "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1", - "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d", - "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8", - "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017", - "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31", - "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7", - "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8", - "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e", - "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14", - "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd", - "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d", - "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795", - "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b", - "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b", - "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b", - "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203", - "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f", - "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19", - "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1", - "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a", - "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac", - "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9", - "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0", - "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137", - "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f", - "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6", - "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5", - "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909", - "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f", - "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0", - "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324", - "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755", - "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb", - "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854", - "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c", - "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60", - "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84", - "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0", - "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b", - "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1", - "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531", - "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1", - 
"sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11", - "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326", - "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df", - "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab" + "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843", + "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786", + "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e", + "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8", + "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4", + "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa", + "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d", + "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82", + "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7", + "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895", + "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d", + "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a", + "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382", + "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678", + "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b", + "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e", + "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741", + "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4", + "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596", + "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9", + "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69", + "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c", + "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77", + "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13", + "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459", + "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e", + "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7", + "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908", + "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a", + "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f", + "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8", + "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482", + "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d", + "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d", + "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545", + "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34", + "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86", + "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6", + "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe", + "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e", + "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc", + "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7", + 
"sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd", + "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c", + "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557", + "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a", + "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89", + "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078", + "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e", + "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4", + "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403", + "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0", + "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89", + "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115", + "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9", + "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05", + "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a", + "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec", + "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56", + "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38", + "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479", + "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c", + "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e", + "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd", + "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186", + "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455", + "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c", + "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65", + "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78", + "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287", + "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df", + "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43", + "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1", + "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7", + "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989", + "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a", + "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63", + "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884", + "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649", + "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810", + "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828", + "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4", + "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2", + "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd", + "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5", + "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe", + "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293", + "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e", + 
"sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e", + "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8" ], - "markers": "python_version >= '3.7'", - "version": "==3.1.0" + "markers": "python_full_version >= '3.7.0'", + "version": "==3.3.0" }, "idna": { "hashes": [ @@ -117,15 +132,17 @@ "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==2.31.0" }, "urllib3": { "hashes": [ - "sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc", - "sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e" + "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84", + "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e" ], + "index": "pypi", "markers": "python_version >= '3.7'", - "version": "==2.0.2" + "version": "==2.0.7" } }, "develop": { @@ -147,11 +164,11 @@ }, "setuptools": { "hashes": [ - "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f", - "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102" + "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87", + "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a" ], - "markers": "python_version >= '3.7'", - "version": "==67.8.0" + "markers": "python_version >= '3.8'", + "version": "==68.2.2" } } } diff --git a/pipenv/__version__.py b/pipenv/__version__.py index 1ef3ace11b..1c64fe0866 100644 --- a/pipenv/__version__.py +++ b/pipenv/__version__.py @@ -2,4 +2,4 @@ # // ) ) / / // ) ) //___) ) // ) ) || / / # //___/ / / / //___/ / // // / / || / / # // / / // ((____ // / / ||/ / -__version__ = "2023.9.9.dev0" +__version__ = "2023.10.20.dev0" diff --git a/pipenv/cli/command.py b/pipenv/cli/command.py index 5e7d88a721..57fe060451 100644 --- a/pipenv/cli/command.py +++ b/pipenv/cli/command.py @@ -125,7 +125,7 @@ def cli( do_where(state.project, bare=True) return 0 elif py: - do_py(state.project, ctx=ctx) + do_py(state.project, ctx=ctx, bare=True) return 0 # --support was passed... elif support: @@ -364,16 +364,14 @@ def lock(ctx, state, **kwargs): default=False, help="Always spawn a sub-shell, even if one is already spawned.", ) +@option( + "--quiet", is_flag=True, help="Quiet standard output, except vulnerability report." 
+) @argument("shell_args", nargs=-1) @pypi_mirror_option @python_option @pass_state -def shell( - state, - fancy=False, - shell_args=None, - anyway=False, -): +def shell(state, fancy=False, shell_args=None, anyway=False, quiet=False): """Spawns a shell within the virtualenv.""" from pipenv.routines.shell import do_shell @@ -399,6 +397,7 @@ def shell( fancy=fancy, shell_args=shell_args, pypi_mirror=state.pypi_mirror, + quiet=quiet, ) @@ -520,6 +519,7 @@ def check( output=output, key=key, quiet=quiet, + verbose=state.verbose, exit_code=exit_code, policy_file=policy_file, save_json=save_json, @@ -757,7 +757,7 @@ def requirements( cli() -def do_py(project, ctx=None, system=False): +def do_py(project, ctx=None, system=False, bare=False): if not project.virtualenv_exists: err.print( "[red]No virtualenv has been created for this project[/red] " @@ -767,6 +767,7 @@ def do_py(project, ctx=None, system=False): ctx.abort() try: - console.print(project._which("python", allow_global=system)) + (print if bare else console.print)(project._which("python", allow_global=system)) except AttributeError: console.print("No project found!", style="red") + ctx.abort() diff --git a/pipenv/environment.py b/pipenv/environment.py index 100cfc61e9..9868ab6b21 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -9,6 +9,7 @@ import site import sys import typing +from functools import cached_property from pathlib import Path from sysconfig import get_paths, get_python_version, get_scheme_names from urllib.parse import urlparse @@ -30,14 +31,6 @@ from pipenv.utils.shell import make_posix, temp_environ from pipenv.vendor.pythonfinder.utils import is_in_path -try: - # this is only in Python3.8 and later - from functools import cached_property -except ImportError: - # eventually distlib will remove cached property when they drop Python3.7 - from pipenv.patched.pip._vendor.distlib.util import cached_property - - if typing.TYPE_CHECKING: from types import ModuleType from typing import ContextManager, Generator @@ -776,7 +769,13 @@ def is_satisfied(self, req: InstallRequirement): None, ) if match is not None: - if req.editable and req.link and req.link.is_file: + if req.specifier is not None: + return SpecifierSet(str(req.specifier)).contains( + match.version, prereleases=True + ) + if req.link is None: + return True + elif req.editable and req.link.is_file: requested_path = req.link.file_path if os.path.exists(requested_path): local_path = requested_path @@ -801,12 +800,8 @@ def is_satisfied(self, req: InstallRequirement): and vcs_ref == requested_revision and direct_url_metadata["url"] == pipfile_part[req.link.scheme] ) - elif req.link and req.link.is_vcs: + elif req.link.is_vcs: return False - elif req.specifier is not None: - return SpecifierSet(str(req.specifier)).contains( - match.version, prereleases=True - ) return True return False diff --git a/pipenv/patched/patched.txt b/pipenv/patched/patched.txt index 8a7bcad793..b4051e35cf 100644 --- a/pipenv/patched/patched.txt +++ b/pipenv/patched/patched.txt @@ -1,2 +1,2 @@ -pip==23.2.1 +pip==23.3 safety==2.3.2 diff --git a/pipenv/patched/pip/__init__.py b/pipenv/patched/pip/__init__.py index ed82716787..4259bb5cf9 100644 --- a/pipenv/patched/pip/__init__.py +++ b/pipenv/patched/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.2.1" +__version__ = "23.3" def main(args: Optional[List[str]] = None) -> int: diff --git a/pipenv/patched/pip/_internal/__init__.py b/pipenv/patched/pip/_internal/__init__.py index 565ca37cf4..db29c0654a 
100644 --- a/pipenv/patched/pip/_internal/__init__.py +++ b/pipenv/patched/pip/_internal/__init__.py @@ -1,6 +1,5 @@ from typing import List, Optional -import pipenv.patched.pip._internal.utils.inject_securetransport # noqa from pipenv.patched.pip._internal.utils import _log # init_logging() must be called before any call to logging.getLogger() diff --git a/pipenv/patched/pip/_internal/cache.py b/pipenv/patched/pip/_internal/cache.py index 7a2fd8047f..234f03fc09 100644 --- a/pipenv/patched/pip/_internal/cache.py +++ b/pipenv/patched/pip/_internal/cache.py @@ -78,12 +78,10 @@ def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]: if can_not_cache: return [] - candidates = [] path = self.get_path_for_link(link) if os.path.isdir(path): - for candidate in os.listdir(path): - candidates.append((candidate, path)) - return candidates + return [(candidate, path) for candidate in os.listdir(path)] + return [] def get_path_for_link(self, link: Link) -> str: """Return a directory to store cached items in for link.""" diff --git a/pipenv/patched/pip/_internal/cli/autocompletion.py b/pipenv/patched/pip/_internal/cli/autocompletion.py index 5144fa9ddc..efe5429a94 100644 --- a/pipenv/patched/pip/_internal/cli/autocompletion.py +++ b/pipenv/patched/pip/_internal/cli/autocompletion.py @@ -71,8 +71,9 @@ def autocomplete() -> None: for opt in subcommand.parser.option_list_all: if opt.help != optparse.SUPPRESS_HELP: - for opt_str in opt._long_opts + opt._short_opts: - options.append((opt_str, opt.nargs)) + options += [ + (opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts + ] # filter out previously specified options from available options prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]] diff --git a/pipenv/patched/pip/_internal/cli/base_command.py b/pipenv/patched/pip/_internal/cli/base_command.py index b89f71a3f9..be1a077695 100644 --- a/pipenv/patched/pip/_internal/cli/base_command.py +++ b/pipenv/patched/pip/_internal/cli/base_command.py @@ -181,7 +181,7 @@ def exc_logging_wrapper(*args: Any) -> int: assert isinstance(status, int) return status except DiagnosticPipError as exc: - logger.error("[present-rich] %s", exc) + logger.error("%s", exc, extra={"rich": True}) logger.debug("Exception information:", exc_info=True) return ERROR diff --git a/pipenv/patched/pip/_internal/cli/cmdoptions.py b/pipenv/patched/pip/_internal/cli/cmdoptions.py index dfd4079f96..971af45642 100644 --- a/pipenv/patched/pip/_internal/cli/cmdoptions.py +++ b/pipenv/patched/pip/_internal/cli/cmdoptions.py @@ -92,10 +92,10 @@ def check_dist_restriction(options: Values, check_target: bool = False) -> None: ) if check_target: - if dist_restriction_set and not options.target_dir: + if not options.dry_run and dist_restriction_set and not options.target_dir: raise CommandError( "Can not use any platform or abi specific options unless " - "installing via '--target'" + "installing via '--target' or using '--dry-run'" ) @@ -670,7 +670,10 @@ def prefer_binary() -> Option: dest="prefer_binary", action="store_true", default=False, - help="Prefer older binary packages over newer source packages.", + help=( + "Prefer binary packages over source packages, even if the " + "source packages are newer." 
+ ), ) @@ -823,7 +826,7 @@ def _handle_config_settings( ) -> None: key, sep, val = value.partition("=") if sep != "=": - parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") # noqa + parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") dest = getattr(parser.values, option.dest) if dest is None: dest = {} @@ -918,13 +921,13 @@ def _handle_merge_hash( algo, digest = value.split(":", 1) except ValueError: parser.error( - "Arguments to {} must be a hash name " # noqa + "Arguments to {} must be a hash name " "followed by a value, like --hash=sha256:" "abcde...".format(opt_str) ) if algo not in STRONG_HASHES: parser.error( - "Allowed hash algorithms for {} are {}.".format( # noqa + "Allowed hash algorithms for {} are {}.".format( opt_str, ", ".join(STRONG_HASHES) ) ) diff --git a/pipenv/patched/pip/_internal/cli/parser.py b/pipenv/patched/pip/_internal/cli/parser.py index a1e851cfcf..e3441fb9b0 100644 --- a/pipenv/patched/pip/_internal/cli/parser.py +++ b/pipenv/patched/pip/_internal/cli/parser.py @@ -229,7 +229,7 @@ def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]: val = strtobool(val) except ValueError: self.error( - "{} is not a valid value for {} option, " # noqa + "{} is not a valid value for {} option, " "please specify a boolean value like yes/no, " "true/false or 1/0 instead.".format(val, key) ) @@ -240,7 +240,7 @@ def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]: val = int(val) if not isinstance(val, int) or val < 0: self.error( - "{} is not a valid value for {} option, " # noqa + "{} is not a valid value for {} option, " "please instead specify either a non-negative integer " "or a boolean value like yes/no or false/true " "which is equivalent to 1/0.".format(val, key) diff --git a/pipenv/patched/pip/_internal/cli/req_command.py b/pipenv/patched/pip/_internal/cli/req_command.py index 9333eb45a5..54b798ac6a 100644 --- a/pipenv/patched/pip/_internal/cli/req_command.py +++ b/pipenv/patched/pip/_internal/cli/req_command.py @@ -58,12 +58,9 @@ def _create_truststore_ssl_context() -> Optional["SSLContext"]: return None try: - import truststore - except ImportError: - raise CommandError( - "To use the truststore feature, 'truststore' must be installed into " - "pip's current environment." - ) + from pipenv.patched.pip._vendor import truststore + except ImportError as e: + raise CommandError(f"The truststore feature is unavailable: {e}") return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -123,7 +120,7 @@ def _build_session( ssl_context = None session = PipSession( - cache=os.path.join(cache_dir, "http") if cache_dir else None, + cache=os.path.join(cache_dir, "http-v2") if cache_dir else None, retries=retries if retries is not None else options.retries, trusted_hosts=options.trusted_hosts, index_urls=self._get_index_urls(options), @@ -268,7 +265,7 @@ def determine_resolver_variant(options: Values) -> str: if "legacy-resolver" in options.deprecated_features_enabled: return "legacy" - return "2020-resolver" + return "resolvelib" @classmethod def make_requirement_preparer( @@ -290,7 +287,7 @@ def make_requirement_preparer( legacy_resolver = False resolver_variant = cls.determine_resolver_variant(options) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": lazy_wheel = "fast-deps" in options.features_enabled if lazy_wheel: logger.warning( @@ -352,7 +349,7 @@ def make_resolver( # The long import name and duplicated invocation is needed to convince # Mypy into correctly typechecking. 
Otherwise it would complain the # "Resolver" class being redefined. - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": import pipenv.patched.pip._internal.resolution.resolvelib.resolver return pipenv.patched.pip._internal.resolution.resolvelib.resolver.Resolver( diff --git a/pipenv/patched/pip/_internal/commands/cache.py b/pipenv/patched/pip/_internal/commands/cache.py index 4786dea75e..fd8cb33f23 100644 --- a/pipenv/patched/pip/_internal/commands/cache.py +++ b/pipenv/patched/pip/_internal/commands/cache.py @@ -3,10 +3,10 @@ from optparse import Values from typing import Any, List -import pipenv.patched.pip._internal.utils.filesystem as filesystem from pipenv.patched.pip._internal.cli.base_command import Command from pipenv.patched.pip._internal.cli.status_codes import ERROR, SUCCESS from pipenv.patched.pip._internal.exceptions import CommandError, PipError +from pipenv.patched.pip._internal.utils import filesystem from pipenv.patched.pip._internal.utils.logging import getLogger logger = getLogger(__name__) @@ -93,24 +93,30 @@ def get_cache_info(self, options: Values, args: List[Any]) -> None: num_http_files = len(self._find_http_files(options)) num_packages = len(self._find_wheels(options, "*")) - http_cache_location = self._cache_dir(options, "http") + http_cache_location = self._cache_dir(options, "http-v2") + old_http_cache_location = self._cache_dir(options, "http") wheels_cache_location = self._cache_dir(options, "wheels") - http_cache_size = filesystem.format_directory_size(http_cache_location) + http_cache_size = filesystem.format_size( + filesystem.directory_size(http_cache_location) + + filesystem.directory_size(old_http_cache_location) + ) wheels_cache_size = filesystem.format_directory_size(wheels_cache_location) message = ( textwrap.dedent( """ - Package index page cache location: {http_cache_location} + Package index page cache location (pip v23.3+): {http_cache_location} + Package index page cache location (older pips): {old_http_cache_location} Package index page cache size: {http_cache_size} Number of HTTP files: {num_http_files} Locally built wheels location: {wheels_cache_location} Locally built wheels size: {wheels_cache_size} Number of locally built wheels: {package_count} - """ + """ # noqa: E501 ) .format( http_cache_location=http_cache_location, + old_http_cache_location=old_http_cache_location, http_cache_size=http_cache_size, num_http_files=num_http_files, wheels_cache_location=wheels_cache_location, @@ -151,14 +157,8 @@ def format_for_human(self, files: List[str]) -> None: logger.info("\n".join(sorted(results))) def format_for_abspath(self, files: List[str]) -> None: - if not files: - return - - results = [] - for filename in files: - results.append(filename) - - logger.info("\n".join(sorted(results))) + if files: + logger.info("\n".join(sorted(files))) def remove_cache_items(self, options: Values, args: List[Any]) -> None: if len(args) > 1: @@ -195,8 +195,11 @@ def _cache_dir(self, options: Values, subdir: str) -> str: return os.path.join(options.cache_dir, subdir) def _find_http_files(self, options: Values) -> List[str]: - http_dir = self._cache_dir(options, "http") - return filesystem.find_files(http_dir, "*") + old_http_dir = self._cache_dir(options, "http") + new_http_dir = self._cache_dir(options, "http-v2") + return filesystem.find_files(old_http_dir, "*") + filesystem.find_files( + new_http_dir, "*" + ) def _find_wheels(self, options: Values, pattern: str) -> List[str]: wheel_dir = self._cache_dir(options, "wheels") diff 
--git a/pipenv/patched/pip/_internal/commands/completion.py b/pipenv/patched/pip/_internal/commands/completion.py index ccf3c3d26b..e782195d83 100644 --- a/pipenv/patched/pip/_internal/commands/completion.py +++ b/pipenv/patched/pip/_internal/commands/completion.py @@ -23,9 +23,18 @@ """, "zsh": """ #compdef -P pip[0-9.]# - compadd $( COMP_WORDS="$words[*]" \\ - COMP_CWORD=$((CURRENT-1)) \\ - PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ) + __pip() {{ + compadd $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$((CURRENT-1)) \\ + PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ) + }} + if [[ $zsh_eval_context[-1] == loadautofunc ]]; then + # autoload from fpath, call function directly + __pip "$@" + else + # eval/source/. command, register function for later + compdef __pip -P 'pip[0-9.]#' + fi """, "fish": """ function __fish_complete_pip diff --git a/pipenv/patched/pip/_internal/commands/debug.py b/pipenv/patched/pip/_internal/commands/debug.py index d35a8ec7b9..1fd62cc608 100644 --- a/pipenv/patched/pip/_internal/commands/debug.py +++ b/pipenv/patched/pip/_internal/commands/debug.py @@ -46,22 +46,29 @@ def create_vendor_txt_map() -> Dict[str, str]: return dict(line.split("==", 1) for line in lines) -def get_module_from_module_name(module_name: str) -> ModuleType: +def get_module_from_module_name(module_name: str) -> Optional[ModuleType]: # Module name can be uppercase in vendor.txt for some reason... module_name = module_name.lower().replace("-", "_") # PATCH: setuptools is actually only pkg_resources. if module_name == "setuptools": module_name = "pkg_resources" - __import__(f"pipenv.patched.pip._vendor.{module_name}", globals(), locals(), level=0) - return getattr(pipenv.patched.pip._vendor, module_name) + try: + __import__(f"pipenv.patched.pip._vendor.{module_name}", globals(), locals(), level=0) + return getattr(pipenv.patched.pip._vendor, module_name) + except ImportError: + # We allow 'truststore' to fail to import due + # to being unavailable on Python 3.9 and earlier. + if module_name == "truststore" and sys.version_info < (3, 10): + return None + raise def get_vendor_version_from_module(module_name: str) -> Optional[str]: module = get_module_from_module_name(module_name) version = getattr(module, "__version__", None) - if not version: + if module and not version: # Try to find version in debundled module info. assert module.__file__ is not None env = get_environment([os.path.dirname(module.__file__)]) @@ -105,7 +112,7 @@ def show_tags(options: Values) -> None: tag_limit = 10 target_python = make_target_python(options) - tags = target_python.get_tags() + tags = target_python.get_sorted_tags() # Display the target options that were explicitly provided. 
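The tolerant import in `get_module_from_module_name` above generalizes to a small pattern: allow one known-optional module to be missing, and re-raise for everything else. A standalone sketch (using `importlib` and the same `truststore` example; this mirrors the intent of the change, not pip's exact code):

```python
import importlib
import sys
from types import ModuleType
from typing import Optional


def import_optional(module_name: str) -> Optional[ModuleType]:
    try:
        return importlib.import_module(module_name)
    except ImportError:
        # truststore needs the ssl APIs added in Python 3.10, so its
        # absence is tolerated on older interpreters; any other missing
        # module is still an error.
        if module_name == "truststore" and sys.version_info < (3, 10):
            return None
        raise
```

Callers then have to handle `None`, which is why `get_vendor_version_from_module` above now guards with `if module and not version`.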
formatted_target = target_python.format_given() @@ -134,10 +141,7 @@ def show_tags(options: Values) -> None: def ca_bundle_info(config: Configuration) -> str: - levels = set() - for key, _ in config.items(): - levels.add(key.split(".")[0]) - + levels = {key.split(".", 1)[0] for key, _ in config.items()} if not levels: return "Not specified" diff --git a/pipenv/patched/pip/_internal/commands/install.py b/pipenv/patched/pip/_internal/commands/install.py index 5bff8fbe3f..0ef79de0c7 100644 --- a/pipenv/patched/pip/_internal/commands/install.py +++ b/pipenv/patched/pip/_internal/commands/install.py @@ -501,7 +501,7 @@ def run(self, options: Values, args: List[str]) -> int: show_traceback, options.use_user_site, ) - logger.error(message, exc_info=show_traceback) # noqa + logger.error(message, exc_info=show_traceback) return ERROR @@ -595,7 +595,7 @@ def _warn_about_conflicts( "source of the following dependency conflicts." ) else: - assert resolver_variant == "2020-resolver" + assert resolver_variant == "resolvelib" parts.append( "pip's dependency resolver does not currently take into account " "all the packages that are installed. This behaviour is the " @@ -628,7 +628,7 @@ def _warn_about_conflicts( requirement=req, dep_name=dep_name, dep_version=dep_version, - you=("you" if resolver_variant == "2020-resolver" else "you'll"), + you=("you" if resolver_variant == "resolvelib" else "you'll"), ) parts.append(message) diff --git a/pipenv/patched/pip/_internal/commands/list.py b/pipenv/patched/pip/_internal/commands/list.py index e9cfb92b79..c629efb797 100644 --- a/pipenv/patched/pip/_internal/commands/list.py +++ b/pipenv/patched/pip/_internal/commands/list.py @@ -297,7 +297,7 @@ def output_package_listing_columns( # Create and add a separator. if len(data) > 0: - pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes))) + pkg_strings.insert(1, " ".join("-" * x for x in sizes)) for val in pkg_strings: write_output(val) diff --git a/pipenv/patched/pip/_internal/distributions/base.py b/pipenv/patched/pip/_internal/distributions/base.py index 9adaf29a59..24100618d5 100644 --- a/pipenv/patched/pip/_internal/distributions/base.py +++ b/pipenv/patched/pip/_internal/distributions/base.py @@ -1,4 +1,5 @@ import abc +from typing import Optional from pipenv.patched.pip._internal.index.package_finder import PackageFinder from pipenv.patched.pip._internal.metadata.base import BaseDistribution @@ -19,12 +20,23 @@ class AbstractDistribution(metaclass=abc.ABCMeta): - we must be able to create a Distribution object exposing the above metadata. + + - if we need to do work in the build tracker, we must be able to generate a unique + string to identify the requirement in the build tracker. """ def __init__(self, req: InstallRequirement) -> None: super().__init__() self.req = req + @abc.abstractproperty + def build_tracker_id(self) -> Optional[str]: + """A string that uniquely identifies this requirement to the build tracker. 
+ + If None, then this dist has no work to do in the build tracker, and + ``.prepare_distribution_metadata()`` will not be called.""" + raise NotImplementedError() + @abc.abstractmethod def get_metadata_distribution(self) -> BaseDistribution: raise NotImplementedError() diff --git a/pipenv/patched/pip/_internal/distributions/installed.py b/pipenv/patched/pip/_internal/distributions/installed.py index 18b6bf9fdc..d21bc46ed9 100644 --- a/pipenv/patched/pip/_internal/distributions/installed.py +++ b/pipenv/patched/pip/_internal/distributions/installed.py @@ -1,3 +1,5 @@ +from typing import Optional + from pipenv.patched.pip._internal.distributions.base import AbstractDistribution from pipenv.patched.pip._internal.index.package_finder import PackageFinder from pipenv.patched.pip._internal.metadata import BaseDistribution @@ -10,6 +12,10 @@ class InstalledDistribution(AbstractDistribution): been computed. """ + @property + def build_tracker_id(self) -> Optional[str]: + return None + def get_metadata_distribution(self) -> BaseDistribution: assert self.req.satisfied_by is not None, "not actually installed" return self.req.satisfied_by diff --git a/pipenv/patched/pip/_internal/distributions/sdist.py b/pipenv/patched/pip/_internal/distributions/sdist.py index 55574189ff..31b2594fe2 100644 --- a/pipenv/patched/pip/_internal/distributions/sdist.py +++ b/pipenv/patched/pip/_internal/distributions/sdist.py @@ -1,5 +1,5 @@ import logging -from typing import Iterable, Set, Tuple +from typing import Iterable, Optional, Set, Tuple from pipenv.patched.pip._internal.build_env import BuildEnvironment from pipenv.patched.pip._internal.distributions.base import AbstractDistribution @@ -18,6 +18,12 @@ class SourceDistribution(AbstractDistribution): generated, either using PEP 517 or using the legacy `setup.py egg_info`. """ + @property + def build_tracker_id(self) -> Optional[str]: + """Identify this requirement uniquely by its link.""" + assert self.req.link + return self.req.link.url_without_fragment + def get_metadata_distribution(self) -> BaseDistribution: return self.req.get_dist() diff --git a/pipenv/patched/pip/_internal/distributions/wheel.py b/pipenv/patched/pip/_internal/distributions/wheel.py index c859f6b13f..ff8ac95f36 100644 --- a/pipenv/patched/pip/_internal/distributions/wheel.py +++ b/pipenv/patched/pip/_internal/distributions/wheel.py @@ -1,3 +1,5 @@ +from typing import Optional + from pipenv.patched.pip._vendor.packaging.utils import canonicalize_name from pipenv.patched.pip._internal.distributions.base import AbstractDistribution @@ -15,6 +17,10 @@ class WheelDistribution(AbstractDistribution): This does not need any preparation as wheels can be directly unpacked. 
""" + @property + def build_tracker_id(self) -> Optional[str]: + return None + def get_metadata_distribution(self) -> BaseDistribution: """Loads the metadata from the wheel file into memory and returns a Distribution that uses it, not relying on the wheel file or diff --git a/pipenv/patched/pip/_internal/index/package_finder.py b/pipenv/patched/pip/_internal/index/package_finder.py index cc81f35caf..31218b0678 100644 --- a/pipenv/patched/pip/_internal/index/package_finder.py +++ b/pipenv/patched/pip/_internal/index/package_finder.py @@ -202,7 +202,7 @@ def evaluate_link(self, link: Link) -> Tuple[LinkType, str]: reason = f"wrong project name (not {self.project_name})" return (LinkType.different_project, reason) - supported_tags = self._target_python.get_tags() + supported_tags = self._target_python.get_unsorted_tags() if not wheel.supported(supported_tags) and not self._ignore_compatibility: # Include the wheel's tags in the reason string to # simplify troubleshooting compatibility issues. @@ -418,7 +418,7 @@ def create( if specifier is None: specifier = specifiers.SpecifierSet() - supported_tags = target_python.get_tags() + supported_tags = target_python.get_sorted_tags() return cls( project_name=project_name, @@ -494,7 +494,7 @@ def get_applicable_candidates( def _sort_key( self, candidate: InstallationCandidate, - ignore_compatibility: bool = True + ignore_compatibility: bool = True, ) -> CandidateSortingKey: """ Function to pass as the `key` argument to a call to sorted() to sort diff --git a/pipenv/patched/pip/_internal/locations/_distutils.py b/pipenv/patched/pip/_internal/locations/_distutils.py index 6222b77cb3..43bea7a09e 100644 --- a/pipenv/patched/pip/_internal/locations/_distutils.py +++ b/pipenv/patched/pip/_internal/locations/_distutils.py @@ -89,7 +89,7 @@ def distutils_scheme( # finalize_options(); we only want to override here if the user # has explicitly requested it hence going back to the config if "install_lib" in d.get_option_dict("install"): - scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) + scheme.update({"purelib": i.install_lib, "platlib": i.install_lib}) if running_under_virtualenv(): if home: diff --git a/pipenv/patched/pip/_internal/metadata/__init__.py b/pipenv/patched/pip/_internal/metadata/__init__.py index f6ac515bc4..d3e763dff8 100644 --- a/pipenv/patched/pip/_internal/metadata/__init__.py +++ b/pipenv/patched/pip/_internal/metadata/__init__.py @@ -9,7 +9,7 @@ from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel if TYPE_CHECKING: - from typing import Protocol + from typing import Literal, Protocol else: Protocol = object @@ -50,6 +50,7 @@ def _should_use_importlib_metadata() -> bool: class Backend(Protocol): + NAME: 'Literal["importlib", "pkg_resources"]' Distribution: Type[BaseDistribution] Environment: Type[BaseEnvironment] diff --git a/pipenv/patched/pip/_internal/metadata/base.py b/pipenv/patched/pip/_internal/metadata/base.py index 375c3fddee..1c76704a5a 100644 --- a/pipenv/patched/pip/_internal/metadata/base.py +++ b/pipenv/patched/pip/_internal/metadata/base.py @@ -24,7 +24,7 @@ from pipenv.patched.pip._vendor.packaging.requirements import Requirement from pipenv.patched.pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet -from pipenv.patched.pip._vendor.packaging.utils import NormalizedName +from pipenv.patched.pip._vendor.packaging.utils import NormalizedName, canonicalize_name from pipenv.patched.pip._vendor.packaging.version import LegacyVersion, Version from 
pipenv.patched.pip._internal.exceptions import NoneMetadataError @@ -37,7 +37,6 @@ from pipenv.patched.pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here. from pipenv.patched.pip._internal.utils.egg_link import egg_link_path_from_sys_path from pipenv.patched.pip._internal.utils.misc import is_local, normalize_path -from pipenv.patched.pip._internal.utils.packaging import safe_extra from pipenv.patched.pip._internal.utils.urls import url_to_path from ._json import msg_to_json @@ -460,6 +459,19 @@ def iter_provided_extras(self) -> Iterable[str]: For modern .dist-info distributions, this is the collection of "Provides-Extra:" entries in distribution metadata. + + The return value of this function is not particularly useful other than + display purposes due to backward compatibility issues and the extra + names being poorly normalized prior to PEP 685. If you want to perform + logic operations on extras, use :func:`is_extra_provided` instead. + """ + raise NotImplementedError() + + def is_extra_provided(self, extra: str) -> bool: + """Check whether an extra is provided by this distribution. + + This is needed mostly for compatibility issues with pkg_resources not + following the extra normalization rules defined in PEP 685. """ raise NotImplementedError() @@ -537,10 +549,11 @@ def _iter_egg_info_extras(self) -> Iterable[str]: """Get extras from the egg-info directory.""" known_extras = {""} for entry in self._iter_requires_txt_entries(): - if entry.extra in known_extras: + extra = canonicalize_name(entry.extra) + if extra in known_extras: continue - known_extras.add(entry.extra) - yield entry.extra + known_extras.add(extra) + yield extra def _iter_egg_info_dependencies(self) -> Iterable[str]: """Get distribution dependencies from the egg-info directory. @@ -556,10 +569,11 @@ def _iter_egg_info_dependencies(self) -> Iterable[str]: all currently available PEP 517 backends, although not standardized. 
""" for entry in self._iter_requires_txt_entries(): - if entry.extra and entry.marker: - marker = f'({entry.marker}) and extra == "{safe_extra(entry.extra)}"' - elif entry.extra: - marker = f'extra == "{safe_extra(entry.extra)}"' + extra = canonicalize_name(entry.extra) + if extra and entry.marker: + marker = f'({entry.marker}) and extra == "{extra}"' + elif extra: + marker = f'extra == "{extra}"' elif entry.marker: marker = entry.marker else: diff --git a/pipenv/patched/pip/_internal/metadata/importlib/__init__.py b/pipenv/patched/pip/_internal/metadata/importlib/__init__.py index 5e7af9fe52..a779138db1 100644 --- a/pipenv/patched/pip/_internal/metadata/importlib/__init__.py +++ b/pipenv/patched/pip/_internal/metadata/importlib/__init__.py @@ -1,4 +1,6 @@ from ._dists import Distribution from ._envs import Environment -__all__ = ["Distribution", "Environment"] +__all__ = ["NAME", "Distribution", "Environment"] + +NAME = "importlib" diff --git a/pipenv/patched/pip/_internal/metadata/importlib/_dists.py b/pipenv/patched/pip/_internal/metadata/importlib/_dists.py index 9dc0f3549b..7698a5f557 100644 --- a/pipenv/patched/pip/_internal/metadata/importlib/_dists.py +++ b/pipenv/patched/pip/_internal/metadata/importlib/_dists.py @@ -27,7 +27,6 @@ Wheel, ) from pipenv.patched.pip._internal.utils.misc import normalize_path -from pipenv.patched.pip._internal.utils.packaging import safe_extra from pipenv.patched.pip._internal.utils.temp_dir import TempDirectory from pipenv.patched.pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file @@ -208,12 +207,16 @@ def _metadata_impl(self) -> email.message.Message: return cast(email.message.Message, self._dist.metadata) def iter_provided_extras(self) -> Iterable[str]: - return ( - safe_extra(extra) for extra in self.metadata.get_all("Provides-Extra", []) + return self.metadata.get_all("Provides-Extra", []) + + def is_extra_provided(self, extra: str) -> bool: + return any( + canonicalize_name(provided_extra) == canonicalize_name(extra) + for provided_extra in self.metadata.get_all("Provides-Extra", []) ) def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: - contexts: Sequence[Dict[str, str]] = [{"extra": safe_extra(e)} for e in extras] + contexts: Sequence[Dict[str, str]] = [{"extra": e} for e in extras] for req_string in self.metadata.get_all("Requires-Dist", []): req = Requirement(req_string) if not req.marker: diff --git a/pipenv/patched/pip/_internal/metadata/importlib/_envs.py b/pipenv/patched/pip/_internal/metadata/importlib/_envs.py index 5bf08036f8..cab85a68ef 100644 --- a/pipenv/patched/pip/_internal/metadata/importlib/_envs.py +++ b/pipenv/patched/pip/_internal/metadata/importlib/_envs.py @@ -151,7 +151,8 @@ def _emit_egg_deprecation(location: Optional[str]) -> None: deprecated( reason=f"Loading egg at {location} is deprecated.", replacement="to use pip for package installation.", - gone_in="23.3", + gone_in="24.3", + issue=12330, ) diff --git a/pipenv/patched/pip/_internal/metadata/pkg_resources.py b/pipenv/patched/pip/_internal/metadata/pkg_resources.py index dcb2bc04c0..d2fbb771a0 100644 --- a/pipenv/patched/pip/_internal/metadata/pkg_resources.py +++ b/pipenv/patched/pip/_internal/metadata/pkg_resources.py @@ -24,8 +24,12 @@ Wheel, ) +__all__ = ["NAME", "Distribution", "Environment"] + logger = logging.getLogger(__name__) +NAME = "pkg_resources" + class EntryPoint(NamedTuple): name: str @@ -212,12 +216,16 @@ def _metadata_impl(self) -> email.message.Message: def iter_dependencies(self, extras: 
Collection[str] = ()) -> Iterable[Requirement]: if extras: # pkg_resources raises on invalid extras, so we sanitize. - extras = frozenset(extras).intersection(self._dist.extras) + extras = frozenset(pkg_resources.safe_extra(e) for e in extras) + extras = extras.intersection(self._dist.extras) return self._dist.requires(extras) def iter_provided_extras(self) -> Iterable[str]: return self._dist.extras + def is_extra_provided(self, extra: str) -> bool: + return pkg_resources.safe_extra(extra) in self._dist.extras + class Environment(BaseEnvironment): def __init__(self, ws: pkg_resources.WorkingSet) -> None: diff --git a/pipenv/patched/pip/_internal/models/installation_report.py b/pipenv/patched/pip/_internal/models/installation_report.py index f362606897..d163107da4 100644 --- a/pipenv/patched/pip/_internal/models/installation_report.py +++ b/pipenv/patched/pip/_internal/models/installation_report.py @@ -23,6 +23,9 @@ def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]: # includes editable requirements), and false if the requirement was # downloaded from a PEP 503 index or --find-links. "is_direct": ireq.is_direct, + # is_yanked is true if the requirement was yanked from the index, but + # was still selected by pip to conform to PEP 592. + "is_yanked": ireq.link.is_yanked if ireq.link else False, # requested is true if the requirement was specified by the user (aka # top level requirement), and false if it was installed as a dependency of a # requirement. https://peps.python.org/pep-0376/#requested @@ -33,7 +36,7 @@ def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]: } if ireq.user_supplied and ireq.extras: # For top level requirements, the list of requested extras, if any. - res["requested_extras"] = list(sorted(ireq.extras)) + res["requested_extras"] = sorted(ireq.extras) return res def to_dict(self) -> Dict[str, Any]: diff --git a/pipenv/patched/pip/_internal/models/target_python.py b/pipenv/patched/pip/_internal/models/target_python.py index 135899841e..39908604dd 100644 --- a/pipenv/patched/pip/_internal/models/target_python.py +++ b/pipenv/patched/pip/_internal/models/target_python.py @@ -1,5 +1,5 @@ import sys -from typing import List, Optional, Tuple +from typing import List, Optional, Set, Tuple from pipenv.patched.pip._vendor.packaging.tags import Tag @@ -22,6 +22,7 @@ class TargetPython: "py_version", "py_version_info", "_valid_tags", + "_valid_tags_set", ] def __init__( @@ -61,8 +62,9 @@ def __init__( self.py_version = py_version self.py_version_info = py_version_info - # This is used to cache the return value of get_tags(). + # This is used to cache the return value of get_(un)sorted_tags. self._valid_tags: Optional[List[Tag]] = None + self._valid_tags_set: Optional[Set[Tag]] = None def format_given(self) -> str: """ @@ -84,7 +86,7 @@ def format_given(self) -> str: f"{key}={value!r}" for key, value in key_values if value is not None ) - def get_tags(self) -> List[Tag]: + def get_sorted_tags(self) -> List[Tag]: """ Return the supported PEP 425 tags to check wheel candidates against. @@ -108,3 +110,13 @@ def get_tags(self) -> List[Tag]: self._valid_tags = tags return self._valid_tags + + def get_unsorted_tags(self) -> Set[Tag]: + """Exactly the same as get_sorted_tags, but returns a set. + + This is important for performance. 
+ """ + if self._valid_tags_set is None: + self._valid_tags_set = set(self.get_sorted_tags()) + + return self._valid_tags_set diff --git a/pipenv/patched/pip/_internal/network/cache.py b/pipenv/patched/pip/_internal/network/cache.py index 0ad637691c..ab5c5ee32d 100644 --- a/pipenv/patched/pip/_internal/network/cache.py +++ b/pipenv/patched/pip/_internal/network/cache.py @@ -3,10 +3,11 @@ import os from contextlib import contextmanager -from typing import Generator, Optional +from datetime import datetime +from typing import BinaryIO, Generator, Optional, Union -from pipenv.patched.pip._vendor.cachecontrol.cache import BaseCache -from pipenv.patched.pip._vendor.cachecontrol.caches import FileCache +from pipenv.patched.pip._vendor.cachecontrol.cache import SeparateBodyBaseCache +from pipenv.patched.pip._vendor.cachecontrol.caches import SeparateBodyFileCache from pipenv.patched.pip._vendor.requests.models import Response from pipenv.patched.pip._internal.utils.filesystem import adjacent_tmp_file, replace @@ -28,7 +29,7 @@ def suppressed_cache_errors() -> Generator[None, None, None]: pass -class SafeFileCache(BaseCache): +class SafeFileCache(SeparateBodyBaseCache): """ A file based cache which is safe to use even when the target directory may not be accessible or writable. @@ -43,7 +44,7 @@ def _get_cache_path(self, name: str) -> str: # From cachecontrol.caches.file_cache.FileCache._fn, brought into our # class for backwards-compatibility and to avoid using a non-public # method. - hashed = FileCache.encode(name) + hashed = SeparateBodyFileCache.encode(name) parts = list(hashed[:5]) + [hashed] return os.path.join(self.directory, *parts) @@ -53,17 +54,33 @@ def get(self, key: str) -> Optional[bytes]: with open(path, "rb") as f: return f.read() - def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None: - path = self._get_cache_path(key) + def _write(self, path: str, data: bytes) -> None: with suppressed_cache_errors(): ensure_dir(os.path.dirname(path)) with adjacent_tmp_file(path) as f: - f.write(value) + f.write(data) replace(f.name, path) + def set( + self, key: str, value: bytes, expires: Union[int, datetime, None] = None + ) -> None: + path = self._get_cache_path(key) + self._write(path, value) + def delete(self, key: str) -> None: path = self._get_cache_path(key) with suppressed_cache_errors(): os.remove(path) + with suppressed_cache_errors(): + os.remove(path + ".body") + + def get_body(self, key: str) -> Optional[BinaryIO]: + path = self._get_cache_path(key) + ".body" + with suppressed_cache_errors(): + return open(path, "rb") + + def set_body(self, key: str, body: bytes) -> None: + path = self._get_cache_path(key) + ".body" + self._write(path, body) diff --git a/pipenv/patched/pip/_internal/operations/build/build_tracker.py b/pipenv/patched/pip/_internal/operations/build/build_tracker.py index d7e22c51b9..7bc4eaba65 100644 --- a/pipenv/patched/pip/_internal/operations/build/build_tracker.py +++ b/pipenv/patched/pip/_internal/operations/build/build_tracker.py @@ -51,10 +51,22 @@ def get_build_tracker() -> Generator["BuildTracker", None, None]: yield tracker +class TrackerId(str): + """Uniquely identifying string provided to the build tracker.""" + + class BuildTracker: + """Ensure that an sdist cannot request itself as a setup requirement. + + When an sdist is prepared, it identifies its setup requirements in the + context of ``BuildTracker.track()``. If a requirement shows up recursively, this + raises an exception. 
+ + This stops fork bombs embedded in malicious packages.""" + def __init__(self, root: str) -> None: self._root = root - self._entries: Set[InstallRequirement] = set() + self._entries: Dict[TrackerId, InstallRequirement] = {} logger.debug("Created build tracker: %s", self._root) def __enter__(self) -> "BuildTracker": @@ -69,16 +81,15 @@ def __exit__( ) -> None: self.cleanup() - def _entry_path(self, link: Link) -> str: - hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest() + def _entry_path(self, key: TrackerId) -> str: + hashed = hashlib.sha224(key.encode()).hexdigest() return os.path.join(self._root, hashed) - def add(self, req: InstallRequirement) -> None: + def add(self, req: InstallRequirement, key: TrackerId) -> None: """Add an InstallRequirement to build tracking.""" - assert req.link # Get the file to write information about this requirement. - entry_path = self._entry_path(req.link) + entry_path = self._entry_path(key) # Try reading from the file. If it exists and can be read from, a build # is already in progress, so a LookupError is raised. @@ -92,33 +103,37 @@ def add(self, req: InstallRequirement) -> None: raise LookupError(message) # If we're here, req should really not be building already. - assert req not in self._entries + assert key not in self._entries # Start tracking this requirement. with open(entry_path, "w", encoding="utf-8") as fp: fp.write(str(req)) - self._entries.add(req) + self._entries[key] = req logger.debug("Added %s to build tracker %r", req, self._root) - def remove(self, req: InstallRequirement) -> None: + def remove(self, req: InstallRequirement, key: TrackerId) -> None: """Remove an InstallRequirement from build tracking.""" - assert req.link - # Delete the created file and the corresponding entries. - os.unlink(self._entry_path(req.link)) - self._entries.remove(req) + # Delete the created file and the corresponding entry. + os.unlink(self._entry_path(key)) + del self._entries[key] logger.debug("Removed %s from build tracker %r", req, self._root) def cleanup(self) -> None: - for req in set(self._entries): - self.remove(req) + for key, req in list(self._entries.items()): + self.remove(req, key) logger.debug("Removed build tracker: %r", self._root) @contextlib.contextmanager - def track(self, req: InstallRequirement) -> Generator[None, None, None]: - self.add(req) + def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]: + """Ensure that `key` cannot install itself as a setup requirement. 
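# A minimal sketch of the separate-body idea behind the SafeFileCache changes
# above: HTTP metadata and the (potentially large) response body live in two
# files, so the body can be streamed back without parsing the metadata blob.
# The file layout here is illustrative, not pip's exact on-disk scheme.
import os
from typing import BinaryIO, Optional


class SeparateBodyCacheSketch:
    def __init__(self, directory: str) -> None:
        self.directory = directory
        os.makedirs(directory, exist_ok=True)

    def _path(self, key: str) -> str:
        return os.path.join(self.directory, key)

    def set(self, key: str, metadata: bytes) -> None:
        with open(self._path(key), "wb") as f:
            f.write(metadata)

    def set_body(self, key: str, body: bytes) -> None:
        # The body sits next to the metadata file, under a ".body" suffix.
        with open(self._path(key) + ".body", "wb") as f:
            f.write(body)

    def get_body(self, key: str) -> Optional[BinaryIO]:
        try:
            return open(self._path(key) + ".body", "rb")
        except OSError:
            return None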
+ + :raises LookupError: If `key` was already provided in a parent invocation of + the context introduced by this method.""" + tracker_id = TrackerId(key) + self.add(req, tracker_id) yield - self.remove(req) + self.remove(req, tracker_id) diff --git a/pipenv/patched/pip/_internal/operations/check.py b/pipenv/patched/pip/_internal/operations/check.py index 0edb2b5923..60d2c3b568 100644 --- a/pipenv/patched/pip/_internal/operations/check.py +++ b/pipenv/patched/pip/_internal/operations/check.py @@ -168,7 +168,7 @@ def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None: f"release a version with a conforming version number" ), issue=12063, - gone_in="23.3", + gone_in="24.0", ) for dep in package_details.dependencies: if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier): @@ -183,5 +183,5 @@ def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None: f"release a version with a conforming dependency specifiers" ), issue=12063, - gone_in="23.3", + gone_in="24.0", ) diff --git a/pipenv/patched/pip/_internal/operations/install/wheel.py b/pipenv/patched/pip/_internal/operations/install/wheel.py index 320fed210d..6d1b005798 100644 --- a/pipenv/patched/pip/_internal/operations/install/wheel.py +++ b/pipenv/patched/pip/_internal/operations/install/wheel.py @@ -267,9 +267,9 @@ def get_csv_rows_for_installed( path = _fs_to_record_path(f, lib_dir) digest, length = rehash(f) installed_rows.append((path, digest, length)) - for installed_record_path in installed.values(): - installed_rows.append((installed_record_path, "", "")) - return installed_rows + return installed_rows + [ + (installed_record_path, "", "") for installed_record_path in installed.values() + ] def get_console_script_specs(console: Dict[str, str]) -> List[str]: diff --git a/pipenv/patched/pip/_internal/operations/prepare.py b/pipenv/patched/pip/_internal/operations/prepare.py index 09cfc65207..48d14a4bf5 100644 --- a/pipenv/patched/pip/_internal/operations/prepare.py +++ b/pipenv/patched/pip/_internal/operations/prepare.py @@ -4,10 +4,10 @@ # The following comment should be removed at some point in the future. 
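# The recursion guard in BuildTracker.track above, reduced to its core with
# hypothetical names: entering the same key twice on the stack raises
# LookupError, which is what stops a malicious sdist from listing itself as
# its own setup requirement. pip's real tracker also persists entries to
# files so concurrent pip processes see each other.
import contextlib
from typing import Dict, Generator


class MiniTracker:
    def __init__(self) -> None:
        self._entries: Dict[str, str] = {}

    @contextlib.contextmanager
    def track(self, description: str, key: str) -> Generator[None, None, None]:
        if key in self._entries:
            raise LookupError(f"{description} is already being built")
        self._entries[key] = description
        try:
            yield
        finally:
            del self._entries[key]


tracker = MiniTracker()
with tracker.track("demo-sdist", "sdist-key"):
    pass  # a nested tracker.track("...", "sdist-key") here would raise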
# mypy: strict-optional=False -import logging import mimetypes import os import shutil +from pathlib import Path from typing import Dict, Iterable, List, Optional from pipenv.patched.pip._vendor.packaging.utils import canonicalize_name @@ -21,7 +21,6 @@ InstallationError, MetadataInconsistent, NetworkConnectionError, - PreviousBuildDirError, VcsHashUnsupported, ) from pipenv.patched.pip._internal.index.package_finder import PackageFinder @@ -37,6 +36,7 @@ from pipenv.patched.pip._internal.network.session import PipSession from pipenv.patched.pip._internal.operations.build.build_tracker import BuildTracker from pipenv.patched.pip._internal.req.req_install import InstallRequirement +from pipenv.patched.pip._internal.utils._log import getLogger from pipenv.patched.pip._internal.utils.direct_url_helpers import ( direct_url_for_editable, direct_url_from_link, @@ -47,13 +47,13 @@ display_path, hash_file, hide_url, - is_installable_dir, + redact_auth_from_requirement, ) from pipenv.patched.pip._internal.utils.temp_dir import TempDirectory from pipenv.patched.pip._internal.utils.unpacking import unpack_file from pipenv.patched.pip._internal.vcs import vcs -logger = logging.getLogger(__name__) +logger = getLogger(__name__) def _get_prepared_distribution( @@ -65,10 +65,12 @@ def _get_prepared_distribution( ) -> BaseDistribution: """Prepare a distribution for installation.""" abstract_dist = make_distribution_for_install_requirement(req) - with build_tracker.track(req): - abstract_dist.prepare_distribution_metadata( - finder, build_isolation, check_build_deps - ) + tracker_id = abstract_dist.build_tracker_id + if tracker_id is not None: + with build_tracker.track(req, tracker_id): + abstract_dist.prepare_distribution_metadata( + finder, build_isolation, check_build_deps + ) return abstract_dist.get_metadata_distribution() @@ -276,7 +278,7 @@ def _log_preparing_link(self, req: InstallRequirement) -> None: information = str(display_path(req.link.file_path)) else: message = "Collecting %s" - information = str(req.req or req) + information = redact_auth_from_requirement(req.req) if req.req else str(req) # If we used req.req, inject requirement source if available (this # would already be included if we used req directly) @@ -317,21 +319,7 @@ def _ensure_link_req_src_dir( autodelete=True, parallel_builds=parallel_builds, ) - - # If a checkout exists, it's unwise to keep going. version - # inconsistencies are logged later, but do not fail the - # installation. - # FIXME: this won't upgrade when there's an existing - # package unpacked in `req.source_dir` - # TODO: this check is now probably dead code - if is_installable_dir(req.source_dir): - raise PreviousBuildDirError( - "pip can't proceed with requirements '{}' due to a" - "pre-existing build directory ({}). This is likely " - "due to a previous installation that failed . pip is " - "being responsible and not assuming it can delete this. 
" - "Please delete it and try again.".format(req, req.source_dir) - ) + req.ensure_pristine_source_checkout() def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes: # By the time this is called, the requirement's link should have @@ -394,7 +382,7 @@ def _fetch_metadata_using_link_data_attr( if metadata_link is None: return None assert req.req is not None - logger.info( + logger.verbose( "Obtaining dependency information for %s from %s", req.req, metadata_link, @@ -479,20 +467,19 @@ def _complete_partial_requirements( for link, (filepath, _) in batch_download: logger.debug("Downloading link %s to %s", link, filepath) req = links_to_fully_download[link] + # Record the downloaded file path so wheel reqs can extract a Distribution + # in .get_dist(). req.local_file_path = filepath - # TODO: This needs fixing for sdists - # This is an emergency fix for #11847, which reports that - # distributions get downloaded twice when metadata is loaded - # from a PEP 658 standalone metadata file. Setting _downloaded - # fixes this for wheels, but breaks the sdist case (tests - # test_download_metadata). As PyPI is currently only serving - # metadata for wheels, this is not an immediate issue. - # Fixing the problem properly looks like it will require a - # complete refactoring of the `prepare_linked_requirements_more` - # logic, and I haven't a clue where to start on that, so for now - # I have fixed the issue *just* for wheels. - if req.is_wheel: - self._downloaded[req.link.url] = filepath + # Record that the file is downloaded so we don't do it again in + # _prepare_linked_requirement(). + self._downloaded[req.link.url] = filepath + + # If this is an sdist, we need to unpack it after downloading, but the + # .source_dir won't be set up until we are in _prepare_linked_requirement(). + # Add the downloaded archive to the install requirement to unpack after + # preparing the source dir. + if not req.is_wheel: + req.needs_unpacked_archive(Path(filepath)) # This step is necessary to ensure all lazy wheels are processed # successfully by the 'download', 'wheel', and 'install' commands. diff --git a/pipenv/patched/pip/_internal/req/constructors.py b/pipenv/patched/pip/_internal/req/constructors.py index 2c0be02850..f5bbf5473d 100644 --- a/pipenv/patched/pip/_internal/req/constructors.py +++ b/pipenv/patched/pip/_internal/req/constructors.py @@ -8,10 +8,11 @@ InstallRequirement. """ +import copy import logging import os import re -from typing import Dict, List, Optional, Set, Tuple, Union +from typing import Collection, Dict, List, Optional, Set, Tuple, Union from pipenv.patched.pip._vendor.packaging.markers import Marker from pipenv.patched.pip._vendor.packaging.requirements import InvalidRequirement, Requirement @@ -57,6 +58,31 @@ def convert_extras(extras: Optional[str]) -> Set[str]: return get_requirement("placeholder" + extras.lower()).extras +def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requirement: + """ + Returns a new requirement based on the given one, with the supplied extras. If the + given requirement already has extras those are replaced (or dropped if no new extras + are given). 
+ """ + match: Optional[re.Match[str]] = re.fullmatch( + # see https://peps.python.org/pep-0508/#complete-grammar + r"([\w\t .-]+)(\[[^\]]*\])?(.*)", + str(req), + flags=re.ASCII, + ) + # ireq.req is a valid requirement so the regex should always match + assert ( + match is not None + ), f"regex match on requirement {req} failed, this should never happen" + pre: Optional[str] = match.group(1) + post: Optional[str] = match.group(3) + assert ( + pre is not None and post is not None + ), f"regex group selection for requirement {req} failed, this should never happen" + extras: str = "[%s]" % ",".join(sorted(new_extras)) if new_extras else "" + return Requirement(f"{pre}{extras}{post}") + + def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]: """Parses an editable requirement into: - a requirement name @@ -504,3 +530,47 @@ def install_req_from_link_and_ireq( config_settings=ireq.config_settings, user_supplied=ireq.user_supplied, ) + + +def install_req_drop_extras(ireq: InstallRequirement) -> InstallRequirement: + """ + Creates a new InstallationRequirement using the given template but without + any extras. Sets the original requirement as the new one's parent + (comes_from). + """ + return InstallRequirement( + req=( + _set_requirement_extras(ireq.req, set()) if ireq.req is not None else None + ), + comes_from=ireq, + editable=ireq.editable, + link=ireq.link, + markers=ireq.markers, + use_pep517=ireq.use_pep517, + isolated=ireq.isolated, + global_options=ireq.global_options, + hash_options=ireq.hash_options, + constraint=ireq.constraint, + extras=[], + config_settings=ireq.config_settings, + user_supplied=ireq.user_supplied, + permit_editable_wheels=ireq.permit_editable_wheels, + ) + + +def install_req_extend_extras( + ireq: InstallRequirement, + extras: Collection[str], +) -> InstallRequirement: + """ + Returns a copy of an installation requirement with some additional extras. + Makes a shallow copy of the ireq object. + """ + result = copy.copy(ireq) + result.extras = {*ireq.extras, *extras} + result.req = ( + _set_requirement_extras(ireq.req, result.extras) + if ireq.req is not None + else None + ) + return result diff --git a/pipenv/patched/pip/_internal/req/req_install.py b/pipenv/patched/pip/_internal/req/req_install.py index 8e461520ce..ab94540b80 100644 --- a/pipenv/patched/pip/_internal/req/req_install.py +++ b/pipenv/patched/pip/_internal/req/req_install.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. 
-# mypy: strict-optional=False - import functools import logging import os @@ -9,6 +6,7 @@ import uuid import zipfile from optparse import Values +from pathlib import Path from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union from pipenv.patched.pip._vendor.packaging.markers import Marker @@ -20,7 +18,7 @@ from pipenv.patched.pip._vendor.pyproject_hooks import BuildBackendHookCaller from pipenv.patched.pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment -from pipenv.patched.pip._internal.exceptions import InstallationError +from pipenv.patched.pip._internal.exceptions import InstallationError, PreviousBuildDirError from pipenv.patched.pip._internal.locations import get_scheme from pipenv.patched.pip._internal.metadata import ( BaseDistribution, @@ -50,11 +48,14 @@ backup_dir, display_path, hide_url, + is_installable_dir, + redact_auth_from_requirement, redact_auth_from_url, ) from pipenv.patched.pip._internal.utils.packaging import safe_extra from pipenv.patched.pip._internal.utils.subprocess import runner_with_spinner_message from pipenv.patched.pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds +from pipenv.patched.pip._internal.utils.unpacking import unpack_file from pipenv.patched.pip._internal.utils.virtualenv import running_under_virtualenv from pipenv.patched.pip._internal.vcs import vcs @@ -128,7 +129,7 @@ def __init__( if extras: self.extras = extras elif req: - self.extras = {safe_extra(extra) for extra in req.extras} + self.extras = req.extras else: self.extras = set() if markers is None and req: @@ -183,9 +184,12 @@ def __init__( # This requirement needs more preparation before it can be built self.needs_more_preparation = False + # This requirement needs to be unpacked before it can be installed. + self._archive_source: Optional[Path] = None + def __str__(self) -> str: if self.req: - s = str(self.req) + s = redact_auth_from_requirement(self.req) if self.link: s += " from {}".format(redact_auth_from_url(self.link.url)) elif self.link: @@ -244,6 +248,7 @@ def supports_pyproject_editable(self) -> bool: @property def specifier(self) -> SpecifierSet: + assert self.req is not None return self.req.specifier @property @@ -257,7 +262,8 @@ def is_pinned(self) -> bool: For example, some-package==1.2 is pinned; some-package>1.2 is not. """ - specifiers = self.specifier + assert self.req is not None + specifiers = self.req.specifier return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="} def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool: @@ -267,7 +273,12 @@ def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> boo extras_requested = ("",) if self.markers is not None: return any( - self.markers.evaluate({"extra": extra}) for extra in extras_requested + self.markers.evaluate({"extra": extra}) + # TODO: Remove these two variants when packaging is upgraded to + # support the marker comparison logic specified in PEP 685. 
+ or self.markers.evaluate({"extra": safe_extra(extra)}) + or self.markers.evaluate({"extra": canonicalize_name(extra)}) + for extra in extras_requested ) else: return True @@ -305,6 +316,7 @@ def hashes(self, trust_internet: bool = True) -> Hashes: else: link = None if link and link.hash: + assert link.hash_name is not None good_hashes.setdefault(link.hash_name, []).append(link.hash) return Hashes(good_hashes) @@ -314,6 +326,7 @@ def from_path(self) -> Optional[str]: return None s = str(self.req) if self.comes_from: + comes_from: Optional[str] if isinstance(self.comes_from, str): comes_from = self.comes_from else: @@ -345,7 +358,7 @@ def ensure_build_location( # When parallel builds are enabled, add a UUID to the build directory # name so multiple builds do not interfere with each other. - dir_name: str = canonicalize_name(self.name) + dir_name: str = canonicalize_name(self.req.name) if parallel_builds: dir_name = f"{dir_name}_{uuid.uuid4().hex}" @@ -388,6 +401,7 @@ def _set_requirement(self) -> None: ) def warn_on_mismatching_name(self) -> None: + assert self.req is not None metadata_name = canonicalize_name(self.metadata["Name"]) if canonicalize_name(self.req.name) == metadata_name: # Everything is fine. @@ -457,6 +471,7 @@ def is_wheel_from_cache(self) -> bool: # Things valid for sdists @property def unpacked_source_directory(self) -> str: + assert self.source_dir, f"No source dir for {self}" return os.path.join( self.source_dir, self.link and self.link.subdirectory_fragment or "" ) @@ -500,7 +515,7 @@ def load_pyproject_toml(self) -> None: "to use --use-pep517 or add a " "pyproject.toml file to the project" ), - gone_in="23.3", + gone_in="24.0", ) self.use_pep517 = False return @@ -544,7 +559,7 @@ def prepare_metadata(self) -> None: Under PEP 517 and PEP 660, call the backend hook to prepare the metadata. Under legacy processing, call setup.py egg-info. """ - assert self.source_dir + assert self.source_dir, f"No source dir for {self}" details = self.name or f"from {self.link}" if self.use_pep517: @@ -593,8 +608,10 @@ def get_dist(self) -> BaseDistribution: if self.metadata_directory: return get_directory_distribution(self.metadata_directory) elif self.local_file_path and self.is_wheel: + assert self.req is not None return get_wheel_distribution( - FilesystemWheel(self.local_file_path), canonicalize_name(self.name) + FilesystemWheel(self.local_file_path), + canonicalize_name(self.req.name), ) raise AssertionError( f"InstallRequirement {self} has no metadata directory and no wheel: " @@ -602,9 +619,9 @@ def get_dist(self) -> BaseDistribution: ) def assert_source_matches_version(self) -> None: - assert self.source_dir + assert self.source_dir, f"No source dir for {self}" version = self.metadata["version"] - if self.req.specifier and version not in self.req.specifier: + if self.req and self.req.specifier and version not in self.req.specifier: logger.warning( "Requested %s, but installing version %s", self, @@ -641,6 +658,27 @@ def ensure_has_source_dir( parallel_builds=parallel_builds, ) + def needs_unpacked_archive(self, archive_source: Path) -> None: + assert self._archive_source is None + self._archive_source = archive_source + + def ensure_pristine_source_checkout(self) -> None: + """Ensure the source directory has not yet been built in.""" + assert self.source_dir is not None + if self._archive_source is not None: + unpack_file(str(self._archive_source), self.source_dir) + elif is_installable_dir(self.source_dir): + # If a checkout exists, it's unwise to keep going. 
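# Why the match_markers fallback above evaluates three spellings of each
# extra: until PEP 685 comparison logic lands in the vendored packaging,
# 'extra == "..."' markers are compared as plain strings, and metadata in the
# wild mixes raw, safe_extra, and canonicalized forms. The safe_extra regex is
# inlined here; output assumes a packaging release without PEP 685 support.
import re

from packaging.markers import Marker
from packaging.utils import canonicalize_name


def safe_extra(extra: str) -> str:
    return re.sub(r"[^A-Za-z0-9.-]+", "_", extra).lower()


marker = Marker('extra == "test-extra"')  # canonical spelling in the metadata
requested = "Test_Extra"                  # spelling the user happened to type
print([
    marker.evaluate({"extra": variant})
    for variant in (requested, safe_extra(requested), canonicalize_name(requested))
])  # [False, False, True] -- only the canonicalized variant matches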
+ # version inconsistencies are logged later, but do not fail + # the installation. + raise PreviousBuildDirError( + f"pip can't proceed with requirements '{self}' due to a " + f"pre-existing build directory ({self.source_dir}). This is likely " + "due to a previous installation that failed . pip is " + "being responsible and not assuming it can delete this. " + "Please delete it and try again." + ) + # For editable installations def update_editable(self) -> None: if not self.link: @@ -697,9 +735,10 @@ def _clean_zip_name(name: str, prefix: str) -> str: name = name.replace(os.path.sep, "/") return name + assert self.req is not None path = os.path.join(parentdir, path) name = _clean_zip_name(path, rootdir) - return self.name + "/" + name + return self.req.name + "/" + name def archive(self, build_dir: Optional[str]) -> None: """Saves archive to provided build_dir. @@ -778,8 +817,9 @@ def install( use_user_site: bool = False, pycompile: bool = True, ) -> None: + assert self.req is not None scheme = get_scheme( - self.name, + self.req.name, user=use_user_site, home=home, root=root, @@ -793,7 +833,7 @@ def install( prefix=prefix, home=home, use_user_site=use_user_site, - name=self.name, + name=self.req.name, setup_py_path=self.setup_py_path, isolated=self.isolated, build_env=self.build_env, @@ -806,7 +846,7 @@ def install( assert self.local_file_path install_wheel( - self.name, + self.req.name, self.local_file_path, scheme=scheme, req_description=str(self.req), @@ -866,7 +906,7 @@ def check_legacy_setup_py_options( reason="--build-option and --global-option are deprecated.", issue=11859, replacement="to use --config-settings", - gone_in="23.3", + gone_in="24.0", ) logger.warning( "Implying --no-binary=:all: due to the presence of " diff --git a/pipenv/patched/pip/_internal/req/req_set.py b/pipenv/patched/pip/_internal/req/req_set.py index 793a36cc53..f3dab449c3 100644 --- a/pipenv/patched/pip/_internal/req/req_set.py +++ b/pipenv/patched/pip/_internal/req/req_set.py @@ -99,7 +99,7 @@ def warn_legacy_versions_and_specifiers(self) -> None: "or contact the package author to fix the version number" ), issue=12063, - gone_in="23.3", + gone_in="24.0", ) for dep in req.get_dist().iter_dependencies(): if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier): @@ -115,5 +115,5 @@ def warn_legacy_versions_and_specifiers(self) -> None: "or contact the package author to fix the version number" ), issue=12063, - gone_in="23.3", + gone_in="24.0", ) diff --git a/pipenv/patched/pip/_internal/req/req_uninstall.py b/pipenv/patched/pip/_internal/req/req_uninstall.py index fd839386e4..c670c524de 100644 --- a/pipenv/patched/pip/_internal/req/req_uninstall.py +++ b/pipenv/patched/pip/_internal/req/req_uninstall.py @@ -274,7 +274,7 @@ def stash(self, path: str) -> str: def commit(self) -> None: """Commits the uninstall by removing stashed files.""" - for _, save_dir in self._save_dirs.items(): + for save_dir in self._save_dirs.values(): save_dir.cleanup() self._moves = [] self._save_dirs = {} diff --git a/pipenv/patched/pip/_internal/resolution/resolvelib/base.py b/pipenv/patched/pip/_internal/resolution/resolvelib/base.py index 01e8b3b9c2..34e7b60850 100644 --- a/pipenv/patched/pip/_internal/resolution/resolvelib/base.py +++ b/pipenv/patched/pip/_internal/resolution/resolvelib/base.py @@ -1,7 +1,7 @@ from typing import FrozenSet, Iterable, Optional, Tuple, Union from pipenv.patched.pip._vendor.packaging.specifiers import SpecifierSet -from pipenv.patched.pip._vendor.packaging.utils import NormalizedName, 
canonicalize_name +from pipenv.patched.pip._vendor.packaging.utils import NormalizedName from pipenv.patched.pip._vendor.packaging.version import LegacyVersion, Version from pipenv.patched.pip._internal.models.link import Link, links_equivalent @@ -12,11 +12,11 @@ CandidateVersion = Union[LegacyVersion, Version] -def format_name(project: str, extras: FrozenSet[str]) -> str: +def format_name(project: NormalizedName, extras: FrozenSet[NormalizedName]) -> str: if not extras: return project - canonical_extras = sorted(canonicalize_name(e) for e in extras) - return "{}[{}]".format(project, ",".join(canonical_extras)) + extras_expr = ",".join(sorted(extras)) + return f"{project}[{extras_expr}]" class Constraint: diff --git a/pipenv/patched/pip/_internal/resolution/resolvelib/candidates.py b/pipenv/patched/pip/_internal/resolution/resolvelib/candidates.py index 19364aaf22..44829151cf 100644 --- a/pipenv/patched/pip/_internal/resolution/resolvelib/candidates.py +++ b/pipenv/patched/pip/_internal/resolution/resolvelib/candidates.py @@ -241,7 +241,7 @@ def _prepare(self) -> BaseDistribution: def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: requires = self.dist.iter_dependencies() if with_requires else () for r in requires: - yield self._factory.make_requirement_from_spec(str(r), self._ireq) + yield from self._factory.make_requirements_from_spec(str(r), self._ireq) yield self._factory.make_requires_python_requirement(self.dist.requires_python) def get_install_requirement(self) -> Optional[InstallRequirement]: @@ -396,7 +396,7 @@ def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requiremen if not with_requires: return for r in self.dist.iter_dependencies(): - yield self._factory.make_requirement_from_spec(str(r), self._ireq) + yield from self._factory.make_requirements_from_spec(str(r), self._ireq) def get_install_requirement(self) -> Optional[InstallRequirement]: return None @@ -431,9 +431,28 @@ def __init__( self, base: BaseCandidate, extras: FrozenSet[str], + *, + comes_from: Optional[InstallRequirement] = None, ) -> None: + """ + :param comes_from: the InstallRequirement that led to this candidate if it + differs from the base's InstallRequirement. This will often be the + case in the sense that this candidate's requirement has the extras + while the base's does not. Unlike the InstallRequirement backed + candidates, this requirement is used solely for reporting purposes, + it does not do any leg work. + """ self.base = base - self.extras = extras + self.extras = frozenset(canonicalize_name(e) for e in extras) + # If any extras are requested in their non-normalized forms, keep track + # of their raw values. This is needed when we look up dependencies + # since PEP 685 has not been implemented for marker-matching, and using + # the non-normalized extra for lookup ensures the user can select a + # non-normalized extra in a package with its non-normalized form. + # TODO: Remove this attribute when packaging is upgraded to support the + # marker comparison logic specified in PEP 685. 
+ self._unnormalized_extras = extras.difference(self.extras) + self._comes_from = comes_from if comes_from is not None else self.base._ireq def __str__(self) -> str: name, rest = str(self.base).split(" ", 1) @@ -484,6 +503,50 @@ def is_editable(self) -> bool: def source_link(self) -> Optional[Link]: return self.base.source_link + def _warn_invalid_extras( + self, + requested: FrozenSet[str], + valid: FrozenSet[str], + ) -> None: + """Emit warnings for invalid extras being requested. + + This emits a warning for each requested extra that is not in the + candidate's ``Provides-Extra`` list. + """ + invalid_extras_to_warn = frozenset( + extra + for extra in requested + if extra not in valid + # If an extra is requested in an unnormalized form, skip warning + # about the normalized form being missing. + and extra in self.extras + ) + if not invalid_extras_to_warn: + return + for extra in sorted(invalid_extras_to_warn): + logger.warning( + "%s %s does not provide the extra '%s'", + self.base.name, + self.version, + extra, + ) + + def _calculate_valid_requested_extras(self) -> FrozenSet[str]: + """Get a list of valid extras requested by this candidate. + + The user (or upstream dependant) may have specified extras that the + candidate doesn't support. Any unsupported extras are dropped, and each + cause a warning to be logged here. + """ + requested_extras = self.extras.union(self._unnormalized_extras) + valid_extras = frozenset( + extra + for extra in requested_extras + if self.base.dist.is_extra_provided(extra) + ) + self._warn_invalid_extras(requested_extras, valid_extras) + return valid_extras + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: factory = self.base._factory @@ -493,24 +556,13 @@ def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requiremen if not with_requires: return - # The user may have specified extras that the candidate doesn't - # support. We ignore any unsupported extras here. 
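# The validation flow of _calculate_valid_requested_extras and
# _warn_invalid_extras above, as a flat sketch with hypothetical stand-ins for
# the candidate metadata: requested extras are normalized, matched against
# Provides-Extra, and each genuinely unknown extra gets exactly one warning.
import logging

from packaging.utils import canonicalize_name

logger = logging.getLogger("demo")

provides_extra = ["socks", "use-chardet-on-py3"]  # candidate's Provides-Extra
requested = frozenset({"socks", "Brotli"})        # what the user asked for

normalized = frozenset(canonicalize_name(e) for e in requested)
valid = frozenset(
    e for e in normalized if any(canonicalize_name(p) == e for p in provides_extra)
)
for extra in sorted(normalized - valid):
    logger.warning("demo-package 1.0 does not provide the extra '%s'", extra)
print(sorted(valid))  # ['socks']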
- valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras()) - invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras()) - for extra in sorted(invalid_extras): - logger.warning( - "%s %s does not provide the extra '%s'", - self.base.name, - self.version, - extra, - ) - + valid_extras = self._calculate_valid_requested_extras() for r in self.base.dist.iter_dependencies(valid_extras): - requirement = factory.make_requirement_from_spec( - str(r), self.base._ireq, valid_extras + yield from factory.make_requirements_from_spec( + str(r), + self._comes_from, + valid_extras, ) - if requirement: - yield requirement def get_install_requirement(self) -> Optional[InstallRequirement]: # We don't return anything here, because we always diff --git a/pipenv/patched/pip/_internal/resolution/resolvelib/factory.py b/pipenv/patched/pip/_internal/resolution/resolvelib/factory.py index 81d43e59d2..fb8274a59b 100644 --- a/pipenv/patched/pip/_internal/resolution/resolvelib/factory.py +++ b/pipenv/patched/pip/_internal/resolution/resolvelib/factory.py @@ -62,6 +62,7 @@ ExplicitRequirement, RequiresPythonRequirement, SpecifierRequirement, + SpecifierWithoutExtrasRequirement, UnsatisfiableRequirement, ) @@ -112,7 +113,7 @@ def __init__( self._editable_candidate_cache: Cache[EditableCandidate] = {} self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {} self._extras_candidate_cache: Dict[ - Tuple[int, FrozenSet[str]], ExtrasCandidate + Tuple[int, FrozenSet[NormalizedName]], ExtrasCandidate ] = {} if not ignore_installed: @@ -132,19 +133,23 @@ def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None: if not link.is_wheel: return wheel = Wheel(link.filename) - if wheel.supported(self._finder.target_python.get_tags()): + if wheel.supported(self._finder.target_python.get_unsorted_tags()): return msg = f"{link.filename} is not a supported wheel on this platform." raise UnsupportedWheel(msg) def _make_extras_candidate( - self, base: BaseCandidate, extras: FrozenSet[str] + self, + base: BaseCandidate, + extras: FrozenSet[str], + *, + comes_from: Optional[InstallRequirement] = None, ) -> ExtrasCandidate: - cache_key = (id(base), extras) + cache_key = (id(base), frozenset(canonicalize_name(e) for e in extras)) try: candidate = self._extras_candidate_cache[cache_key] except KeyError: - candidate = ExtrasCandidate(base, extras) + candidate = ExtrasCandidate(base, extras, comes_from=comes_from) self._extras_candidate_cache[cache_key] = candidate return candidate @@ -161,7 +166,7 @@ def _make_candidate_from_dist( self._installed_candidate_cache[dist.canonical_name] = base if not extras: return base - return self._make_extras_candidate(base, extras) + return self._make_extras_candidate(base, extras, comes_from=template) def _make_candidate_from_link( self, @@ -223,7 +228,7 @@ def _make_candidate_from_link( if not extras: return base - return self._make_extras_candidate(base, extras) + return self._make_extras_candidate(base, extras, comes_from=template) def _iter_found_candidates( self, @@ -385,16 +390,21 @@ def find_candidates( if ireq is not None: ireqs.append(ireq) - # If the current identifier contains extras, add explicit candidates - # from entries from extra-less identifier. + # If the current identifier contains extras, add requires and explicit + # candidates from entries from extra-less identifier. 
with contextlib.suppress(InvalidRequirement): parsed_requirement = get_requirement(identifier) - explicit_candidates.update( - self._iter_explicit_candidates_from_base( - requirements.get(parsed_requirement.name, ()), - frozenset(parsed_requirement.extras), - ), - ) + if parsed_requirement.name != identifier: + explicit_candidates.update( + self._iter_explicit_candidates_from_base( + requirements.get(parsed_requirement.name, ()), + frozenset(parsed_requirement.extras), + ), + ) + for req in requirements.get(parsed_requirement.name, []): + _, ireq = req.get_candidate_lookup() + if ireq is not None: + ireqs.append(ireq) # Add explicit candidates from constraints. We only do this if there are # known ireqs, which represent requirements not already explicit. If @@ -437,37 +447,49 @@ def find_candidates( and all(req.is_satisfied_by(c) for req in requirements[identifier]) ) - def _make_requirement_from_install_req( + def _make_requirements_from_install_req( self, ireq: InstallRequirement, requested_extras: Iterable[str] - ) -> Optional[Requirement]: + ) -> Iterator[Requirement]: + """ + Returns requirement objects associated with the given InstallRequirement. In + most cases this will be a single object but the following special cases exist: + - the InstallRequirement has markers that do not apply -> result is empty + - the InstallRequirement has both a constraint and extras -> result is split + in two requirement objects: one with the constraint and one with the + extra. This allows centralized constraint handling for the base, + resulting in fewer candidate rejections. + """ if not ireq.match_markers(requested_extras): logger.info( "Ignoring %s: markers '%s' don't match your environment", ireq.name, ireq.markers, ) - return None - if not ireq.link: - return SpecifierRequirement(ireq) - self._fail_if_link_is_unsupported_wheel(ireq.link) - cand = self._make_candidate_from_link( - ireq.link, - extras=frozenset(ireq.extras), - template=ireq, - name=canonicalize_name(ireq.name) if ireq.name else None, - version=None, - ) - if cand is None: - # There's no way we can satisfy a URL requirement if the underlying - # candidate fails to build. An unnamed URL must be user-supplied, so - # we fail eagerly. If the URL is named, an unsatisfiable requirement - # can make the resolver do the right thing, either backtrack (and - # maybe find some other requirement that's buildable) or raise a - # ResolutionImpossible eventually. - if not ireq.name: - raise self._build_failures[ireq.link] - return UnsatisfiableRequirement(canonicalize_name(ireq.name)) - return self.make_requirement_from_candidate(cand) + elif not ireq.link: + if ireq.extras and ireq.req is not None and ireq.req.specifier: + yield SpecifierWithoutExtrasRequirement(ireq) + yield SpecifierRequirement(ireq) + else: + self._fail_if_link_is_unsupported_wheel(ireq.link) + cand = self._make_candidate_from_link( + ireq.link, + extras=frozenset(ireq.extras), + template=ireq, + name=canonicalize_name(ireq.name) if ireq.name else None, + version=None, + ) + if cand is None: + # There's no way we can satisfy a URL requirement if the underlying + # candidate fails to build. An unnamed URL must be user-supplied, so + # we fail eagerly. If the URL is named, an unsatisfiable requirement + # can make the resolver do the right thing, either backtrack (and + # maybe find some other requirement that's buildable) or raise a + # ResolutionImpossible eventually. 
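# The constraint/extras split described in the docstring above, sketched with
# plain packaging objects: "pkg[extra]>=1.0" becomes a constrained base
# requirement plus the extras-carrying one, so the version constraint is
# enforced once on the base rather than on every extras variant.
from typing import List

from packaging.requirements import Requirement


def split_requirement(req_str: str) -> List[Requirement]:
    req = Requirement(req_str)
    if not (req.extras and req.specifier):
        return [req]
    return [Requirement(f"{req.name}{req.specifier}"), req]


for r in split_requirement("requests[socks]>=2.31"):
    print(r)  # requests>=2.31, then requests[socks]>=2.31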
+ if not ireq.name: + raise self._build_failures[ireq.link] + yield UnsatisfiableRequirement(canonicalize_name(ireq.name)) + else: + yield self.make_requirement_from_candidate(cand) def collect_root_requirements( self, root_ireqs: List[InstallRequirement] @@ -488,15 +510,27 @@ def collect_root_requirements( else: collected.constraints[name] = Constraint.from_ireq(ireq) else: - req = self._make_requirement_from_install_req( - ireq, - requested_extras=(), + reqs = list( + self._make_requirements_from_install_req( + ireq, + requested_extras=(), + ) ) - if req is None: + if not reqs: continue - if ireq.user_supplied and req.name not in collected.user_requested: - collected.user_requested[req.name] = i - collected.requirements.append(req) + template = reqs[0] + if ireq.user_supplied and template.name not in collected.user_requested: + collected.user_requested[template.name] = i + collected.requirements.extend(reqs) + # Put requirements with extras at the end of the root requires. This does not + # affect resolvelib's picking preference but it does affect its initial criteria + # population: by putting extras at the end we enable the candidate finder to + # present resolvelib with a smaller set of candidates to resolvelib, already + # taking into account any non-transient constraints on the associated base. This + # means resolvelib will have fewer candidates to visit and reject. + # Python's list sort is stable, meaning relative order is kept for objects with + # the same key. + collected.requirements.sort(key=lambda r: r.name != r.project_name) return collected def make_requirement_from_candidate( @@ -504,14 +538,23 @@ def make_requirement_from_candidate( ) -> ExplicitRequirement: return ExplicitRequirement(candidate) - def make_requirement_from_spec( + def make_requirements_from_spec( self, specifier: str, comes_from: Optional[InstallRequirement], requested_extras: Iterable[str] = (), - ) -> Optional[Requirement]: + ) -> Iterator[Requirement]: + """ + Returns requirement objects associated with the given specifier. In most cases + this will be a single object but the following special cases exist: + - the specifier has markers that do not apply -> result is empty + - the specifier has both a constraint and extras -> result is split + in two requirement objects: one with the constraint and one with the + extra. This allows centralized constraint handling for the base, + resulting in fewer candidate rejections. + """ ireq = self._make_install_req_from_spec(specifier, comes_from) - return self._make_requirement_from_install_req(ireq, requested_extras) + return self._make_requirements_from_install_req(ireq, requested_extras) def make_requires_python_requirement( self, @@ -603,8 +646,26 @@ def _report_single_requirement_conflict( cands = self._finder.find_all_candidates(req.project_name) skipped_by_requires_python = self._finder.requires_python_skipped_reasons() - versions = [str(v) for v in sorted({c.version for c in cands})] + versions_set: Set[CandidateVersion] = set() + yanked_versions_set: Set[CandidateVersion] = set() + for c in cands: + is_yanked = c.link.is_yanked if c.link else False + if is_yanked: + yanked_versions_set.add(c.version) + else: + versions_set.add(c.version) + + versions = [str(v) for v in sorted(versions_set)] + yanked_versions = [str(v) for v in sorted(yanked_versions_set)] + + if yanked_versions: + # Saying "version X is yanked" isn't entirely accurate. 
+ # https://github.com/pypa/pip/issues/11745#issuecomment-1402805842 + logger.critical( + "Ignored the following yanked versions: %s", + ", ".join(yanked_versions) or "none", + ) if skipped_by_requires_python: logger.critical( "Ignored the following versions that require a different python " diff --git a/pipenv/patched/pip/_internal/resolution/resolvelib/requirements.py b/pipenv/patched/pip/_internal/resolution/resolvelib/requirements.py index 2518b97d4b..0b52617558 100644 --- a/pipenv/patched/pip/_internal/resolution/resolvelib/requirements.py +++ b/pipenv/patched/pip/_internal/resolution/resolvelib/requirements.py @@ -1,6 +1,7 @@ from pipenv.patched.pip._vendor.packaging.specifiers import SpecifierSet from pipenv.patched.pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pipenv.patched.pip._internal.req.constructors import install_req_drop_extras from pipenv.patched.pip._internal.req.req_install import InstallRequirement from .base import Candidate, CandidateLookup, Requirement, format_name @@ -43,7 +44,7 @@ class SpecifierRequirement(Requirement): def __init__(self, ireq: InstallRequirement) -> None: assert ireq.link is None, "This is a link, not a specifier" self._ireq = ireq - self._extras = frozenset(ireq.extras) + self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras) def __str__(self) -> str: return str(self._ireq.req) @@ -92,6 +93,18 @@ def is_satisfied_by(self, candidate: Candidate) -> bool: return spec.contains(candidate.version, prereleases=True) +class SpecifierWithoutExtrasRequirement(SpecifierRequirement): + """ + Requirement backed by an install requirement on a base package. + Trims extras from its install requirement if there are any. + """ + + def __init__(self, ireq: InstallRequirement) -> None: + assert ireq.link is None, "This is a link, not a specifier" + self._ireq = install_req_drop_extras(ireq) + self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras) + + class RequiresPythonRequirement(Requirement): """A requirement representing Requires-Python metadata.""" diff --git a/pipenv/patched/pip/_internal/resolution/resolvelib/resolver.py b/pipenv/patched/pip/_internal/resolution/resolvelib/resolver.py index 056ba19ba2..a8ee8fd222 100644 --- a/pipenv/patched/pip/_internal/resolution/resolvelib/resolver.py +++ b/pipenv/patched/pip/_internal/resolution/resolvelib/resolver.py @@ -1,3 +1,4 @@ +import contextlib import functools import logging import os @@ -11,6 +12,7 @@ from pipenv.patched.pip._internal.cache import WheelCache from pipenv.patched.pip._internal.index.package_finder import PackageFinder from pipenv.patched.pip._internal.operations.prepare import RequirementPreparer +from pipenv.patched.pip._internal.req.constructors import install_req_extend_extras from pipenv.patched.pip._internal.req.req_install import InstallRequirement from pipenv.patched.pip._internal.req.req_set import RequirementSet from pipenv.patched.pip._internal.resolution.base import BaseResolver, InstallRequirementProvider @@ -19,6 +21,7 @@ PipDebuggingReporter, PipReporter, ) +from pipenv.patched.pip._internal.utils.packaging import get_requirement from .base import Candidate, Requirement from .factory import Factory @@ -101,9 +104,24 @@ def resolve( raise error from e req_set = RequirementSet(check_supported_wheels=check_supported_wheels) - for candidate in result.mapping.values(): + # process candidates with extras last to ensure their base equivalent is + # already in the req_set if appropriate. 
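# The "extras last" ordering used in the resolver here and in
# collect_root_requirements, in miniature: False sorts before True, so the key
# `name != project_name` (true only for "foo[extra]" style names) pushes
# extras entries to the end, while Python's stable sort preserves the relative
# order inside each group.
names = ["b[x]", "a", "c[y]", "b", "a[z]"]

def project_name(name: str) -> str:
    return name.split("[", 1)[0]

print(sorted(names, key=lambda n: n != project_name(n)))
# ['a', 'b', 'b[x]', 'c[y]', 'a[z]']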
+ # Python's sort is stable so using a binary key function keeps relative order + # within both subsets. + for candidate in sorted( + result.mapping.values(), key=lambda c: c.name != c.project_name + ): ireq = candidate.get_install_requirement() if ireq is None: + if candidate.name != candidate.project_name: + # extend existing req's extras + with contextlib.suppress(KeyError): + req = req_set.get_requirement(candidate.project_name) + req_set.add_named_requirement( + install_req_extend_extras( + req, get_requirement(candidate.name).extras + ) + ) continue # Check if there is already an installation under the same name, diff --git a/pipenv/patched/pip/_internal/self_outdated_check.py b/pipenv/patched/pip/_internal/self_outdated_check.py index 26c277e2f6..654eb73f08 100644 --- a/pipenv/patched/pip/_internal/self_outdated_check.py +++ b/pipenv/patched/pip/_internal/self_outdated_check.py @@ -28,8 +28,7 @@ from pipenv.patched.pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace from pipenv.patched.pip._internal.utils.misc import ensure_dir -_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" - +_WEEK = datetime.timedelta(days=7) logger = logging.getLogger(__name__) @@ -73,12 +72,10 @@ def get(self, current_time: datetime.datetime) -> Optional[str]: if "pypi_version" not in self._state: return None - seven_days_in_seconds = 7 * 24 * 60 * 60 - # Determine if we need to refresh the state - last_check = datetime.datetime.strptime(self._state["last_check"], _DATE_FMT) - seconds_since_last_check = (current_time - last_check).total_seconds() - if seconds_since_last_check > seven_days_in_seconds: + last_check = datetime.datetime.fromisoformat(self._state["last_check"]) + time_since_last_check = current_time - last_check + if time_since_last_check > _WEEK: return None return self._state["pypi_version"] @@ -100,7 +97,7 @@ def set(self, pypi_version: str, current_time: datetime.datetime) -> None: # Include the key so it's easy to tell which pip wrote the # file. "key": self.key, - "last_check": current_time.strftime(_DATE_FMT), + "last_check": current_time.isoformat(), "pypi_version": pypi_version, } @@ -229,14 +226,14 @@ def pip_self_version_check(session: PipSession, options: optparse.Values) -> Non try: upgrade_prompt = _self_version_check_logic( state=SelfCheckState(cache_dir=options.cache_dir), - current_time=datetime.datetime.utcnow(), + current_time=datetime.datetime.now(datetime.timezone.utc), local_version=installed_dist.version, get_remote_version=functools.partial( _get_current_remote_pip_version, session, options ), ) if upgrade_prompt is not None: - logger.warning("[present-rich] %s", upgrade_prompt) + logger.warning("%s", upgrade_prompt, extra={"rich": True}) except Exception: logger.warning("There was an error checking the latest version of pip.") logger.debug("See below for error", exc_info=True) diff --git a/pipenv/patched/pip/_internal/utils/inject_securetransport.py b/pipenv/patched/pip/_internal/utils/inject_securetransport.py deleted file mode 100644 index 4a25ab205d..0000000000 --- a/pipenv/patched/pip/_internal/utils/inject_securetransport.py +++ /dev/null @@ -1,35 +0,0 @@ -"""A helper module that injects SecureTransport, on import. - -The import should be done as early as possible, to ensure all requests and -sessions (or whatever) are created after injecting SecureTransport. - -Note that we only do the injection on macOS, when the linked OpenSSL is too -old to handle TLSv1.2. 
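# The reworked self-check bookkeeping above, end to end: the timestamp is
# stored with datetime.isoformat(), read back with fromisoformat(), and the
# seven-day refresh window becomes a timedelta comparison instead of
# hand-counted seconds.
import datetime

_WEEK = datetime.timedelta(days=7)

state = {"last_check": datetime.datetime.now(datetime.timezone.utc).isoformat()}

last_check = datetime.datetime.fromisoformat(state["last_check"])
now = datetime.datetime.now(datetime.timezone.utc)
print(now - last_check > _WEEK)  # False: checked just now, still fresh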
-""" - -import sys - - -def inject_securetransport() -> None: - # Only relevant on macOS - if sys.platform != "darwin": - return - - try: - import ssl - except ImportError: - return - - # Checks for OpenSSL 1.0.1 - if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100F: - return - - try: - from pipenv.patched.pip._vendor.urllib3.contrib import securetransport - except (ImportError, OSError): - return - - securetransport.inject_into_urllib3() - - -inject_securetransport() diff --git a/pipenv/patched/pip/_internal/utils/logging.py b/pipenv/patched/pip/_internal/utils/logging.py index f905a9e754..758380bc18 100644 --- a/pipenv/patched/pip/_internal/utils/logging.py +++ b/pipenv/patched/pip/_internal/utils/logging.py @@ -155,8 +155,8 @@ def emit(self, record: logging.LogRecord) -> None: # If we are given a diagnostic error to present, present it with indentation. assert isinstance(record.args, tuple) - if record.msg == "[present-rich] %s" and len(record.args) == 1: - rich_renderable = record.args[0] + if getattr(record, "rich", False): + (rich_renderable,) = record.args assert isinstance( rich_renderable, (ConsoleRenderable, RichCast, str) ), f"{rich_renderable} is not rich-console-renderable" diff --git a/pipenv/patched/pip/_internal/utils/misc.py b/pipenv/patched/pip/_internal/utils/misc.py index c17d3bc8e0..4721531fe7 100644 --- a/pipenv/patched/pip/_internal/utils/misc.py +++ b/pipenv/patched/pip/_internal/utils/misc.py @@ -11,9 +11,11 @@ import sys import sysconfig import urllib.parse +from functools import partial from io import StringIO from itertools import filterfalse, tee, zip_longest -from types import TracebackType +from pathlib import Path +from types import FunctionType, TracebackType from typing import ( Any, BinaryIO, @@ -33,6 +35,7 @@ cast, ) +from pipenv.patched.pip._vendor.packaging.requirements import Requirement from pipenv.patched.pip._vendor.pyproject_hooks import BuildBackendHookCaller from pipenv.patched.pip._vendor.tenacity import retry, stop_after_delay, wait_fixed @@ -66,6 +69,8 @@ ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType] VersionInfo = Tuple[int, int, int] NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]] +OnExc = Callable[[FunctionType, Path, BaseException], Any] +OnErr = Callable[[FunctionType, Path, ExcInfo], Any] def get_pip_version() -> str: @@ -123,33 +128,75 @@ def get_prog() -> str: # Retry every half second for up to 3 seconds # Tenacity raises RetryError by default, explicitly raise the original exception @retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5)) -def rmtree(dir: str, ignore_errors: bool = False) -> None: +def rmtree( + dir: str, + ignore_errors: bool = False, + onexc: Optional[OnExc] = None, +) -> None: + if ignore_errors: + onexc = _onerror_ignore + if onexc is None: + onexc = _onerror_reraise + handler: OnErr = partial( + # `[func, path, Union[ExcInfo, BaseException]] -> Any` is equivalent to + # `Union[([func, path, ExcInfo] -> Any), ([func, path, BaseException] -> Any)]`. + cast(Union[OnExc, OnErr], rmtree_errorhandler), + onexc=onexc, + ) if sys.version_info >= (3, 12): - shutil.rmtree(dir, ignore_errors=ignore_errors, onexc=rmtree_errorhandler) + # See https://docs.python.org/3.12/whatsnew/3.12.html#shutil. 
+ shutil.rmtree(dir, onexc=handler) else: - shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler) + shutil.rmtree(dir, onerror=handler) + + +def _onerror_ignore(*_args: Any) -> None: + pass + + +def _onerror_reraise(*_args: Any) -> None: + raise def rmtree_errorhandler( - func: Callable[..., Any], path: str, exc_info: Union[ExcInfo, BaseException] + func: FunctionType, + path: Path, + exc_info: Union[ExcInfo, BaseException], + *, + onexc: OnExc = _onerror_reraise, ) -> None: - """On Windows, the files in .svn are read-only, so when rmtree() tries to - remove them, an exception is thrown. We catch that here, remove the - read-only attribute, and hopefully continue without problems.""" + """ + `rmtree` error handler to 'force' a file remove (i.e. like `rm -f`). + + * If a file is readonly then it's write flag is set and operation is + retried. + + * `onerror` is the original callback from `rmtree(... onerror=onerror)` + that is chained at the end if the "rm -f" still fails. + """ try: - has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE) + st_mode = os.stat(path).st_mode except OSError: # it's equivalent to os.path.exists return - if has_attr_readonly: + if not st_mode & stat.S_IWRITE: # convert to read/write - os.chmod(path, stat.S_IWRITE) - # use the original function to repeat the operation - func(path) - return - else: - raise + try: + os.chmod(path, st_mode | stat.S_IWRITE) + except OSError: + pass + else: + # use the original function to repeat the operation + try: + func(path) + return + except OSError: + pass + + if not isinstance(exc_info, BaseException): + _, exc_info, _ = exc_info + onexc(func, path, exc_info) def display_path(path: str) -> str: @@ -532,6 +579,13 @@ def redact_auth_from_url(url: str) -> str: return _transform_url(url, _redact_netloc)[0] +def redact_auth_from_requirement(req: Requirement) -> str: + """Replace the password in a given requirement url with ****.""" + if not req.url: + return str(req) + return str(req).replace(req.url, redact_auth_from_url(req.url)) + + class HiddenText: def __init__(self, secret: str, redacted: str) -> None: self.secret = secret diff --git a/pipenv/patched/pip/_internal/utils/subprocess.py b/pipenv/patched/pip/_internal/utils/subprocess.py index 95d483144b..fc638f44e5 100644 --- a/pipenv/patched/pip/_internal/utils/subprocess.py +++ b/pipenv/patched/pip/_internal/utils/subprocess.py @@ -209,7 +209,7 @@ def call_subprocess( output_lines=all_output if not showing_subprocess else None, ) if log_failed_cmd: - subprocess_logger.error("[present-rich] %s", error) + subprocess_logger.error("%s", error, extra={"rich": True}) subprocess_logger.verbose( "[bold magenta]full command[/]: [blue]%s[/]", escape(format_command_args(cmd)), diff --git a/pipenv/patched/pip/_internal/utils/temp_dir.py b/pipenv/patched/pip/_internal/utils/temp_dir.py index 47d9418864..a32b82d4ef 100644 --- a/pipenv/patched/pip/_internal/utils/temp_dir.py +++ b/pipenv/patched/pip/_internal/utils/temp_dir.py @@ -3,8 +3,19 @@ import logging import os.path import tempfile +import traceback from contextlib import ExitStack, contextmanager -from typing import Any, Dict, Generator, Optional, TypeVar, Union +from pathlib import Path +from typing import ( + Any, + Callable, + Dict, + Generator, + List, + Optional, + TypeVar, + Union, +) from pipenv.patched.pip._internal.utils.misc import enum, rmtree @@ -106,6 +117,7 @@ def __init__( delete: Union[bool, None, _Default] = _default, kind: str = "temp", globally_managed: bool = False, + 
ignore_cleanup_errors: bool = True, ): super().__init__() @@ -128,6 +140,7 @@ def __init__( self._deleted = False self.delete = delete self.kind = kind + self.ignore_cleanup_errors = ignore_cleanup_errors if globally_managed: assert _tempdir_manager is not None @@ -170,7 +183,44 @@ def cleanup(self) -> None: self._deleted = True if not os.path.exists(self._path): return - rmtree(self._path) + + errors: List[BaseException] = [] + + def onerror( + func: Callable[..., Any], + path: Path, + exc_val: BaseException, + ) -> None: + """Log a warning for a `rmtree` error and continue""" + formatted_exc = "\n".join( + traceback.format_exception_only(type(exc_val), exc_val) + ) + formatted_exc = formatted_exc.rstrip() # remove trailing new line + if func in (os.unlink, os.remove, os.rmdir): + logger.debug( + "Failed to remove a temporary file '%s' due to %s.\n", + path, + formatted_exc, + ) + else: + logger.debug("%s failed with %s.", func.__qualname__, formatted_exc) + errors.append(exc_val) + + if self.ignore_cleanup_errors: + try: + # first try with tenacity; retrying to handle ephemeral errors + rmtree(self._path, ignore_errors=False) + except OSError: + # last pass ignore/log all errors + rmtree(self._path, onexc=onerror) + if errors: + logger.warning( + "Failed to remove contents in a temporary directory '%s'.\n" + "You can safely remove it manually.", + self._path, + ) + else: + rmtree(self._path) class AdjacentTempDirectory(TempDirectory): diff --git a/pipenv/patched/pip/_internal/vcs/git.py b/pipenv/patched/pip/_internal/vcs/git.py index fec2ce03ec..5b1c64dca4 100644 --- a/pipenv/patched/pip/_internal/vcs/git.py +++ b/pipenv/patched/pip/_internal/vcs/git.py @@ -101,7 +101,7 @@ def get_git_version(self) -> Tuple[int, ...]: if not match: logger.warning("Can't parse git version: %s", version) return () - return tuple(int(c) for c in match.groups()) + return (int(match.group(1)), int(match.group(2))) @classmethod def get_current_branch(cls, location: str) -> Optional[str]: diff --git a/pipenv/patched/pip/_internal/vcs/mercurial.py b/pipenv/patched/pip/_internal/vcs/mercurial.py index bda8745079..be4e3c8cc4 100644 --- a/pipenv/patched/pip/_internal/vcs/mercurial.py +++ b/pipenv/patched/pip/_internal/vcs/mercurial.py @@ -31,7 +31,7 @@ class Mercurial(VersionControl): @staticmethod def get_base_rev_args(rev: str) -> List[str]: - return ["-r", rev] + return [f"-r={rev}"] def fetch_new( self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int diff --git a/pipenv/patched/pip/_vendor/__init__.py b/pipenv/patched/pip/_vendor/__init__.py index 1db664a0a5..6e2f56c095 100644 --- a/pipenv/patched/pip/_vendor/__init__.py +++ b/pipenv/patched/pip/_vendor/__init__.py @@ -117,4 +117,5 @@ def vendored(modulename): vendored("rich.traceback") vendored("tenacity") vendored("tomli") + vendored("truststore") vendored("urllib3") diff --git a/pipenv/patched/pip/_vendor/cachecontrol/__init__.py b/pipenv/patched/pip/_vendor/cachecontrol/__init__.py index f631ae6df4..80c26253b3 100644 --- a/pipenv/patched/pip/_vendor/cachecontrol/__init__.py +++ b/pipenv/patched/pip/_vendor/cachecontrol/__init__.py @@ -8,11 +8,21 @@ """ __author__ = "Eric Larson" __email__ = "eric@ionrock.org" -__version__ = "0.12.11" +__version__ = "0.13.1" -from .wrapper import CacheControl -from .adapter import CacheControlAdapter -from .controller import CacheController +from pipenv.patched.pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pipenv.patched.pip._vendor.cachecontrol.controller import CacheController +from 
pipenv.patched.pip._vendor.cachecontrol.wrapper import CacheControl + +__all__ = [ + "__author__", + "__email__", + "__version__", + "CacheControlAdapter", + "CacheController", + "CacheControl", +] import logging + logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/pipenv/patched/pip/_vendor/cachecontrol/_cmd.py b/pipenv/patched/pip/_vendor/cachecontrol/_cmd.py index f84f866f78..856b3cd5a2 100644 --- a/pipenv/patched/pip/_vendor/cachecontrol/_cmd.py +++ b/pipenv/patched/pip/_vendor/cachecontrol/_cmd.py @@ -1,8 +1,11 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations import logging +from argparse import ArgumentParser +from typing import TYPE_CHECKING from pipenv.patched.pip._vendor import requests @@ -10,16 +13,19 @@ from pipenv.patched.pip._vendor.cachecontrol.cache import DictCache from pipenv.patched.pip._vendor.cachecontrol.controller import logger -from argparse import ArgumentParser +if TYPE_CHECKING: + from argparse import Namespace + from pipenv.patched.pip._vendor.cachecontrol.controller import CacheController -def setup_logging(): + +def setup_logging() -> None: logger.setLevel(logging.DEBUG) handler = logging.StreamHandler() logger.addHandler(handler) -def get_session(): +def get_session() -> requests.Session: adapter = CacheControlAdapter( DictCache(), cache_etags=True, serializer=None, heuristic=None ) @@ -27,17 +33,17 @@ def get_session(): sess.mount("http://", adapter) sess.mount("https://", adapter) - sess.cache_controller = adapter.controller + sess.cache_controller = adapter.controller # type: ignore[attr-defined] return sess -def get_args(): +def get_args() -> Namespace: parser = ArgumentParser() parser.add_argument("url", help="The URL to try and cache") return parser.parse_args() -def main(args=None): +def main() -> None: args = get_args() sess = get_session() @@ -48,10 +54,13 @@ def main(args=None): setup_logging() # try setting the cache - sess.cache_controller.cache_response(resp.request, resp.raw) + cache_controller: CacheController = ( + sess.cache_controller # type: ignore[attr-defined] + ) + cache_controller.cache_response(resp.request, resp.raw) # Now try to get it - if sess.cache_controller.cached_request(resp.request): + if cache_controller.cached_request(resp.request): print("Cached!") else: print("Not cached :(") diff --git a/pipenv/patched/pip/_vendor/cachecontrol/adapter.py b/pipenv/patched/pip/_vendor/cachecontrol/adapter.py index 509321d20f..508bd773ac 100644 --- a/pipenv/patched/pip/_vendor/cachecontrol/adapter.py +++ b/pipenv/patched/pip/_vendor/cachecontrol/adapter.py @@ -1,16 +1,26 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations -import types import functools +import types import zlib +from typing import TYPE_CHECKING, Any, Collection, Mapping from pipenv.patched.pip._vendor.requests.adapters import HTTPAdapter -from .controller import CacheController, PERMANENT_REDIRECT_STATUSES -from .cache import DictCache -from .filewrapper import CallbackFileWrapper +from pipenv.patched.pip._vendor.cachecontrol.cache import DictCache +from pipenv.patched.pip._vendor.cachecontrol.controller import PERMANENT_REDIRECT_STATUSES, CacheController +from pipenv.patched.pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper + +if TYPE_CHECKING: + from pipenv.patched.pip._vendor.requests import PreparedRequest, Response + from pipenv.patched.pip._vendor.urllib3 import HTTPResponse + + from 
pipenv.patched.pip._vendor.cachecontrol.cache import BaseCache + from pipenv.patched.pip._vendor.cachecontrol.heuristics import BaseHeuristic + from pipenv.patched.pip._vendor.cachecontrol.serialize import Serializer class CacheControlAdapter(HTTPAdapter): @@ -18,16 +28,16 @@ class CacheControlAdapter(HTTPAdapter): def __init__( self, - cache=None, - cache_etags=True, - controller_class=None, - serializer=None, - heuristic=None, - cacheable_methods=None, - *args, - **kw - ): - super(CacheControlAdapter, self).__init__(*args, **kw) + cache: BaseCache | None = None, + cache_etags: bool = True, + controller_class: type[CacheController] | None = None, + serializer: Serializer | None = None, + heuristic: BaseHeuristic | None = None, + cacheable_methods: Collection[str] | None = None, + *args: Any, + **kw: Any, + ) -> None: + super().__init__(*args, **kw) self.cache = DictCache() if cache is None else cache self.heuristic = heuristic self.cacheable_methods = cacheable_methods or ("GET",) @@ -37,7 +47,16 @@ def __init__( self.cache, cache_etags=cache_etags, serializer=serializer ) - def send(self, request, cacheable_methods=None, **kw): + def send( + self, + request: PreparedRequest, + stream: bool = False, + timeout: None | float | tuple[float, float] | tuple[float, None] = None, + verify: bool | str = True, + cert: (None | bytes | str | tuple[bytes | str, bytes | str]) = None, + proxies: Mapping[str, str] | None = None, + cacheable_methods: Collection[str] | None = None, + ) -> Response: """ Send a request. Use the request information to see if it exists in the cache and cache the response if we need to and can. @@ -54,13 +73,17 @@ def send(self, request, cacheable_methods=None, **kw): # check for etags and add headers if appropriate request.headers.update(self.controller.conditional_headers(request)) - resp = super(CacheControlAdapter, self).send(request, **kw) + resp = super().send(request, stream, timeout, verify, cert, proxies) return resp def build_response( - self, request, response, from_cache=False, cacheable_methods=None - ): + self, + request: PreparedRequest, + response: HTTPResponse, + from_cache: bool = False, + cacheable_methods: Collection[str] | None = None, + ) -> Response: """ Build a response by making a request or using the cache. @@ -102,36 +125,37 @@ def build_response( else: # Wrap the response file with a wrapper that will cache the # response when the stream has been consumed. - response._fp = CallbackFileWrapper( - response._fp, + response._fp = CallbackFileWrapper( # type: ignore[attr-defined] + response._fp, # type: ignore[attr-defined] functools.partial( self.controller.cache_response, request, response ), ) if response.chunked: - super_update_chunk_length = response._update_chunk_length + super_update_chunk_length = response._update_chunk_length # type: ignore[attr-defined] - def _update_chunk_length(self): + def _update_chunk_length(self: HTTPResponse) -> None: super_update_chunk_length() if self.chunk_left == 0: - self._fp._close() + self._fp._close() # type: ignore[attr-defined] - response._update_chunk_length = types.MethodType( + response._update_chunk_length = types.MethodType( # type: ignore[attr-defined] _update_chunk_length, response ) - resp = super(CacheControlAdapter, self).build_response(request, response) + resp: Response = super().build_response(request, response) # type: ignore[no-untyped-call] # See if we should invalidate the cache. 
if request.method in self.invalidating_methods and resp.ok: + assert request.url is not None cache_url = self.controller.cache_url(request.url) self.cache.delete(cache_url) # Give the request a from_cache attr to let people use it - resp.from_cache = from_cache + resp.from_cache = from_cache # type: ignore[attr-defined] return resp - def close(self): + def close(self) -> None: self.cache.close() - super(CacheControlAdapter, self).close() + super().close() # type: ignore[no-untyped-call] diff --git a/pipenv/patched/pip/_vendor/cachecontrol/cache.py b/pipenv/patched/pip/_vendor/cachecontrol/cache.py index 2a965f595f..3293b0057c 100644 --- a/pipenv/patched/pip/_vendor/cachecontrol/cache.py +++ b/pipenv/patched/pip/_vendor/cachecontrol/cache.py @@ -6,38 +6,46 @@ The cache object API for implementing caches. The default is a thread safe in-memory dictionary. """ +from __future__ import annotations + from threading import Lock +from typing import IO, TYPE_CHECKING, MutableMapping +if TYPE_CHECKING: + from datetime import datetime -class BaseCache(object): - def get(self, key): +class BaseCache: + def get(self, key: str) -> bytes | None: raise NotImplementedError() - def set(self, key, value, expires=None): + def set( + self, key: str, value: bytes, expires: int | datetime | None = None + ) -> None: raise NotImplementedError() - def delete(self, key): + def delete(self, key: str) -> None: raise NotImplementedError() - def close(self): + def close(self) -> None: pass class DictCache(BaseCache): - - def __init__(self, init_dict=None): + def __init__(self, init_dict: MutableMapping[str, bytes] | None = None) -> None: self.lock = Lock() self.data = init_dict or {} - def get(self, key): + def get(self, key: str) -> bytes | None: return self.data.get(key, None) - def set(self, key, value, expires=None): + def set( + self, key: str, value: bytes, expires: int | datetime | None = None + ) -> None: with self.lock: self.data.update({key: value}) - def delete(self, key): + def delete(self, key: str) -> None: with self.lock: if key in self.data: self.data.pop(key) @@ -55,10 +63,11 @@ class SeparateBodyBaseCache(BaseCache): Similarly, the body should be loaded separately via ``get_body()``. """ - def set_body(self, key, body): + + def set_body(self, key: str, body: bytes) -> None: raise NotImplementedError() - def get_body(self, key): + def get_body(self, key: str) -> IO[bytes] | None: """ Return the body as file-like object. 
""" diff --git a/pipenv/patched/pip/_vendor/cachecontrol/caches/__init__.py b/pipenv/patched/pip/_vendor/cachecontrol/caches/__init__.py index 37827291fb..d77e4933b3 100644 --- a/pipenv/patched/pip/_vendor/cachecontrol/caches/__init__.py +++ b/pipenv/patched/pip/_vendor/cachecontrol/caches/__init__.py @@ -2,8 +2,7 @@ # # SPDX-License-Identifier: Apache-2.0 -from .file_cache import FileCache, SeparateBodyFileCache -from .redis_cache import RedisCache - +from pipenv.patched.pip._vendor.cachecontrol.caches.file_cache import FileCache, SeparateBodyFileCache +from pipenv.patched.pip._vendor.cachecontrol.caches.redis_cache import RedisCache __all__ = ["FileCache", "SeparateBodyFileCache", "RedisCache"] diff --git a/pipenv/patched/pip/_vendor/cachecontrol/caches/file_cache.py b/pipenv/patched/pip/_vendor/cachecontrol/caches/file_cache.py index f1ddb2ebdf..88270fae7a 100644 --- a/pipenv/patched/pip/_vendor/cachecontrol/caches/file_cache.py +++ b/pipenv/patched/pip/_vendor/cachecontrol/caches/file_cache.py @@ -1,22 +1,23 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations import hashlib import os from textwrap import dedent +from typing import IO, TYPE_CHECKING -from ..cache import BaseCache, SeparateBodyBaseCache -from ..controller import CacheController +from pipenv.patched.pip._vendor.cachecontrol.cache import BaseCache, SeparateBodyBaseCache +from pipenv.patched.pip._vendor.cachecontrol.controller import CacheController -try: - FileNotFoundError -except NameError: - # py2.X - FileNotFoundError = (IOError, OSError) +if TYPE_CHECKING: + from datetime import datetime + from filelock import BaseFileLock -def _secure_open_write(filename, fmode): + +def _secure_open_write(filename: str, fmode: int) -> IO[bytes]: # We only want to write to this file, so open it in write only mode flags = os.O_WRONLY @@ -39,7 +40,7 @@ def _secure_open_write(filename, fmode): # there try: os.remove(filename) - except (IOError, OSError): + except OSError: # The file must not exist already, so we can just skip ahead to opening pass @@ -62,37 +63,27 @@ class _FileCacheMixin: def __init__( self, - directory, - forever=False, - filemode=0o0600, - dirmode=0o0700, - use_dir_lock=None, - lock_class=None, - ): - - if use_dir_lock is not None and lock_class is not None: - raise ValueError("Cannot use use_dir_lock and lock_class together") - + directory: str, + forever: bool = False, + filemode: int = 0o0600, + dirmode: int = 0o0700, + lock_class: type[BaseFileLock] | None = None, + ) -> None: try: - from lockfile import LockFile - from lockfile.mkdirlockfile import MkdirLockFile + if lock_class is None: + from filelock import FileLock + + lock_class = FileLock except ImportError: notice = dedent( """ NOTE: In order to use the FileCache you must have - lockfile installed. You can install it via pip: - pip install lockfile + filelock installed. You can install it via pip: + pip install filelock """ ) raise ImportError(notice) - else: - if use_dir_lock: - lock_class = MkdirLockFile - - elif lock_class is None: - lock_class = LockFile - self.directory = directory self.forever = forever self.filemode = filemode @@ -100,17 +91,17 @@ def __init__( self.lock_class = lock_class @staticmethod - def encode(x): + def encode(x: str) -> str: return hashlib.sha224(x.encode()).hexdigest() - def _fn(self, name): + def _fn(self, name: str) -> str: # NOTE: This method should not change as some may depend on it. 
# See: https://github.com/ionrock/cachecontrol/issues/63 hashed = self.encode(name) parts = list(hashed[:5]) + [hashed] return os.path.join(self.directory, *parts) - def get(self, key): + def get(self, key: str) -> bytes | None: name = self._fn(key) try: with open(name, "rb") as fh: @@ -119,26 +110,28 @@ def get(self, key): except FileNotFoundError: return None - def set(self, key, value, expires=None): + def set( + self, key: str, value: bytes, expires: int | datetime | None = None + ) -> None: name = self._fn(key) self._write(name, value) - def _write(self, path, data: bytes): + def _write(self, path: str, data: bytes) -> None: """ Safely write the data to the given path. """ # Make sure the directory exists try: os.makedirs(os.path.dirname(path), self.dirmode) - except (IOError, OSError): + except OSError: pass - with self.lock_class(path) as lock: + with self.lock_class(path + ".lock"): # Write our actual file - with _secure_open_write(lock.path, self.filemode) as fh: + with _secure_open_write(path, self.filemode) as fh: fh.write(data) - def _delete(self, key, suffix): + def _delete(self, key: str, suffix: str) -> None: name = self._fn(key) + suffix if not self.forever: try: @@ -153,7 +146,7 @@ class FileCache(_FileCacheMixin, BaseCache): downloads. """ - def delete(self, key): + def delete(self, key: str) -> None: self._delete(key, "") @@ -163,23 +156,23 @@ class SeparateBodyFileCache(_FileCacheMixin, SeparateBodyBaseCache): peak memory usage. """ - def get_body(self, key): + def get_body(self, key: str) -> IO[bytes] | None: name = self._fn(key) + ".body" try: return open(name, "rb") except FileNotFoundError: return None - def set_body(self, key, body): + def set_body(self, key: str, body: bytes) -> None: name = self._fn(key) + ".body" self._write(name, body) - def delete(self, key): + def delete(self, key: str) -> None: self._delete(key, "") self._delete(key, ".body") -def url_to_file_path(url, filecache): +def url_to_file_path(url: str, filecache: FileCache) -> str: """Return the file cache path based on the URL. This does not ensure the file exists! 
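The file_cache.py hunks above drop the unmaintained lockfile dependency in favor of filelock and take the lock on "<entry path>.lock" rather than locking the entry path itself. A minimal consumer-level sketch of the resulting cache, assuming the upstream cachecontrol, requests, and filelock package names rather than the vendored pipenv.patched.pip._vendor paths:

    import requests
    from cachecontrol import CacheControl
    from cachecontrol.caches.file_cache import FileCache

    # FileCache raises ImportError unless filelock is installed
    # ("pip install filelock"). Entries live under five one-character
    # subdirectories derived from sha224(key), then the full hash.
    sess = CacheControl(requests.Session(), cache=FileCache(".web_cache"))
    resp = sess.get("https://example.com/")

    # Each entry is written while holding "<entry path>.lock", so concurrent
    # processes sharing .web_cache never interleave partial writes.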
diff --git a/pipenv/patched/pip/_vendor/cachecontrol/caches/redis_cache.py b/pipenv/patched/pip/_vendor/cachecontrol/caches/redis_cache.py index 6e79a47f48..9373f34f9e 100644 --- a/pipenv/patched/pip/_vendor/cachecontrol/caches/redis_cache.py +++ b/pipenv/patched/pip/_vendor/cachecontrol/caches/redis_cache.py @@ -1,39 +1,48 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations -from __future__ import division -from datetime import datetime +from datetime import datetime, timezone +from typing import TYPE_CHECKING + from pipenv.patched.pip._vendor.cachecontrol.cache import BaseCache +if TYPE_CHECKING: + from redis import Redis -class RedisCache(BaseCache): - def __init__(self, conn): +class RedisCache(BaseCache): + def __init__(self, conn: Redis[bytes]) -> None: self.conn = conn - def get(self, key): + def get(self, key: str) -> bytes | None: return self.conn.get(key) - def set(self, key, value, expires=None): + def set( + self, key: str, value: bytes, expires: int | datetime | None = None + ) -> None: if not expires: self.conn.set(key, value) elif isinstance(expires, datetime): - expires = expires - datetime.utcnow() - self.conn.setex(key, int(expires.total_seconds()), value) + now_utc = datetime.now(timezone.utc) + if expires.tzinfo is None: + now_utc = now_utc.replace(tzinfo=None) + delta = expires - now_utc + self.conn.setex(key, int(delta.total_seconds()), value) else: self.conn.setex(key, expires, value) - def delete(self, key): + def delete(self, key: str) -> None: self.conn.delete(key) - def clear(self): + def clear(self) -> None: """Helper for clearing all the keys in a database. Use with caution!""" for key in self.conn.keys(): self.conn.delete(key) - def close(self): + def close(self) -> None: """Redis uses connection pooling, no need to close the connection.""" pass diff --git a/pipenv/patched/pip/_vendor/cachecontrol/compat.py b/pipenv/patched/pip/_vendor/cachecontrol/compat.py deleted file mode 100644 index 2cd7e5e588..0000000000 --- a/pipenv/patched/pip/_vendor/cachecontrol/compat.py +++ /dev/null @@ -1,32 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -try: - from urllib.parse import urljoin -except ImportError: - from urlparse import urljoin - - -try: - import cPickle as pickle -except ImportError: - import pickle - -# Handle the case where the requests module has been patched to not have -# urllib3 bundled as part of its source. -try: - from pipenv.patched.pip._vendor.requests.packages.urllib3.response import HTTPResponse -except ImportError: - from pipenv.patched.pip._vendor.urllib3.response import HTTPResponse - -try: - from pipenv.patched.pip._vendor.requests.packages.urllib3.util import is_fp_closed -except ImportError: - from pipenv.patched.pip._vendor.urllib3.util import is_fp_closed - -# Replicate some six behaviour -try: - text_type = unicode -except NameError: - text_type = str diff --git a/pipenv/patched/pip/_vendor/cachecontrol/controller.py b/pipenv/patched/pip/_vendor/cachecontrol/controller.py index 466c15528c..55837157b1 100644 --- a/pipenv/patched/pip/_vendor/cachecontrol/controller.py +++ b/pipenv/patched/pip/_vendor/cachecontrol/controller.py @@ -5,17 +5,27 @@ """ The httplib2 algorithms ported for use with requests. 
""" +from __future__ import annotations + +import calendar import logging import re -import calendar import time from email.utils import parsedate_tz +from typing import TYPE_CHECKING, Collection, Mapping from pipenv.patched.pip._vendor.requests.structures import CaseInsensitiveDict -from .cache import DictCache, SeparateBodyBaseCache -from .serialize import Serializer +from pipenv.patched.pip._vendor.cachecontrol.cache import DictCache, SeparateBodyBaseCache +from pipenv.patched.pip._vendor.cachecontrol.serialize import Serializer + +if TYPE_CHECKING: + from typing import Literal + + from pipenv.patched.pip._vendor.requests import PreparedRequest + from pipenv.patched.pip._vendor.urllib3 import HTTPResponse + from pipenv.patched.pip._vendor.cachecontrol.cache import BaseCache logger = logging.getLogger(__name__) @@ -24,20 +34,26 @@ PERMANENT_REDIRECT_STATUSES = (301, 308) -def parse_uri(uri): +def parse_uri(uri: str) -> tuple[str, str, str, str, str]: """Parses a URI using the regex given in Appendix B of RFC 3986. (scheme, authority, path, query, fragment) = parse_uri(uri) """ - groups = URI.match(uri).groups() + match = URI.match(uri) + assert match is not None + groups = match.groups() return (groups[1], groups[3], groups[4], groups[6], groups[8]) -class CacheController(object): +class CacheController: """An interface to see if request should cached or not.""" def __init__( - self, cache=None, cache_etags=True, serializer=None, status_codes=None + self, + cache: BaseCache | None = None, + cache_etags: bool = True, + serializer: Serializer | None = None, + status_codes: Collection[int] | None = None, ): self.cache = DictCache() if cache is None else cache self.cache_etags = cache_etags @@ -45,7 +61,7 @@ def __init__( self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308) @classmethod - def _urlnorm(cls, uri): + def _urlnorm(cls, uri: str) -> str: """Normalize the URL to create a safe key for the cache""" (scheme, authority, path, query, fragment) = parse_uri(uri) if not scheme or not authority: @@ -65,10 +81,10 @@ def _urlnorm(cls, uri): return defrag_uri @classmethod - def cache_url(cls, uri): + def cache_url(cls, uri: str) -> str: return cls._urlnorm(uri) - def parse_cache_control(self, headers): + def parse_cache_control(self, headers: Mapping[str, str]) -> dict[str, int | None]: known_directives = { # https://tools.ietf.org/html/rfc7234#section-5.2 "max-age": (int, True), @@ -87,7 +103,7 @@ def parse_cache_control(self, headers): cc_headers = headers.get("cache-control", headers.get("Cache-Control", "")) - retval = {} + retval: dict[str, int | None] = {} for cc_directive in cc_headers.split(","): if not cc_directive.strip(): @@ -122,11 +138,33 @@ def parse_cache_control(self, headers): return retval - def cached_request(self, request): + def _load_from_cache(self, request: PreparedRequest) -> HTTPResponse | None: + """ + Load a cached response, or return None if it's not available. 
+ """ + cache_url = request.url + assert cache_url is not None + cache_data = self.cache.get(cache_url) + if cache_data is None: + logger.debug("No cache entry available") + return None + + if isinstance(self.cache, SeparateBodyBaseCache): + body_file = self.cache.get_body(cache_url) + else: + body_file = None + + result = self.serializer.loads(request, cache_data, body_file) + if result is None: + logger.warning("Cache entry deserialization failed, entry ignored") + return result + + def cached_request(self, request: PreparedRequest) -> HTTPResponse | Literal[False]: """ Return a cached response if it exists in the cache, otherwise return False. """ + assert request.url is not None cache_url = self.cache_url(request.url) logger.debug('Looking up "%s" in the cache', cache_url) cc = self.parse_cache_control(request.headers) @@ -140,21 +178,9 @@ def cached_request(self, request): logger.debug('Request header has "max_age" as 0, cache bypassed') return False - # Request allows serving from the cache, let's see if we find something - cache_data = self.cache.get(cache_url) - if cache_data is None: - logger.debug("No cache entry available") - return False - - if isinstance(self.cache, SeparateBodyBaseCache): - body_file = self.cache.get_body(cache_url) - else: - body_file = None - - # Check whether it can be deserialized - resp = self.serializer.loads(request, cache_data, body_file) + # Check whether we can load the response from the cache: + resp = self._load_from_cache(request) if not resp: - logger.warning("Cache entry deserialization failed, entry ignored") return False # If we have a cached permanent redirect, return it immediately. We @@ -174,7 +200,7 @@ def cached_request(self, request): logger.debug(msg) return resp - headers = CaseInsensitiveDict(resp.headers) + headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers) if not headers or "date" not in headers: if "etag" not in headers: # Without date or etag, the cached response can never be used @@ -185,7 +211,9 @@ def cached_request(self, request): return False now = time.time() - date = calendar.timegm(parsedate_tz(headers["date"])) + time_tuple = parsedate_tz(headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) current_age = max(0, now - date) logger.debug("Current age based on date: %i", current_age) @@ -199,28 +227,30 @@ def cached_request(self, request): freshness_lifetime = 0 # Check the max-age pragma in the cache control header - if "max-age" in resp_cc: - freshness_lifetime = resp_cc["max-age"] + max_age = resp_cc.get("max-age") + if max_age is not None: + freshness_lifetime = max_age logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime) # If there isn't a max-age, check for an expires header elif "expires" in headers: expires = parsedate_tz(headers["expires"]) if expires is not None: - expire_time = calendar.timegm(expires) - date + expire_time = calendar.timegm(expires[:6]) - date freshness_lifetime = max(0, expire_time) logger.debug("Freshness lifetime from expires: %i", freshness_lifetime) # Determine if we are setting freshness limit in the # request. Note, this overrides what was in the response. 
- if "max-age" in cc: - freshness_lifetime = cc["max-age"] + max_age = cc.get("max-age") + if max_age is not None: + freshness_lifetime = max_age logger.debug( "Freshness lifetime from request max-age: %i", freshness_lifetime ) - if "min-fresh" in cc: - min_fresh = cc["min-fresh"] + min_fresh = cc.get("min-fresh") + if min_fresh is not None: # adjust our current age by our min fresh current_age += min_fresh logger.debug("Adjusted current age from min-fresh: %i", current_age) @@ -239,13 +269,12 @@ def cached_request(self, request): # return the original handler return False - def conditional_headers(self, request): - cache_url = self.cache_url(request.url) - resp = self.serializer.loads(request, self.cache.get(cache_url)) + def conditional_headers(self, request: PreparedRequest) -> dict[str, str]: + resp = self._load_from_cache(request) new_headers = {} if resp: - headers = CaseInsensitiveDict(resp.headers) + headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers) if "etag" in headers: new_headers["If-None-Match"] = headers["ETag"] @@ -255,7 +284,14 @@ def conditional_headers(self, request): return new_headers - def _cache_set(self, cache_url, request, response, body=None, expires_time=None): + def _cache_set( + self, + cache_url: str, + request: PreparedRequest, + response: HTTPResponse, + body: bytes | None = None, + expires_time: int | None = None, + ) -> None: """ Store the data in the cache. """ @@ -267,7 +303,10 @@ def _cache_set(self, cache_url, request, response, body=None, expires_time=None) self.serializer.dumps(request, response, b""), expires=expires_time, ) - self.cache.set_body(cache_url, body) + # body is None can happen when, for example, we're only updating + # headers, as is the case in update_cached_response(). + if body is not None: + self.cache.set_body(cache_url, body) else: self.cache.set( cache_url, @@ -275,7 +314,13 @@ def _cache_set(self, cache_url, request, response, body=None, expires_time=None) expires=expires_time, ) - def cache_response(self, request, response, body=None, status_codes=None): + def cache_response( + self, + request: PreparedRequest, + response: HTTPResponse, + body: bytes | None = None, + status_codes: Collection[int] | None = None, + ) -> None: """ Algorithm for caching requests. 
@@ -290,10 +335,14 @@ def cache_response(self, request, response, body=None, status_codes=None): ) return - response_headers = CaseInsensitiveDict(response.headers) + response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( + response.headers + ) if "date" in response_headers: - date = calendar.timegm(parsedate_tz(response_headers["date"])) + time_tuple = parsedate_tz(response_headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) else: date = 0 @@ -312,6 +361,7 @@ def cache_response(self, request, response, body=None, status_codes=None): cc_req = self.parse_cache_control(request.headers) cc = self.parse_cache_control(response_headers) + assert request.url is not None cache_url = self.cache_url(request.url) logger.debug('Updating cache with response from "%s"', cache_url) @@ -344,11 +394,11 @@ def cache_response(self, request, response, body=None, status_codes=None): if response_headers.get("expires"): expires = parsedate_tz(response_headers["expires"]) if expires is not None: - expires_time = calendar.timegm(expires) - date + expires_time = calendar.timegm(expires[:6]) - date expires_time = max(expires_time, 14 * 86400) - logger.debug("etag object cached for {0} seconds".format(expires_time)) + logger.debug(f"etag object cached for {expires_time} seconds") logger.debug("Caching due to etag") self._cache_set(cache_url, request, response, body, expires_time) @@ -362,11 +412,14 @@ def cache_response(self, request, response, body=None, status_codes=None): # is no date header then we can't do anything about expiring # the cache. elif "date" in response_headers: - date = calendar.timegm(parsedate_tz(response_headers["date"])) + time_tuple = parsedate_tz(response_headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) # cache when there is a max-age > 0 - if "max-age" in cc and cc["max-age"] > 0: + max_age = cc.get("max-age") + if max_age is not None and max_age > 0: logger.debug("Caching b/c date exists and max-age > 0") - expires_time = cc["max-age"] + expires_time = max_age self._cache_set( cache_url, request, @@ -381,12 +434,12 @@ def cache_response(self, request, response, body=None, status_codes=None): if response_headers["expires"]: expires = parsedate_tz(response_headers["expires"]) if expires is not None: - expires_time = calendar.timegm(expires) - date + expires_time = calendar.timegm(expires[:6]) - date else: expires_time = None logger.debug( - "Caching b/c of expires header. expires in {0} seconds".format( + "Caching b/c of expires header. expires in {} seconds".format( expires_time ) ) @@ -398,16 +451,18 @@ def cache_response(self, request, response, body=None, status_codes=None): expires_time, ) - def update_cached_response(self, request, response): + def update_cached_response( + self, request: PreparedRequest, response: HTTPResponse + ) -> HTTPResponse: """On a 304 we will get a new set of headers that we want to update our cached value with, assuming we have one. This should only ever be called when we've sent an ETag and gotten a 304 as the response. 
""" + assert request.url is not None cache_url = self.cache_url(request.url) - - cached_response = self.serializer.loads(request, self.cache.get(cache_url)) + cached_response = self._load_from_cache(request) if not cached_response: # we didn't have a cached response @@ -423,11 +478,11 @@ def update_cached_response(self, request, response): excluded_headers = ["content-length"] cached_response.headers.update( - dict( - (k, v) - for k, v in response.headers.items() + { + k: v + for k, v in response.headers.items() # type: ignore[no-untyped-call] if k.lower() not in excluded_headers - ) + } ) # we want a 200 b/c we have content via the cache diff --git a/pipenv/patched/pip/_vendor/cachecontrol/filewrapper.py b/pipenv/patched/pip/_vendor/cachecontrol/filewrapper.py index f5ed5f6f6e..25143902a2 100644 --- a/pipenv/patched/pip/_vendor/cachecontrol/filewrapper.py +++ b/pipenv/patched/pip/_vendor/cachecontrol/filewrapper.py @@ -1,12 +1,17 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations -from tempfile import NamedTemporaryFile import mmap +from tempfile import NamedTemporaryFile +from typing import TYPE_CHECKING, Any, Callable + +if TYPE_CHECKING: + from http.client import HTTPResponse -class CallbackFileWrapper(object): +class CallbackFileWrapper: """ Small wrapper around a fp object which will tee everything read into a buffer, and when that file is closed it will execute a callback with the @@ -25,12 +30,14 @@ class CallbackFileWrapper(object): performance impact. """ - def __init__(self, fp, callback): + def __init__( + self, fp: HTTPResponse, callback: Callable[[bytes], None] | None + ) -> None: self.__buf = NamedTemporaryFile("rb+", delete=True) self.__fp = fp self.__callback = callback - def __getattr__(self, name): + def __getattr__(self, name: str) -> Any: # The vaguaries of garbage collection means that self.__fp is # not always set. By using __getattribute__ and the private # name[0] allows looking up the attribute value and raising an @@ -42,7 +49,7 @@ def __getattr__(self, name): fp = self.__getattribute__("_CallbackFileWrapper__fp") return getattr(fp, name) - def __is_fp_closed(self): + def __is_fp_closed(self) -> bool: try: return self.__fp.fp is None @@ -50,7 +57,8 @@ def __is_fp_closed(self): pass try: - return self.__fp.closed + closed: bool = self.__fp.closed + return closed except AttributeError: pass @@ -59,7 +67,7 @@ def __is_fp_closed(self): # TODO: Add some logging here... return False - def _close(self): + def _close(self) -> None: if self.__callback: if self.__buf.tell() == 0: # Empty file: @@ -86,8 +94,8 @@ def _close(self): # Important when caching big files. self.__buf.close() - def read(self, amt=None): - data = self.__fp.read(amt) + def read(self, amt: int | None = None) -> bytes: + data: bytes = self.__fp.read(amt) if data: # We may be dealing with b'', a sign that things are over: # it's passed e.g. after we've already closed self.__buf. @@ -97,8 +105,8 @@ def read(self, amt=None): return data - def _safe_read(self, amt): - data = self.__fp._safe_read(amt) + def _safe_read(self, amt: int) -> bytes: + data: bytes = self.__fp._safe_read(amt) # type: ignore[attr-defined] if amt == 2 and data == b"\r\n": # urllib executes this read to toss the CRLF at the end # of the chunk. 
diff --git a/pipenv/patched/pip/_vendor/cachecontrol/heuristics.py b/pipenv/patched/pip/_vendor/cachecontrol/heuristics.py index ebe4a96f58..f7fd306ba4 100644 --- a/pipenv/patched/pip/_vendor/cachecontrol/heuristics.py +++ b/pipenv/patched/pip/_vendor/cachecontrol/heuristics.py @@ -1,29 +1,31 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations import calendar import time - +from datetime import datetime, timedelta, timezone from email.utils import formatdate, parsedate, parsedate_tz +from typing import TYPE_CHECKING, Any, Mapping -from datetime import datetime, timedelta +if TYPE_CHECKING: + from pipenv.patched.pip._vendor.urllib3 import HTTPResponse TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT" -def expire_after(delta, date=None): - date = date or datetime.utcnow() +def expire_after(delta: timedelta, date: datetime | None = None) -> datetime: + date = date or datetime.now(timezone.utc) return date + delta -def datetime_to_header(dt): +def datetime_to_header(dt: datetime) -> str: return formatdate(calendar.timegm(dt.timetuple())) -class BaseHeuristic(object): - - def warning(self, response): +class BaseHeuristic: + def warning(self, response: HTTPResponse) -> str | None: """ Return a valid 1xx warning header value describing the cache adjustments. @@ -34,7 +36,7 @@ def warning(self, response): """ return '110 - "Response is Stale"' - def update_headers(self, response): + def update_headers(self, response: HTTPResponse) -> dict[str, str]: """Update the response headers with any new headers. NOTE: This SHOULD always include some Warning header to @@ -43,7 +45,7 @@ def update_headers(self, response): """ return {} - def apply(self, response): + def apply(self, response: HTTPResponse) -> HTTPResponse: updated_headers = self.update_headers(response) if updated_headers: @@ -61,12 +63,12 @@ class OneDayCache(BaseHeuristic): future. """ - def update_headers(self, response): + def update_headers(self, response: HTTPResponse) -> dict[str, str]: headers = {} if "expires" not in response.headers: date = parsedate(response.headers["date"]) - expires = expire_after(timedelta(days=1), date=datetime(*date[:6])) + expires = expire_after(timedelta(days=1), date=datetime(*date[:6], tzinfo=timezone.utc)) # type: ignore[misc] headers["expires"] = datetime_to_header(expires) headers["cache-control"] = "public" return headers @@ -77,14 +79,14 @@ class ExpiresAfter(BaseHeuristic): Cache **all** requests for a defined time period. """ - def __init__(self, **kw): + def __init__(self, **kw: Any) -> None: self.delta = timedelta(**kw) - def update_headers(self, response): + def update_headers(self, response: HTTPResponse) -> dict[str, str]: expires = expire_after(self.delta) return {"expires": datetime_to_header(expires), "cache-control": "public"} - def warning(self, response): + def warning(self, response: HTTPResponse) -> str | None: tmpl = "110 - Automatically cached for %s. Response might be stale" return tmpl % self.delta @@ -101,12 +103,23 @@ class LastModified(BaseHeuristic): http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397 Unlike mozilla we limit this to 24-hr. 
""" + cacheable_by_default_statuses = { - 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501 + 200, + 203, + 204, + 206, + 300, + 301, + 404, + 405, + 410, + 414, + 501, } - def update_headers(self, resp): - headers = resp.headers + def update_headers(self, resp: HTTPResponse) -> dict[str, str]: + headers: Mapping[str, str] = resp.headers if "expires" in headers: return {} @@ -120,9 +133,11 @@ def update_headers(self, resp): if "date" not in headers or "last-modified" not in headers: return {} - date = calendar.timegm(parsedate_tz(headers["date"])) + time_tuple = parsedate_tz(headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) last_modified = parsedate(headers["last-modified"]) - if date is None or last_modified is None: + if last_modified is None: return {} now = time.time() @@ -135,5 +150,5 @@ def update_headers(self, resp): expires = date + freshness_lifetime return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))} - def warning(self, resp): + def warning(self, resp: HTTPResponse) -> str | None: return None diff --git a/pipenv/patched/pip/_vendor/cachecontrol/serialize.py b/pipenv/patched/pip/_vendor/cachecontrol/serialize.py index 2c2736b4e1..8631f3fdd8 100644 --- a/pipenv/patched/pip/_vendor/cachecontrol/serialize.py +++ b/pipenv/patched/pip/_vendor/cachecontrol/serialize.py @@ -1,78 +1,76 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations -import base64 import io -import json -import zlib +from typing import IO, TYPE_CHECKING, Any, Mapping, cast from pipenv.patched.pip._vendor import msgpack from pipenv.patched.pip._vendor.requests.structures import CaseInsensitiveDict +from pipenv.patched.pip._vendor.urllib3 import HTTPResponse -from .compat import HTTPResponse, pickle, text_type +if TYPE_CHECKING: + from pipenv.patched.pip._vendor.requests import PreparedRequest -def _b64_decode_bytes(b): - return base64.b64decode(b.encode("ascii")) +class Serializer: + serde_version = "4" - -def _b64_decode_str(s): - return _b64_decode_bytes(s).decode("utf8") - - -_default_body_read = object() - - -class Serializer(object): - def dumps(self, request, response, body=None): - response_headers = CaseInsensitiveDict(response.headers) + def dumps( + self, + request: PreparedRequest, + response: HTTPResponse, + body: bytes | None = None, + ) -> bytes: + response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( + response.headers + ) if body is None: # When a body isn't passed in, we'll read the response. We # also update the response with a new file handler to be # sure it acts as though it was never read. body = response.read(decode_content=False) - response._fp = io.BytesIO(body) - - # NOTE: This is all a bit weird, but it's really important that on - # Python 2.x these objects are unicode and not str, even when - # they contain only ascii. The problem here is that msgpack - # understands the difference between unicode and bytes and we - # have it set to differentiate between them, however Python 2 - # doesn't know the difference. Forcing these to unicode will be - # enough to have msgpack know the difference. 
+ response._fp = io.BytesIO(body) # type: ignore[attr-defined] + response.length_remaining = len(body) + data = { - u"response": { - u"body": body, # Empty bytestring if body is stored separately - u"headers": dict( - (text_type(k), text_type(v)) for k, v in response.headers.items() - ), - u"status": response.status, - u"version": response.version, - u"reason": text_type(response.reason), - u"strict": response.strict, - u"decode_content": response.decode_content, + "response": { + "body": body, # Empty bytestring if body is stored separately + "headers": {str(k): str(v) for k, v in response.headers.items()}, # type: ignore[no-untyped-call] + "status": response.status, + "version": response.version, + "reason": str(response.reason), + "decode_content": response.decode_content, } } # Construct our vary headers - data[u"vary"] = {} - if u"vary" in response_headers: - varied_headers = response_headers[u"vary"].split(",") + data["vary"] = {} + if "vary" in response_headers: + varied_headers = response_headers["vary"].split(",") for header in varied_headers: - header = text_type(header).strip() + header = str(header).strip() header_value = request.headers.get(header, None) if header_value is not None: - header_value = text_type(header_value) - data[u"vary"][header] = header_value + header_value = str(header_value) + data["vary"][header] = header_value + + return b",".join([f"cc={self.serde_version}".encode(), self.serialize(data)]) - return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)]) + def serialize(self, data: dict[str, Any]) -> bytes: + return cast(bytes, msgpack.dumps(data, use_bin_type=True)) - def loads(self, request, data, body_file=None): + def loads( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: # Short circuit if we've been given an empty set of data if not data: - return + return None # Determine what version of the serializer the data was serialized # with @@ -88,18 +86,23 @@ def loads(self, request, data, body_file=None): ver = b"cc=0" # Get the version number out of the cc=N - ver = ver.split(b"=", 1)[-1].decode("ascii") + verstr = ver.split(b"=", 1)[-1].decode("ascii") # Dispatch to the actual load method for the given version try: - return getattr(self, "_loads_v{}".format(ver))(request, data, body_file) + return getattr(self, f"_loads_v{verstr}")(request, data, body_file) # type: ignore[no-any-return] except AttributeError: # This is a version we don't have a loads function for, so we'll # just treat it as a miss and return None - return - - def prepare_response(self, request, cached, body_file=None): + return None + + def prepare_response( + self, + request: PreparedRequest, + cached: Mapping[str, Any], + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: """Verify our vary headers match and construct a real urllib3 HTTPResponse object. """ @@ -108,23 +111,26 @@ def prepare_response(self, request, cached, body_file=None): # This case is also handled in the controller code when creating # a cache entry, but is left here for backwards compatibility. 
if "*" in cached.get("vary", {}): - return + return None # Ensure that the Vary headers for the cached response match our # request for header, value in cached.get("vary", {}).items(): if request.headers.get(header, None) != value: - return + return None body_raw = cached["response"].pop("body") - headers = CaseInsensitiveDict(data=cached["response"]["headers"]) + headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( + data=cached["response"]["headers"] + ) if headers.get("transfer-encoding", "") == "chunked": headers.pop("transfer-encoding") cached["response"]["headers"] = headers try: + body: IO[bytes] if body_file is None: body = io.BytesIO(body_raw) else: @@ -138,53 +144,63 @@ def prepare_response(self, request, cached, body_file=None): # TypeError: 'str' does not support the buffer interface body = io.BytesIO(body_raw.encode("utf8")) + # Discard any `strict` parameter serialized by older version of cachecontrol. + cached["response"].pop("strict", None) + return HTTPResponse(body=body, preload_content=False, **cached["response"]) - def _loads_v0(self, request, data, body_file=None): + def _loads_v0( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> None: # The original legacy cache data. This doesn't contain enough # information to construct everything we need, so we'll treat this as # a miss. - return - - def _loads_v1(self, request, data, body_file=None): - try: - cached = pickle.loads(data) - except ValueError: - return - - return self.prepare_response(request, cached, body_file) - - def _loads_v2(self, request, data, body_file=None): - assert body_file is None - try: - cached = json.loads(zlib.decompress(data).decode("utf8")) - except (ValueError, zlib.error): - return - - # We need to decode the items that we've base64 encoded - cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"]) - cached["response"]["headers"] = dict( - (_b64_decode_str(k), _b64_decode_str(v)) - for k, v in cached["response"]["headers"].items() - ) - cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"]) - cached["vary"] = dict( - (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v) - for k, v in cached["vary"].items() - ) - - return self.prepare_response(request, cached, body_file) - - def _loads_v3(self, request, data, body_file): + return None + + def _loads_v1( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: + # The "v1" pickled cache format. This is no longer supported + # for security reasons, so we treat it as a miss. + return None + + def _loads_v2( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: + # The "v2" compressed base64 cache format. + # This has been removed due to age and poor size/performance + # characteristics, so we treat it as a miss. + return None + + def _loads_v3( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> None: # Due to Python 2 encoding issues, it's impossible to know for sure # exactly how to load v3 entries, thus we'll treat these as a miss so # that they get rewritten out as v4 entries. 
- return - - def _loads_v4(self, request, data, body_file=None): + return None + + def _loads_v4( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: try: cached = msgpack.loads(data, raw=False) except ValueError: - return + return None return self.prepare_response(request, cached, body_file) diff --git a/pipenv/patched/pip/_vendor/cachecontrol/wrapper.py b/pipenv/patched/pip/_vendor/cachecontrol/wrapper.py index b6ee7f2039..be069b9611 100644 --- a/pipenv/patched/pip/_vendor/cachecontrol/wrapper.py +++ b/pipenv/patched/pip/_vendor/cachecontrol/wrapper.py @@ -1,22 +1,32 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations -from .adapter import CacheControlAdapter -from .cache import DictCache +from typing import TYPE_CHECKING, Collection +from pipenv.patched.pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pipenv.patched.pip._vendor.cachecontrol.cache import DictCache -def CacheControl( - sess, - cache=None, - cache_etags=True, - serializer=None, - heuristic=None, - controller_class=None, - adapter_class=None, - cacheable_methods=None, -): +if TYPE_CHECKING: + from pipenv.patched.pip._vendor import requests + + from pipenv.patched.pip._vendor.cachecontrol.cache import BaseCache + from pipenv.patched.pip._vendor.cachecontrol.controller import CacheController + from pipenv.patched.pip._vendor.cachecontrol.heuristics import BaseHeuristic + from pipenv.patched.pip._vendor.cachecontrol.serialize import Serializer + +def CacheControl( + sess: requests.Session, + cache: BaseCache | None = None, + cache_etags: bool = True, + serializer: Serializer | None = None, + heuristic: BaseHeuristic | None = None, + controller_class: type[CacheController] | None = None, + adapter_class: type[CacheControlAdapter] | None = None, + cacheable_methods: Collection[str] | None = None, +) -> requests.Session: cache = DictCache() if cache is None else cache adapter_class = adapter_class or CacheControlAdapter adapter = adapter_class( diff --git a/pipenv/patched/pip/_vendor/certifi/__init__.py b/pipenv/patched/pip/_vendor/certifi/__init__.py index 705f416d6b..8ce89cef70 100644 --- a/pipenv/patched/pip/_vendor/certifi/__init__.py +++ b/pipenv/patched/pip/_vendor/certifi/__init__.py @@ -1,4 +1,4 @@ from .core import contents, where __all__ = ["contents", "where"] -__version__ = "2023.05.07" +__version__ = "2023.07.22" diff --git a/pipenv/patched/pip/_vendor/certifi/cacert.pem b/pipenv/patched/pip/_vendor/certifi/cacert.pem index 7fd6b96a38..02123695d0 100644 --- a/pipenv/patched/pip/_vendor/certifi/cacert.pem +++ b/pipenv/patched/pip/_vendor/certifi/cacert.pem @@ -791,34 +791,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= -----END CERTIFICATE----- -# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Label: "Hongkong Post Root CA 1" -# Serial: 1000 -# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca -# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 -# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 ------BEGIN CERTIFICATE----- -MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx -FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg -Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG 
-A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr -b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ -jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn -PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh -ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 -nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h -q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED -MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC -mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 -7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB -oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs -EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO -fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi -AmvZWg== ------END CERTIFICATE----- - # Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. # Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. # Label: "SecureSign RootCA11" @@ -1676,7 +1648,6 @@ HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= -----END CERTIFICATE----- - # Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center # Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center # Label: "T-TeleSec GlobalRoot Class 2" @@ -4354,7 +4325,6 @@ ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR -----END CERTIFICATE----- - # Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. # Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. 
# Label: "Security Communication RootCA3" @@ -4478,3 +4448,188 @@ AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA 94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B 43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== -----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root E46" +# Serial: 88989738453351742415770396670917916916 +# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 +# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a +# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 +-----BEGIN CERTIFICATE----- +MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw +CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T +ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN +MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG +A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT +ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC +WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ +6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B +Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa +qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q +4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root R46" +# Serial: 156256931880233212765902055439220583700 +# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 +# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 +# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 +-----BEGIN CERTIFICATE----- +MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf +MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD +Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw +HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY +MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp +YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa +ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz +SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf +iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X +ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 +IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS +VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE +SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu ++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt +8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L +HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt +zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c +mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ 
+YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 +gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA +Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB +JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX +DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui +TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 +dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 +LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp +0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY +QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS RSA Root CA 2022" +# Serial: 148535279242832292258835760425842727825 +# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da +# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca +# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed +-----BEGIN CERTIFICATE----- +MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO +MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD +DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX +DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw +b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC +AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP +L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY +t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins +S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 +PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO +L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 +R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w +dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS ++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS +d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG +AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f +gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j +BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z +NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt +hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM +QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf +R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ +DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW +P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy +lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq +bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w +AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q +r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji +Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU +98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS ECC Root CA 2022" +# Serial: 26605119622390491762507526719404364228 +# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 +# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 +# SHA256 Fingerprint: 
c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 +-----BEGIN CERTIFICATE----- +MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT +U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 +MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh +dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm +acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN +SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME +GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW +uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp +15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN +b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA ECC TLS 2021" +# Serial: 81873346711060652204712539181482831616 +# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 +# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd +# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 +-----BEGIN CERTIFICATE----- +MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w +LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w +CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 +MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF +Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI +zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X +tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 +AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 +KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD +aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu +CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo +9H1/IISpQuQo +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA RSA TLS 2021" +# Serial: 111436099570196163832749341232207667876 +# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 +# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 +# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f +-----BEGIN CERTIFICATE----- +MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM +MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx +MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 +MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD +QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z +4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv +Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ +kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs +GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln +nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh +3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD 
+0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy +geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 +ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB +c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI +pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS +4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs +o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ +qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw +xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM +rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 +AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR +0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY +o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 +dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE +oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== +-----END CERTIFICATE----- diff --git a/pipenv/vendor/pep517/LICENSE b/pipenv/patched/pip/_vendor/truststore/LICENSE similarity index 96% rename from pipenv/vendor/pep517/LICENSE rename to pipenv/patched/pip/_vendor/truststore/LICENSE index b0ae9dbc26..7ec568c113 100644 --- a/pipenv/vendor/pep517/LICENSE +++ b/pipenv/patched/pip/_vendor/truststore/LICENSE @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2017 Thomas Kluyver +Copyright (c) 2022 Seth Michael Larson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/pipenv/patched/pip/_vendor/truststore/__init__.py b/pipenv/patched/pip/_vendor/truststore/__init__.py new file mode 100644 index 0000000000..59930f455b --- /dev/null +++ b/pipenv/patched/pip/_vendor/truststore/__init__.py @@ -0,0 +1,13 @@ +"""Verify certificates using native system trust stores""" + +import sys as _sys + +if _sys.version_info < (3, 10): + raise ImportError("truststore requires Python 3.10 or later") + +from ._api import SSLContext, extract_from_ssl, inject_into_ssl # noqa: E402 + +del _api, _sys # type: ignore[name-defined] # noqa: F821 + +__all__ = ["SSLContext", "inject_into_ssl", "extract_from_ssl"] +__version__ = "0.8.0" diff --git a/pipenv/patched/pip/_vendor/truststore/_api.py b/pipenv/patched/pip/_vendor/truststore/_api.py new file mode 100644 index 0000000000..60121d58da --- /dev/null +++ b/pipenv/patched/pip/_vendor/truststore/_api.py @@ -0,0 +1,302 @@ +import os +import platform +import socket +import ssl +import typing + +import _ssl # type: ignore[import] + +from ._ssl_constants import ( + _original_SSLContext, + _original_super_SSLContext, + _truststore_SSLContext_dunder_class, + _truststore_SSLContext_super_class, +) + +if platform.system() == "Windows": + from ._windows import _configure_context, _verify_peercerts_impl +elif platform.system() == "Darwin": + from ._macos import _configure_context, _verify_peercerts_impl +else: + from ._openssl import _configure_context, _verify_peercerts_impl + +if typing.TYPE_CHECKING: + from pipenv.patched.pip._vendor.typing_extensions import Buffer + +# From typeshed/stdlib/ssl.pyi +_StrOrBytesPath: typing.TypeAlias = str | bytes | os.PathLike[str] | os.PathLike[bytes] +_PasswordType: typing.TypeAlias = str | bytes | typing.Callable[[], str | bytes] + + +def inject_into_ssl() -> None: + """Injects the 
:class:`truststore.SSLContext` into the ``ssl`` + module by replacing :class:`ssl.SSLContext`. + """ + setattr(ssl, "SSLContext", SSLContext) + # urllib3 holds on to its own reference of ssl.SSLContext + # so we need to replace that reference too. + try: + import pipenv.patched.pip._vendor.urllib3.util.ssl_ as urllib3_ssl + + setattr(urllib3_ssl, "SSLContext", SSLContext) + except ImportError: + pass + + +def extract_from_ssl() -> None: + """Restores the :class:`ssl.SSLContext` class to its original state""" + setattr(ssl, "SSLContext", _original_SSLContext) + try: + import pipenv.patched.pip._vendor.urllib3.util.ssl_ as urllib3_ssl + + urllib3_ssl.SSLContext = _original_SSLContext + except ImportError: + pass + + +class SSLContext(_truststore_SSLContext_super_class): # type: ignore[misc] + """SSLContext API that uses system certificates on all platforms""" + + @property # type: ignore[misc] + def __class__(self) -> type: + # Dirty hack to get around isinstance() checks + # for ssl.SSLContext instances in aiohttp/trustme + # when using non-CPython implementations. + return _truststore_SSLContext_dunder_class or SSLContext + + def __init__(self, protocol: int = None) -> None: # type: ignore[assignment] + self._ctx = _original_SSLContext(protocol) + + class TruststoreSSLObject(ssl.SSLObject): + # This object exists because wrap_bio() doesn't + # immediately do the handshake so we need to do + # certificate verifications after SSLObject.do_handshake() + + def do_handshake(self) -> None: + ret = super().do_handshake() + _verify_peercerts(self, server_hostname=self.server_hostname) + return ret + + self._ctx.sslobject_class = TruststoreSSLObject + + def wrap_socket( + self, + sock: socket.socket, + server_side: bool = False, + do_handshake_on_connect: bool = True, + suppress_ragged_eofs: bool = True, + server_hostname: str | None = None, + session: ssl.SSLSession | None = None, + ) -> ssl.SSLSocket: + # Use a context manager here because the + # inner SSLContext holds on to our state + # but also does the actual handshake. 
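+        # _configure_context() temporarily relaxes the wrapped context's own
+        # verification (see the platform-specific implementations) so the
+        # handshake can complete; _verify_peercerts() below then performs
+        # the real check against the OS trust store.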
+ with _configure_context(self._ctx): + ssl_sock = self._ctx.wrap_socket( + sock, + server_side=server_side, + server_hostname=server_hostname, + do_handshake_on_connect=do_handshake_on_connect, + suppress_ragged_eofs=suppress_ragged_eofs, + session=session, + ) + try: + _verify_peercerts(ssl_sock, server_hostname=server_hostname) + except Exception: + ssl_sock.close() + raise + return ssl_sock + + def wrap_bio( + self, + incoming: ssl.MemoryBIO, + outgoing: ssl.MemoryBIO, + server_side: bool = False, + server_hostname: str | None = None, + session: ssl.SSLSession | None = None, + ) -> ssl.SSLObject: + with _configure_context(self._ctx): + ssl_obj = self._ctx.wrap_bio( + incoming, + outgoing, + server_hostname=server_hostname, + server_side=server_side, + session=session, + ) + return ssl_obj + + def load_verify_locations( + self, + cafile: str | bytes | os.PathLike[str] | os.PathLike[bytes] | None = None, + capath: str | bytes | os.PathLike[str] | os.PathLike[bytes] | None = None, + cadata: typing.Union[str, "Buffer", None] = None, + ) -> None: + return self._ctx.load_verify_locations( + cafile=cafile, capath=capath, cadata=cadata + ) + + def load_cert_chain( + self, + certfile: _StrOrBytesPath, + keyfile: _StrOrBytesPath | None = None, + password: _PasswordType | None = None, + ) -> None: + return self._ctx.load_cert_chain( + certfile=certfile, keyfile=keyfile, password=password + ) + + def load_default_certs( + self, purpose: ssl.Purpose = ssl.Purpose.SERVER_AUTH + ) -> None: + return self._ctx.load_default_certs(purpose) + + def set_alpn_protocols(self, alpn_protocols: typing.Iterable[str]) -> None: + return self._ctx.set_alpn_protocols(alpn_protocols) + + def set_npn_protocols(self, npn_protocols: typing.Iterable[str]) -> None: + return self._ctx.set_npn_protocols(npn_protocols) + + def set_ciphers(self, __cipherlist: str) -> None: + return self._ctx.set_ciphers(__cipherlist) + + def get_ciphers(self) -> typing.Any: + return self._ctx.get_ciphers() + + def session_stats(self) -> dict[str, int]: + return self._ctx.session_stats() + + def cert_store_stats(self) -> dict[str, int]: + raise NotImplementedError() + + @typing.overload + def get_ca_certs( + self, binary_form: typing.Literal[False] = ... + ) -> list[typing.Any]: + ... + + @typing.overload + def get_ca_certs(self, binary_form: typing.Literal[True] = ...) -> list[bytes]: + ... + + @typing.overload + def get_ca_certs(self, binary_form: bool = ...) -> typing.Any: + ... 
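+    # (There is no static CA list to report when the OS trust store is
+    # the source of truth, hence the NotImplementedError below.)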
+ + def get_ca_certs(self, binary_form: bool = False) -> list[typing.Any] | list[bytes]: + raise NotImplementedError() + + @property + def check_hostname(self) -> bool: + return self._ctx.check_hostname + + @check_hostname.setter + def check_hostname(self, value: bool) -> None: + self._ctx.check_hostname = value + + @property + def hostname_checks_common_name(self) -> bool: + return self._ctx.hostname_checks_common_name + + @hostname_checks_common_name.setter + def hostname_checks_common_name(self, value: bool) -> None: + self._ctx.hostname_checks_common_name = value + + @property + def keylog_filename(self) -> str: + return self._ctx.keylog_filename + + @keylog_filename.setter + def keylog_filename(self, value: str) -> None: + self._ctx.keylog_filename = value + + @property + def maximum_version(self) -> ssl.TLSVersion: + return self._ctx.maximum_version + + @maximum_version.setter + def maximum_version(self, value: ssl.TLSVersion) -> None: + _original_super_SSLContext.maximum_version.__set__( # type: ignore[attr-defined] + self._ctx, value + ) + + @property + def minimum_version(self) -> ssl.TLSVersion: + return self._ctx.minimum_version + + @minimum_version.setter + def minimum_version(self, value: ssl.TLSVersion) -> None: + _original_super_SSLContext.minimum_version.__set__( # type: ignore[attr-defined] + self._ctx, value + ) + + @property + def options(self) -> ssl.Options: + return self._ctx.options + + @options.setter + def options(self, value: ssl.Options) -> None: + _original_super_SSLContext.options.__set__( # type: ignore[attr-defined] + self._ctx, value + ) + + @property + def post_handshake_auth(self) -> bool: + return self._ctx.post_handshake_auth + + @post_handshake_auth.setter + def post_handshake_auth(self, value: bool) -> None: + self._ctx.post_handshake_auth = value + + @property + def protocol(self) -> ssl._SSLMethod: + return self._ctx.protocol + + @property + def security_level(self) -> int: + return self._ctx.security_level + + @property + def verify_flags(self) -> ssl.VerifyFlags: + return self._ctx.verify_flags + + @verify_flags.setter + def verify_flags(self, value: ssl.VerifyFlags) -> None: + _original_super_SSLContext.verify_flags.__set__( # type: ignore[attr-defined] + self._ctx, value + ) + + @property + def verify_mode(self) -> ssl.VerifyMode: + return self._ctx.verify_mode + + @verify_mode.setter + def verify_mode(self, value: ssl.VerifyMode) -> None: + _original_super_SSLContext.verify_mode.__set__( # type: ignore[attr-defined] + self._ctx, value + ) + + +def _verify_peercerts( + sock_or_sslobj: ssl.SSLSocket | ssl.SSLObject, server_hostname: str | None +) -> None: + """ + Verifies the peer certificates from an SSLSocket or SSLObject + against the certificates in the OS trust store. + """ + sslobj: ssl.SSLObject = sock_or_sslobj # type: ignore[assignment] + try: + while not hasattr(sslobj, "get_unverified_chain"): + sslobj = sslobj._sslobj # type: ignore[attr-defined] + except AttributeError: + pass + + # SSLObject.get_unverified_chain() returns 'None' + # if the peer sends no certificates. This is common + # for the server-side scenario. 
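+    # Each certificate is serialized to DER bytes, the format the
+    # platform-specific _verify_peercerts_impl() functions consume.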
+ unverified_chain: typing.Sequence[_ssl.Certificate] = ( + sslobj.get_unverified_chain() or () # type: ignore[attr-defined] + ) + cert_bytes = [cert.public_bytes(_ssl.ENCODING_DER) for cert in unverified_chain] + _verify_peercerts_impl( + sock_or_sslobj.context, cert_bytes, server_hostname=server_hostname + ) diff --git a/pipenv/patched/pip/_vendor/truststore/_macos.py b/pipenv/patched/pip/_vendor/truststore/_macos.py new file mode 100644 index 0000000000..7dc440bf36 --- /dev/null +++ b/pipenv/patched/pip/_vendor/truststore/_macos.py @@ -0,0 +1,501 @@ +import contextlib +import ctypes +import platform +import ssl +import typing +from ctypes import ( + CDLL, + POINTER, + c_bool, + c_char_p, + c_int32, + c_long, + c_uint32, + c_ulong, + c_void_p, +) +from ctypes.util import find_library + +from ._ssl_constants import _set_ssl_context_verify_mode + +_mac_version = platform.mac_ver()[0] +_mac_version_info = tuple(map(int, _mac_version.split("."))) +if _mac_version_info < (10, 8): + raise ImportError( + f"Only OS X 10.8 and newer are supported, not {_mac_version_info[0]}.{_mac_version_info[1]}" + ) + + +def _load_cdll(name: str, macos10_16_path: str) -> CDLL: + """Loads a CDLL by name, falling back to known path on 10.16+""" + try: + # Big Sur is technically 11 but we use 10.16 due to the Big Sur + # beta being labeled as 10.16. + path: str | None + if _mac_version_info >= (10, 16): + path = macos10_16_path + else: + path = find_library(name) + if not path: + raise OSError # Caught and reraised as 'ImportError' + return CDLL(path, use_errno=True) + except OSError: + raise ImportError(f"The library {name} failed to load") from None + + +Security = _load_cdll( + "Security", "/System/Library/Frameworks/Security.framework/Security" +) +CoreFoundation = _load_cdll( + "CoreFoundation", + "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation", +) + +Boolean = c_bool +CFIndex = c_long +CFStringEncoding = c_uint32 +CFData = c_void_p +CFString = c_void_p +CFArray = c_void_p +CFMutableArray = c_void_p +CFError = c_void_p +CFType = c_void_p +CFTypeID = c_ulong +CFTypeRef = POINTER(CFType) +CFAllocatorRef = c_void_p + +OSStatus = c_int32 + +CFErrorRef = POINTER(CFError) +CFDataRef = POINTER(CFData) +CFStringRef = POINTER(CFString) +CFArrayRef = POINTER(CFArray) +CFMutableArrayRef = POINTER(CFMutableArray) +CFArrayCallBacks = c_void_p +CFOptionFlags = c_uint32 + +SecCertificateRef = POINTER(c_void_p) +SecPolicyRef = POINTER(c_void_p) +SecTrustRef = POINTER(c_void_p) +SecTrustResultType = c_uint32 +SecTrustOptionFlags = c_uint32 + +try: + Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef] + Security.SecCertificateCreateWithData.restype = SecCertificateRef + + Security.SecCertificateCopyData.argtypes = [SecCertificateRef] + Security.SecCertificateCopyData.restype = CFDataRef + + Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p] + Security.SecCopyErrorMessageString.restype = CFStringRef + + Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef] + Security.SecTrustSetAnchorCertificates.restype = OSStatus + + Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean] + Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus + + Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)] + Security.SecTrustEvaluate.restype = OSStatus + + Security.SecPolicyCreateRevocation.argtypes = [CFOptionFlags] + Security.SecPolicyCreateRevocation.restype = SecPolicyRef + + 
Security.SecPolicyCreateSSL.argtypes = [Boolean, CFStringRef] + Security.SecPolicyCreateSSL.restype = SecPolicyRef + + Security.SecTrustCreateWithCertificates.argtypes = [ + CFTypeRef, + CFTypeRef, + POINTER(SecTrustRef), + ] + Security.SecTrustCreateWithCertificates.restype = OSStatus + + Security.SecTrustGetTrustResult.argtypes = [ + SecTrustRef, + POINTER(SecTrustResultType), + ] + Security.SecTrustGetTrustResult.restype = OSStatus + + Security.SecTrustRef = SecTrustRef # type: ignore[attr-defined] + Security.SecTrustResultType = SecTrustResultType # type: ignore[attr-defined] + Security.OSStatus = OSStatus # type: ignore[attr-defined] + + kSecRevocationUseAnyAvailableMethod = 3 + kSecRevocationRequirePositiveResponse = 8 + + CoreFoundation.CFRelease.argtypes = [CFTypeRef] + CoreFoundation.CFRelease.restype = None + + CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef] + CoreFoundation.CFGetTypeID.restype = CFTypeID + + CoreFoundation.CFStringCreateWithCString.argtypes = [ + CFAllocatorRef, + c_char_p, + CFStringEncoding, + ] + CoreFoundation.CFStringCreateWithCString.restype = CFStringRef + + CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding] + CoreFoundation.CFStringGetCStringPtr.restype = c_char_p + + CoreFoundation.CFStringGetCString.argtypes = [ + CFStringRef, + c_char_p, + CFIndex, + CFStringEncoding, + ] + CoreFoundation.CFStringGetCString.restype = c_bool + + CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex] + CoreFoundation.CFDataCreate.restype = CFDataRef + + CoreFoundation.CFDataGetLength.argtypes = [CFDataRef] + CoreFoundation.CFDataGetLength.restype = CFIndex + + CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef] + CoreFoundation.CFDataGetBytePtr.restype = c_void_p + + CoreFoundation.CFArrayCreate.argtypes = [ + CFAllocatorRef, + POINTER(CFTypeRef), + CFIndex, + CFArrayCallBacks, + ] + CoreFoundation.CFArrayCreate.restype = CFArrayRef + + CoreFoundation.CFArrayCreateMutable.argtypes = [ + CFAllocatorRef, + CFIndex, + CFArrayCallBacks, + ] + CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef + + CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p] + CoreFoundation.CFArrayAppendValue.restype = None + + CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef] + CoreFoundation.CFArrayGetCount.restype = CFIndex + + CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex] + CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p + + CoreFoundation.CFErrorGetCode.argtypes = [CFErrorRef] + CoreFoundation.CFErrorGetCode.restype = CFIndex + + CoreFoundation.CFErrorCopyDescription.argtypes = [CFErrorRef] + CoreFoundation.CFErrorCopyDescription.restype = CFStringRef + + CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( # type: ignore[attr-defined] + CoreFoundation, "kCFAllocatorDefault" + ) + CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll( # type: ignore[attr-defined] + CoreFoundation, "kCFTypeArrayCallBacks" + ) + + CoreFoundation.CFTypeRef = CFTypeRef # type: ignore[attr-defined] + CoreFoundation.CFArrayRef = CFArrayRef # type: ignore[attr-defined] + CoreFoundation.CFStringRef = CFStringRef # type: ignore[attr-defined] + CoreFoundation.CFErrorRef = CFErrorRef # type: ignore[attr-defined] + +except AttributeError: + raise ImportError("Error initializing ctypes") from None + + +def _handle_osstatus(result: OSStatus, _: typing.Any, args: typing.Any) -> typing.Any: + """ + Raises an error if the OSStatus value is non-zero. 
+ """ + if int(result) == 0: + return args + + # Returns a CFString which we need to transform + # into a UTF-8 Python string. + error_message_cfstring = None + try: + error_message_cfstring = Security.SecCopyErrorMessageString(result, None) + + # First step is convert the CFString into a C string pointer. + # We try the fast no-copy way first. + error_message_cfstring_c_void_p = ctypes.cast( + error_message_cfstring, ctypes.POINTER(ctypes.c_void_p) + ) + message = CoreFoundation.CFStringGetCStringPtr( + error_message_cfstring_c_void_p, CFConst.kCFStringEncodingUTF8 + ) + + # Quoting the Apple dev docs: + # + # "A pointer to a C string or NULL if the internal + # storage of theString does not allow this to be + # returned efficiently." + # + # So we need to get our hands dirty. + if message is None: + buffer = ctypes.create_string_buffer(1024) + result = CoreFoundation.CFStringGetCString( + error_message_cfstring_c_void_p, + buffer, + 1024, + CFConst.kCFStringEncodingUTF8, + ) + if not result: + raise OSError("Error copying C string from CFStringRef") + message = buffer.value + + finally: + if error_message_cfstring is not None: + CoreFoundation.CFRelease(error_message_cfstring) + + # If no message can be found for this status we come + # up with a generic one that forwards the status code. + if message is None or message == "": + message = f"SecureTransport operation returned a non-zero OSStatus: {result}" + + raise ssl.SSLError(message) + + +Security.SecTrustCreateWithCertificates.errcheck = _handle_osstatus # type: ignore[assignment] +Security.SecTrustSetAnchorCertificates.errcheck = _handle_osstatus # type: ignore[assignment] +Security.SecTrustGetTrustResult.errcheck = _handle_osstatus # type: ignore[assignment] + + +class CFConst: + """CoreFoundation constants""" + + kCFStringEncodingUTF8 = CFStringEncoding(0x08000100) + + errSecIncompleteCertRevocationCheck = -67635 + errSecHostNameMismatch = -67602 + errSecCertificateExpired = -67818 + errSecNotTrusted = -67843 + + +def _bytes_to_cf_data_ref(value: bytes) -> CFDataRef: # type: ignore[valid-type] + return CoreFoundation.CFDataCreate( # type: ignore[no-any-return] + CoreFoundation.kCFAllocatorDefault, value, len(value) + ) + + +def _bytes_to_cf_string(value: bytes) -> CFString: + """ + Given a Python binary data, create a CFString. + The string must be CFReleased by the caller. + """ + c_str = ctypes.c_char_p(value) + cf_str = CoreFoundation.CFStringCreateWithCString( + CoreFoundation.kCFAllocatorDefault, + c_str, + CFConst.kCFStringEncodingUTF8, + ) + return cf_str # type: ignore[no-any-return] + + +def _cf_string_ref_to_str(cf_string_ref: CFStringRef) -> str | None: # type: ignore[valid-type] + """ + Creates a Unicode string from a CFString object. Used entirely for error + reporting. + Yes, it annoys me quite a lot that this function is this complex. + """ + + string = CoreFoundation.CFStringGetCStringPtr( + cf_string_ref, CFConst.kCFStringEncodingUTF8 + ) + if string is None: + buffer = ctypes.create_string_buffer(1024) + result = CoreFoundation.CFStringGetCString( + cf_string_ref, buffer, 1024, CFConst.kCFStringEncodingUTF8 + ) + if not result: + raise OSError("Error copying C string from CFStringRef") + string = buffer.value + if string is not None: + string = string.decode("utf-8") + return string # type: ignore[no-any-return] + + +def _der_certs_to_cf_cert_array(certs: list[bytes]) -> CFMutableArrayRef: # type: ignore[valid-type] + """Builds a CFArray of SecCertificateRefs from a list of DER-encoded certificates. 
+ Responsibility of the caller to call CoreFoundation.CFRelease on the CFArray. + """ + cf_array = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + if not cf_array: + raise MemoryError("Unable to allocate memory!") + + for cert_data in certs: + cf_data = None + sec_cert_ref = None + try: + cf_data = _bytes_to_cf_data_ref(cert_data) + sec_cert_ref = Security.SecCertificateCreateWithData( + CoreFoundation.kCFAllocatorDefault, cf_data + ) + CoreFoundation.CFArrayAppendValue(cf_array, sec_cert_ref) + finally: + if cf_data: + CoreFoundation.CFRelease(cf_data) + if sec_cert_ref: + CoreFoundation.CFRelease(sec_cert_ref) + + return cf_array # type: ignore[no-any-return] + + +@contextlib.contextmanager +def _configure_context(ctx: ssl.SSLContext) -> typing.Iterator[None]: + check_hostname = ctx.check_hostname + verify_mode = ctx.verify_mode + ctx.check_hostname = False + _set_ssl_context_verify_mode(ctx, ssl.CERT_NONE) + try: + yield + finally: + ctx.check_hostname = check_hostname + _set_ssl_context_verify_mode(ctx, verify_mode) + + +def _verify_peercerts_impl( + ssl_context: ssl.SSLContext, + cert_chain: list[bytes], + server_hostname: str | None = None, +) -> None: + certs = None + policies = None + trust = None + cf_error = None + try: + if server_hostname is not None: + cf_str_hostname = None + try: + cf_str_hostname = _bytes_to_cf_string(server_hostname.encode("ascii")) + ssl_policy = Security.SecPolicyCreateSSL(True, cf_str_hostname) + finally: + if cf_str_hostname: + CoreFoundation.CFRelease(cf_str_hostname) + else: + ssl_policy = Security.SecPolicyCreateSSL(True, None) + + policies = ssl_policy + if ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_CHAIN: + # Add explicit policy requiring positive revocation checks + policies = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + CoreFoundation.CFArrayAppendValue(policies, ssl_policy) + CoreFoundation.CFRelease(ssl_policy) + revocation_policy = Security.SecPolicyCreateRevocation( + kSecRevocationUseAnyAvailableMethod + | kSecRevocationRequirePositiveResponse + ) + CoreFoundation.CFArrayAppendValue(policies, revocation_policy) + CoreFoundation.CFRelease(revocation_policy) + elif ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_LEAF: + raise NotImplementedError("VERIFY_CRL_CHECK_LEAF not implemented for macOS") + + certs = None + try: + certs = _der_certs_to_cf_cert_array(cert_chain) + + # Now that we have certificates loaded and a SecPolicy + # we can finally create a SecTrust object! + trust = Security.SecTrustRef() + Security.SecTrustCreateWithCertificates( + certs, policies, ctypes.byref(trust) + ) + + finally: + # The certs are now being held by SecTrust so we can + # release our handles for the array. + if certs: + CoreFoundation.CFRelease(certs) + + # If there are additional trust anchors to load we need to transform + # the list of DER-encoded certificates into a CFArray. Otherwise + # pass 'None' to signal that we only want system / fetched certificates. 
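+        # "Additional trust anchors" are any CA certificates the user loaded
+        # onto the wrapped context, e.g. via load_verify_locations();
+        # get_ca_certs(binary_form=True) returns them DER-encoded.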
+        ctx_ca_certs_der: list[bytes] | None = ssl_context.get_ca_certs(
+            binary_form=True
+        )
+        if ctx_ca_certs_der:
+            ctx_ca_certs = None
+            try:
+                ctx_ca_certs = _der_certs_to_cf_cert_array(ctx_ca_certs_der)
+                Security.SecTrustSetAnchorCertificates(trust, ctx_ca_certs)
+            finally:
+                if ctx_ca_certs:
+                    CoreFoundation.CFRelease(ctx_ca_certs)
+        else:
+            Security.SecTrustSetAnchorCertificates(trust, None)
+
+        cf_error = CoreFoundation.CFErrorRef()
+        sec_trust_eval_result = Security.SecTrustEvaluateWithError(
+            trust, ctypes.byref(cf_error)
+        )
+        # sec_trust_eval_result is a bool (0 or 1)
+        # where 1 means that the certs are trusted.
+        if sec_trust_eval_result == 1:
+            is_trusted = True
+        elif sec_trust_eval_result == 0:
+            is_trusted = False
+        else:
+            raise ssl.SSLError(
+                f"Unknown result from Security.SecTrustEvaluateWithError: {sec_trust_eval_result!r}"
+            )
+
+        cf_error_code = 0
+        if not is_trusted:
+            cf_error_code = CoreFoundation.CFErrorGetCode(cf_error)
+
+            # If the error is a known failure that we're
+            # explicitly okay with from SSLContext configuration
+            # we can set is_trusted accordingly.
+            if ssl_context.verify_mode != ssl.CERT_REQUIRED and (
+                cf_error_code == CFConst.errSecNotTrusted
+                or cf_error_code == CFConst.errSecCertificateExpired
+            ):
+                is_trusted = True
+            elif (
+                not ssl_context.check_hostname
+                and cf_error_code == CFConst.errSecHostNameMismatch
+            ):
+                is_trusted = True
+
+        # If we're still not trusted then we start to
+        # construct and raise the SSLCertVerificationError.
+        if not is_trusted:
+            cf_error_string_ref = None
+            try:
+                cf_error_string_ref = CoreFoundation.CFErrorCopyDescription(cf_error)
+
+                # Can this ever return 'None' if there's a CFError?
+                cf_error_message = (
+                    _cf_string_ref_to_str(cf_error_string_ref)
+                    or "Certificate verification failed"
+                )
+
+                # TODO: Not sure if we need the SecTrustResultType for anything?
+                # We only care whether or not it's a success or failure for now.
+                sec_trust_result_type = Security.SecTrustResultType()
+                Security.SecTrustGetTrustResult(
+                    trust, ctypes.byref(sec_trust_result_type)
+                )
+
+                err = ssl.SSLCertVerificationError(cf_error_message)
+                err.verify_message = cf_error_message
+                err.verify_code = cf_error_code
+                raise err
+            finally:
+                if cf_error_string_ref:
+                    CoreFoundation.CFRelease(cf_error_string_ref)
+
+    finally:
+        if policies:
+            CoreFoundation.CFRelease(policies)
+        if trust:
+            CoreFoundation.CFRelease(trust)
diff --git a/pipenv/patched/pip/_vendor/truststore/_openssl.py b/pipenv/patched/pip/_vendor/truststore/_openssl.py
new file mode 100644
index 0000000000..9951cf75c4
--- /dev/null
+++ b/pipenv/patched/pip/_vendor/truststore/_openssl.py
@@ -0,0 +1,66 @@
+import contextlib
+import os
+import re
+import ssl
+import typing
+
+# candidates based on https://github.com/tiran/certifi-system-store by Christian Heimes
+_CA_FILE_CANDIDATES = [
+    # Alpine, Arch, Fedora 34+, OpenWRT, RHEL 9+, BSD
+    "/etc/ssl/cert.pem",
+    # Fedora <= 34, RHEL <= 9, CentOS <= 9
+    "/etc/pki/tls/cert.pem",
+    # Debian, Ubuntu (requires ca-certificates)
+    "/etc/ssl/certs/ca-certificates.crt",
+    # SUSE
+    "/etc/ssl/ca-bundle.pem",
+]
+
+_HASHED_CERT_FILENAME_RE = re.compile(r"^[0-9a-fA-F]{8}\.[0-9]$")
+
+
+@contextlib.contextmanager
+def _configure_context(ctx: ssl.SSLContext) -> typing.Iterator[None]:
+    # First, check whether the default locations from OpenSSL
+    # seem like they will give us a usable set of CA certs.
+ # ssl.get_default_verify_paths already takes care of: + # - getting cafile from either the SSL_CERT_FILE env var + # or the path configured when OpenSSL was compiled, + # and verifying that that path exists + # - getting capath from either the SSL_CERT_DIR env var + # or the path configured when OpenSSL was compiled, + # and verifying that that path exists + # In addition we'll check whether capath appears to contain certs. + defaults = ssl.get_default_verify_paths() + if defaults.cafile or (defaults.capath and _capath_contains_certs(defaults.capath)): + ctx.set_default_verify_paths() + else: + # cafile from OpenSSL doesn't exist + # and capath from OpenSSL doesn't contain certs. + # Let's search other common locations instead. + for cafile in _CA_FILE_CANDIDATES: + if os.path.isfile(cafile): + ctx.load_verify_locations(cafile=cafile) + break + + yield + + +def _capath_contains_certs(capath: str) -> bool: + """Check whether capath exists and contains certs in the expected format.""" + if not os.path.isdir(capath): + return False + for name in os.listdir(capath): + if _HASHED_CERT_FILENAME_RE.match(name): + return True + return False + + +def _verify_peercerts_impl( + ssl_context: ssl.SSLContext, + cert_chain: list[bytes], + server_hostname: str | None = None, +) -> None: + # This is a no-op because we've enabled SSLContext's built-in + # verification via verify_mode=CERT_REQUIRED, and don't need to repeat it. + pass diff --git a/pipenv/patched/pip/_vendor/truststore/_ssl_constants.py b/pipenv/patched/pip/_vendor/truststore/_ssl_constants.py new file mode 100644 index 0000000000..b1ee7a3cb1 --- /dev/null +++ b/pipenv/patched/pip/_vendor/truststore/_ssl_constants.py @@ -0,0 +1,31 @@ +import ssl +import sys +import typing + +# Hold on to the original class so we can create it consistently +# even if we inject our own SSLContext into the ssl module. +_original_SSLContext = ssl.SSLContext +_original_super_SSLContext = super(_original_SSLContext, _original_SSLContext) + +# CPython is known to be good, but non-CPython implementations +# may implement SSLContext differently so to be safe we don't +# subclass the SSLContext. + +# This is returned by truststore.SSLContext.__class__() +_truststore_SSLContext_dunder_class: typing.Optional[type] + +# This value is the superclass of truststore.SSLContext. 
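+# On CPython it is ssl.SSLContext itself, since subclassing it is known to
+# be safe there; elsewhere it is plain 'object', and __class__ is faked
+# (see truststore.SSLContext.__class__) so isinstance() checks still pass.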
+_truststore_SSLContext_super_class: type + +if sys.implementation.name == "cpython": + _truststore_SSLContext_super_class = _original_SSLContext + _truststore_SSLContext_dunder_class = None +else: + _truststore_SSLContext_super_class = object + _truststore_SSLContext_dunder_class = _original_SSLContext + + +def _set_ssl_context_verify_mode( + ssl_context: ssl.SSLContext, verify_mode: ssl.VerifyMode +) -> None: + _original_super_SSLContext.verify_mode.__set__(ssl_context, verify_mode) # type: ignore[attr-defined] diff --git a/pipenv/patched/pip/_vendor/truststore/_windows.py b/pipenv/patched/pip/_vendor/truststore/_windows.py new file mode 100644 index 0000000000..3de4960a1b --- /dev/null +++ b/pipenv/patched/pip/_vendor/truststore/_windows.py @@ -0,0 +1,554 @@ +import contextlib +import ssl +import typing +from ctypes import WinDLL # type: ignore +from ctypes import WinError # type: ignore +from ctypes import ( + POINTER, + Structure, + c_char_p, + c_ulong, + c_void_p, + c_wchar_p, + cast, + create_unicode_buffer, + pointer, + sizeof, +) +from ctypes.wintypes import ( + BOOL, + DWORD, + HANDLE, + LONG, + LPCSTR, + LPCVOID, + LPCWSTR, + LPFILETIME, + LPSTR, + LPWSTR, +) +from typing import TYPE_CHECKING, Any + +from ._ssl_constants import _set_ssl_context_verify_mode + +HCERTCHAINENGINE = HANDLE +HCERTSTORE = HANDLE +HCRYPTPROV_LEGACY = HANDLE + + +class CERT_CONTEXT(Structure): + _fields_ = ( + ("dwCertEncodingType", DWORD), + ("pbCertEncoded", c_void_p), + ("cbCertEncoded", DWORD), + ("pCertInfo", c_void_p), + ("hCertStore", HCERTSTORE), + ) + + +PCERT_CONTEXT = POINTER(CERT_CONTEXT) +PCCERT_CONTEXT = POINTER(PCERT_CONTEXT) + + +class CERT_ENHKEY_USAGE(Structure): + _fields_ = ( + ("cUsageIdentifier", DWORD), + ("rgpszUsageIdentifier", POINTER(LPSTR)), + ) + + +PCERT_ENHKEY_USAGE = POINTER(CERT_ENHKEY_USAGE) + + +class CERT_USAGE_MATCH(Structure): + _fields_ = ( + ("dwType", DWORD), + ("Usage", CERT_ENHKEY_USAGE), + ) + + +class CERT_CHAIN_PARA(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("RequestedUsage", CERT_USAGE_MATCH), + ("RequestedIssuancePolicy", CERT_USAGE_MATCH), + ("dwUrlRetrievalTimeout", DWORD), + ("fCheckRevocationFreshnessTime", BOOL), + ("dwRevocationFreshnessTime", DWORD), + ("pftCacheResync", LPFILETIME), + ("pStrongSignPara", c_void_p), + ("dwStrongSignFlags", DWORD), + ) + + +if TYPE_CHECKING: + PCERT_CHAIN_PARA = pointer[CERT_CHAIN_PARA] # type: ignore[misc] +else: + PCERT_CHAIN_PARA = POINTER(CERT_CHAIN_PARA) + + +class CERT_TRUST_STATUS(Structure): + _fields_ = ( + ("dwErrorStatus", DWORD), + ("dwInfoStatus", DWORD), + ) + + +class CERT_CHAIN_ELEMENT(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("pCertContext", PCERT_CONTEXT), + ("TrustStatus", CERT_TRUST_STATUS), + ("pRevocationInfo", c_void_p), + ("pIssuanceUsage", PCERT_ENHKEY_USAGE), + ("pApplicationUsage", PCERT_ENHKEY_USAGE), + ("pwszExtendedErrorInfo", LPCWSTR), + ) + + +PCERT_CHAIN_ELEMENT = POINTER(CERT_CHAIN_ELEMENT) + + +class CERT_SIMPLE_CHAIN(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("TrustStatus", CERT_TRUST_STATUS), + ("cElement", DWORD), + ("rgpElement", POINTER(PCERT_CHAIN_ELEMENT)), + ("pTrustListInfo", c_void_p), + ("fHasRevocationFreshnessTime", BOOL), + ("dwRevocationFreshnessTime", DWORD), + ) + + +PCERT_SIMPLE_CHAIN = POINTER(CERT_SIMPLE_CHAIN) + + +class CERT_CHAIN_CONTEXT(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("TrustStatus", CERT_TRUST_STATUS), + ("cChain", DWORD), + ("rgpChain", POINTER(PCERT_SIMPLE_CHAIN)), + ("cLowerQualityChainContext", DWORD), + 
("rgpLowerQualityChainContext", c_void_p), + ("fHasRevocationFreshnessTime", BOOL), + ("dwRevocationFreshnessTime", DWORD), + ) + + +PCERT_CHAIN_CONTEXT = POINTER(CERT_CHAIN_CONTEXT) +PCCERT_CHAIN_CONTEXT = POINTER(PCERT_CHAIN_CONTEXT) + + +class SSL_EXTRA_CERT_CHAIN_POLICY_PARA(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("dwAuthType", DWORD), + ("fdwChecks", DWORD), + ("pwszServerName", LPCWSTR), + ) + + +class CERT_CHAIN_POLICY_PARA(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("dwFlags", DWORD), + ("pvExtraPolicyPara", c_void_p), + ) + + +PCERT_CHAIN_POLICY_PARA = POINTER(CERT_CHAIN_POLICY_PARA) + + +class CERT_CHAIN_POLICY_STATUS(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("dwError", DWORD), + ("lChainIndex", LONG), + ("lElementIndex", LONG), + ("pvExtraPolicyStatus", c_void_p), + ) + + +PCERT_CHAIN_POLICY_STATUS = POINTER(CERT_CHAIN_POLICY_STATUS) + + +class CERT_CHAIN_ENGINE_CONFIG(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("hRestrictedRoot", HCERTSTORE), + ("hRestrictedTrust", HCERTSTORE), + ("hRestrictedOther", HCERTSTORE), + ("cAdditionalStore", DWORD), + ("rghAdditionalStore", c_void_p), + ("dwFlags", DWORD), + ("dwUrlRetrievalTimeout", DWORD), + ("MaximumCachedCertificates", DWORD), + ("CycleDetectionModulus", DWORD), + ("hExclusiveRoot", HCERTSTORE), + ("hExclusiveTrustedPeople", HCERTSTORE), + ("dwExclusiveFlags", DWORD), + ) + + +PCERT_CHAIN_ENGINE_CONFIG = POINTER(CERT_CHAIN_ENGINE_CONFIG) +PHCERTCHAINENGINE = POINTER(HCERTCHAINENGINE) + +X509_ASN_ENCODING = 0x00000001 +PKCS_7_ASN_ENCODING = 0x00010000 +CERT_STORE_PROV_MEMORY = b"Memory" +CERT_STORE_ADD_USE_EXISTING = 2 +USAGE_MATCH_TYPE_OR = 1 +OID_PKIX_KP_SERVER_AUTH = c_char_p(b"1.3.6.1.5.5.7.3.1") +CERT_CHAIN_REVOCATION_CHECK_END_CERT = 0x10000000 +CERT_CHAIN_REVOCATION_CHECK_CHAIN = 0x20000000 +CERT_CHAIN_POLICY_IGNORE_ALL_NOT_TIME_VALID_FLAGS = 0x00000007 +CERT_CHAIN_POLICY_IGNORE_INVALID_BASIC_CONSTRAINTS_FLAG = 0x00000008 +CERT_CHAIN_POLICY_ALLOW_UNKNOWN_CA_FLAG = 0x00000010 +CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG = 0x00000040 +CERT_CHAIN_POLICY_IGNORE_WRONG_USAGE_FLAG = 0x00000020 +CERT_CHAIN_POLICY_IGNORE_INVALID_POLICY_FLAG = 0x00000080 +CERT_CHAIN_POLICY_IGNORE_ALL_REV_UNKNOWN_FLAGS = 0x00000F00 +CERT_CHAIN_POLICY_ALLOW_TESTROOT_FLAG = 0x00008000 +CERT_CHAIN_POLICY_TRUST_TESTROOT_FLAG = 0x00004000 +AUTHTYPE_SERVER = 2 +CERT_CHAIN_POLICY_SSL = 4 +FORMAT_MESSAGE_FROM_SYSTEM = 0x00001000 +FORMAT_MESSAGE_IGNORE_INSERTS = 0x00000200 + +# Flags to set for SSLContext.verify_mode=CERT_NONE +CERT_CHAIN_POLICY_VERIFY_MODE_NONE_FLAGS = ( + CERT_CHAIN_POLICY_IGNORE_ALL_NOT_TIME_VALID_FLAGS + | CERT_CHAIN_POLICY_IGNORE_INVALID_BASIC_CONSTRAINTS_FLAG + | CERT_CHAIN_POLICY_ALLOW_UNKNOWN_CA_FLAG + | CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG + | CERT_CHAIN_POLICY_IGNORE_WRONG_USAGE_FLAG + | CERT_CHAIN_POLICY_IGNORE_INVALID_POLICY_FLAG + | CERT_CHAIN_POLICY_IGNORE_ALL_REV_UNKNOWN_FLAGS + | CERT_CHAIN_POLICY_ALLOW_TESTROOT_FLAG + | CERT_CHAIN_POLICY_TRUST_TESTROOT_FLAG +) + +wincrypt = WinDLL("crypt32.dll") +kernel32 = WinDLL("kernel32.dll") + + +def _handle_win_error(result: bool, _: Any, args: Any) -> Any: + if not result: + # Note, actually raises OSError after calling GetLastError and FormatMessage + raise WinError() + return args + + +CertCreateCertificateChainEngine = wincrypt.CertCreateCertificateChainEngine +CertCreateCertificateChainEngine.argtypes = ( + PCERT_CHAIN_ENGINE_CONFIG, + PHCERTCHAINENGINE, +) +CertCreateCertificateChainEngine.errcheck = _handle_win_error + +CertOpenStore = 
wincrypt.CertOpenStore +CertOpenStore.argtypes = (LPCSTR, DWORD, HCRYPTPROV_LEGACY, DWORD, c_void_p) +CertOpenStore.restype = HCERTSTORE +CertOpenStore.errcheck = _handle_win_error + +CertAddEncodedCertificateToStore = wincrypt.CertAddEncodedCertificateToStore +CertAddEncodedCertificateToStore.argtypes = ( + HCERTSTORE, + DWORD, + c_char_p, + DWORD, + DWORD, + PCCERT_CONTEXT, +) +CertAddEncodedCertificateToStore.restype = BOOL + +CertCreateCertificateContext = wincrypt.CertCreateCertificateContext +CertCreateCertificateContext.argtypes = (DWORD, c_char_p, DWORD) +CertCreateCertificateContext.restype = PCERT_CONTEXT +CertCreateCertificateContext.errcheck = _handle_win_error + +CertGetCertificateChain = wincrypt.CertGetCertificateChain +CertGetCertificateChain.argtypes = ( + HCERTCHAINENGINE, + PCERT_CONTEXT, + LPFILETIME, + HCERTSTORE, + PCERT_CHAIN_PARA, + DWORD, + c_void_p, + PCCERT_CHAIN_CONTEXT, +) +CertGetCertificateChain.restype = BOOL +CertGetCertificateChain.errcheck = _handle_win_error + +CertVerifyCertificateChainPolicy = wincrypt.CertVerifyCertificateChainPolicy +CertVerifyCertificateChainPolicy.argtypes = ( + c_ulong, + PCERT_CHAIN_CONTEXT, + PCERT_CHAIN_POLICY_PARA, + PCERT_CHAIN_POLICY_STATUS, +) +CertVerifyCertificateChainPolicy.restype = BOOL + +CertCloseStore = wincrypt.CertCloseStore +CertCloseStore.argtypes = (HCERTSTORE, DWORD) +CertCloseStore.restype = BOOL +CertCloseStore.errcheck = _handle_win_error + +CertFreeCertificateChain = wincrypt.CertFreeCertificateChain +CertFreeCertificateChain.argtypes = (PCERT_CHAIN_CONTEXT,) + +CertFreeCertificateContext = wincrypt.CertFreeCertificateContext +CertFreeCertificateContext.argtypes = (PCERT_CONTEXT,) + +CertFreeCertificateChainEngine = wincrypt.CertFreeCertificateChainEngine +CertFreeCertificateChainEngine.argtypes = (HCERTCHAINENGINE,) + +FormatMessageW = kernel32.FormatMessageW +FormatMessageW.argtypes = ( + DWORD, + LPCVOID, + DWORD, + DWORD, + LPWSTR, + DWORD, + c_void_p, +) +FormatMessageW.restype = DWORD + + +def _verify_peercerts_impl( + ssl_context: ssl.SSLContext, + cert_chain: list[bytes], + server_hostname: str | None = None, +) -> None: + """Verify the cert_chain from the server using Windows APIs.""" + pCertContext = None + hIntermediateCertStore = CertOpenStore(CERT_STORE_PROV_MEMORY, 0, None, 0, None) + try: + # Add intermediate certs to an in-memory cert store + for cert_bytes in cert_chain[1:]: + CertAddEncodedCertificateToStore( + hIntermediateCertStore, + X509_ASN_ENCODING | PKCS_7_ASN_ENCODING, + cert_bytes, + len(cert_bytes), + CERT_STORE_ADD_USE_EXISTING, + None, + ) + + # Cert context for leaf cert + leaf_cert = cert_chain[0] + pCertContext = CertCreateCertificateContext( + X509_ASN_ENCODING | PKCS_7_ASN_ENCODING, leaf_cert, len(leaf_cert) + ) + + # Chain params to match certs for serverAuth extended usage + cert_enhkey_usage = CERT_ENHKEY_USAGE() + cert_enhkey_usage.cUsageIdentifier = 1 + cert_enhkey_usage.rgpszUsageIdentifier = (c_char_p * 1)(OID_PKIX_KP_SERVER_AUTH) + cert_usage_match = CERT_USAGE_MATCH() + cert_usage_match.Usage = cert_enhkey_usage + chain_params = CERT_CHAIN_PARA() + chain_params.RequestedUsage = cert_usage_match + chain_params.cbSize = sizeof(chain_params) + pChainPara = pointer(chain_params) + + if ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_CHAIN: + chain_flags = CERT_CHAIN_REVOCATION_CHECK_CHAIN + elif ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_LEAF: + chain_flags = CERT_CHAIN_REVOCATION_CHECK_END_CERT + else: + chain_flags = 0 + + try: + # First attempt to verify 
using the default Windows system trust roots + # (default chain engine). + _get_and_verify_cert_chain( + ssl_context, + None, + hIntermediateCertStore, + pCertContext, + pChainPara, + server_hostname, + chain_flags=chain_flags, + ) + except ssl.SSLCertVerificationError: + # If that fails but custom CA certs have been added + # to the SSLContext using load_verify_locations, + # try verifying using a custom chain engine + # that trusts the custom CA certs. + custom_ca_certs: list[bytes] | None = ssl_context.get_ca_certs( + binary_form=True + ) + if custom_ca_certs: + _verify_using_custom_ca_certs( + ssl_context, + custom_ca_certs, + hIntermediateCertStore, + pCertContext, + pChainPara, + server_hostname, + chain_flags=chain_flags, + ) + else: + raise + finally: + CertCloseStore(hIntermediateCertStore, 0) + if pCertContext: + CertFreeCertificateContext(pCertContext) + + +def _get_and_verify_cert_chain( + ssl_context: ssl.SSLContext, + hChainEngine: HCERTCHAINENGINE | None, + hIntermediateCertStore: HCERTSTORE, + pPeerCertContext: c_void_p, + pChainPara: PCERT_CHAIN_PARA, # type: ignore[valid-type] + server_hostname: str | None, + chain_flags: int, +) -> None: + ppChainContext = None + try: + # Get cert chain + ppChainContext = pointer(PCERT_CHAIN_CONTEXT()) + CertGetCertificateChain( + hChainEngine, # chain engine + pPeerCertContext, # leaf cert context + None, # current system time + hIntermediateCertStore, # additional in-memory cert store + pChainPara, # chain-building parameters + chain_flags, + None, # reserved + ppChainContext, # the resulting chain context + ) + pChainContext = ppChainContext.contents + + # Verify cert chain + ssl_extra_cert_chain_policy_para = SSL_EXTRA_CERT_CHAIN_POLICY_PARA() + ssl_extra_cert_chain_policy_para.cbSize = sizeof( + ssl_extra_cert_chain_policy_para + ) + ssl_extra_cert_chain_policy_para.dwAuthType = AUTHTYPE_SERVER + ssl_extra_cert_chain_policy_para.fdwChecks = 0 + if server_hostname: + ssl_extra_cert_chain_policy_para.pwszServerName = c_wchar_p(server_hostname) + + chain_policy = CERT_CHAIN_POLICY_PARA() + chain_policy.pvExtraPolicyPara = cast( + pointer(ssl_extra_cert_chain_policy_para), c_void_p + ) + if ssl_context.verify_mode == ssl.CERT_NONE: + chain_policy.dwFlags |= CERT_CHAIN_POLICY_VERIFY_MODE_NONE_FLAGS + if not ssl_context.check_hostname: + chain_policy.dwFlags |= CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG + chain_policy.cbSize = sizeof(chain_policy) + + pPolicyPara = pointer(chain_policy) + policy_status = CERT_CHAIN_POLICY_STATUS() + policy_status.cbSize = sizeof(policy_status) + pPolicyStatus = pointer(policy_status) + CertVerifyCertificateChainPolicy( + CERT_CHAIN_POLICY_SSL, + pChainContext, + pPolicyPara, + pPolicyStatus, + ) + + # Check status + error_code = policy_status.dwError + if error_code: + # Try getting a human readable message for an error code. + error_message_buf = create_unicode_buffer(1024) + error_message_chars = FormatMessageW( + FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, + None, + error_code, + 0, + error_message_buf, + sizeof(error_message_buf), + None, + ) + + # See if we received a message for the error, + # otherwise we use a generic error with the + # error code and hope that it's search-able. 
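+            # (FormatMessageW returns the number of characters written,
+            # or zero when it has no message for this error code.)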
+ if error_message_chars <= 0: + error_message = f"Certificate chain policy error {error_code:#x} [{policy_status.lElementIndex}]" + else: + error_message = error_message_buf.value.strip() + + err = ssl.SSLCertVerificationError(error_message) + err.verify_message = error_message + err.verify_code = error_code + raise err from None + finally: + if ppChainContext: + CertFreeCertificateChain(ppChainContext.contents) + + +def _verify_using_custom_ca_certs( + ssl_context: ssl.SSLContext, + custom_ca_certs: list[bytes], + hIntermediateCertStore: HCERTSTORE, + pPeerCertContext: c_void_p, + pChainPara: PCERT_CHAIN_PARA, # type: ignore[valid-type] + server_hostname: str | None, + chain_flags: int, +) -> None: + hChainEngine = None + hRootCertStore = CertOpenStore(CERT_STORE_PROV_MEMORY, 0, None, 0, None) + try: + # Add custom CA certs to an in-memory cert store + for cert_bytes in custom_ca_certs: + CertAddEncodedCertificateToStore( + hRootCertStore, + X509_ASN_ENCODING | PKCS_7_ASN_ENCODING, + cert_bytes, + len(cert_bytes), + CERT_STORE_ADD_USE_EXISTING, + None, + ) + + # Create a custom cert chain engine which exclusively trusts + # certs from our hRootCertStore + cert_chain_engine_config = CERT_CHAIN_ENGINE_CONFIG() + cert_chain_engine_config.cbSize = sizeof(cert_chain_engine_config) + cert_chain_engine_config.hExclusiveRoot = hRootCertStore + pConfig = pointer(cert_chain_engine_config) + phChainEngine = pointer(HCERTCHAINENGINE()) + CertCreateCertificateChainEngine( + pConfig, + phChainEngine, + ) + hChainEngine = phChainEngine.contents + + # Get and verify a cert chain using the custom chain engine + _get_and_verify_cert_chain( + ssl_context, + hChainEngine, + hIntermediateCertStore, + pPeerCertContext, + pChainPara, + server_hostname, + chain_flags, + ) + finally: + if hChainEngine: + CertFreeCertificateChainEngine(hChainEngine) + CertCloseStore(hRootCertStore, 0) + + +@contextlib.contextmanager +def _configure_context(ctx: ssl.SSLContext) -> typing.Iterator[None]: + check_hostname = ctx.check_hostname + verify_mode = ctx.verify_mode + ctx.check_hostname = False + _set_ssl_context_verify_mode(ctx, ssl.CERT_NONE) + try: + yield + finally: + ctx.check_hostname = check_hostname + _set_ssl_context_verify_mode(ctx, verify_mode) diff --git a/pipenv/patched/pip/_vendor/urllib3/_version.py b/pipenv/patched/pip/_vendor/urllib3/_version.py index d69ca31457..cad75fb5df 100644 --- a/pipenv/patched/pip/_vendor/urllib3/_version.py +++ b/pipenv/patched/pip/_vendor/urllib3/_version.py @@ -1,2 +1,2 @@ # This file is protected via CODEOWNERS -__version__ = "1.26.16" +__version__ = "1.26.17" diff --git a/pipenv/patched/pip/_vendor/urllib3/request.py b/pipenv/patched/pip/_vendor/urllib3/request.py index 398386a5b9..3b4cf99922 100644 --- a/pipenv/patched/pip/_vendor/urllib3/request.py +++ b/pipenv/patched/pip/_vendor/urllib3/request.py @@ -1,6 +1,9 @@ from __future__ import absolute_import +import sys + from .filepost import encode_multipart_formdata +from .packages import six from .packages.six.moves.urllib.parse import urlencode __all__ = ["RequestMethods"] @@ -168,3 +171,21 @@ def request_encode_body( extra_kw.update(urlopen_kw) return self.urlopen(method, url, **extra_kw) + + +if not six.PY2: + + class RequestModule(sys.modules[__name__].__class__): + def __call__(self, *args, **kwargs): + """ + If user tries to call this module directly urllib3 v2.x style raise an error to the user + suggesting they may need urllib3 v2 + """ + raise TypeError( + "'module' object is not callable\n" + "urllib3.request() method 
is not supported in this release, " + "upgrade to urllib3 v2 to use it\n" + "see https://urllib3.readthedocs.io/en/stable/v2-migration-guide.html" + ) + + sys.modules[__name__].__class__ = RequestModule diff --git a/pipenv/patched/pip/_vendor/urllib3/util/retry.py b/pipenv/patched/pip/_vendor/urllib3/util/retry.py index 2490d5e5b6..60ef6c4f3f 100644 --- a/pipenv/patched/pip/_vendor/urllib3/util/retry.py +++ b/pipenv/patched/pip/_vendor/urllib3/util/retry.py @@ -235,7 +235,7 @@ class Retry(object): RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) #: Default headers to be used for ``remove_headers_on_redirect`` - DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"]) + DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"]) #: Maximum backoff time. DEFAULT_BACKOFF_MAX = 120 diff --git a/pipenv/patched/pip/_vendor/vendor.txt b/pipenv/patched/pip/_vendor/vendor.txt index 4ab2915fb8..8dbe134137 100644 --- a/pipenv/patched/pip/_vendor/vendor.txt +++ b/pipenv/patched/pip/_vendor/vendor.txt @@ -1,4 +1,4 @@ -CacheControl==0.12.11 # Make sure to update the license in pyproject.toml for this. +CacheControl==0.13.1 # Make sure to update the license in pyproject.toml for this. colorama==0.4.6 distlib==0.3.6 distro==1.8.0 @@ -8,10 +8,10 @@ platformdirs==3.8.1 pyparsing==3.1.0 pyproject-hooks==1.0.0 requests==2.31.0 - certifi==2023.5.7 + certifi==2023.7.22 chardet==5.1.0 idna==3.4 - urllib3==1.26.16 + urllib3==1.26.17 rich==13.4.2 pygments==2.15.1 typing_extensions==4.7.1 @@ -20,4 +20,5 @@ setuptools==68.0.0 six==1.16.0 tenacity==8.2.2 tomli==2.0.1 +truststore==0.8.0 webencodings==0.5.1 diff --git a/pipenv/pipenv.1 b/pipenv/pipenv.1 index 2cbb266351..66cea4faef 100644 --- a/pipenv/pipenv.1 +++ b/pipenv/pipenv.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "PIPENV" "1" "Sep 08, 2023" "2023.9.8" "pipenv" +.TH "PIPENV" "1" "Oct 20, 2023" "2023.10.20" "pipenv" .sp \fBNOTE:\fP .INDENT 0.0 diff --git a/pipenv/project.py b/pipenv/project.py index 71e3dbb93e..d384209af6 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -1090,12 +1090,24 @@ def get_package_name_in_pipfile(self, package_name, category): return name return None + def _sort_category(self, category): + # toml tables won't maintain sorted dictionary order + # so construct the table in the order that we need + sorted_category = dict(sorted(category.items())) + table = tomlkit.table() + for key, value in sorted_category.items(): + table.add(key, value) + + return table + def remove_package_from_pipfile(self, package_name, category): # Read and append Pipfile. name = self.get_package_name_in_pipfile(package_name, category=category) p = self.parsed_pipfile if name: del p[category][name] + if self.settings.get("sort_pipfile"): + p[category] = self._sort_category(p[category]) self.write_toml(p) return True return False @@ -1206,6 +1218,9 @@ def add_pipfile_entry_to_pipfile(self, name, normalized_name, entry, category=No p[category][normalized_name] = entry + if self.settings.get("sort_pipfile"): + p[category] = self._sort_category(p[category]) + # Write Pipfile. 
self.write_toml(p) return newly_added, category, normalized_name diff --git a/pipenv/routines/check.py b/pipenv/routines/check.py index c2f5d12feb..a067a41e74 100644 --- a/pipenv/routines/check.py +++ b/pipenv/routines/check.py @@ -1,5 +1,6 @@ import io import json as simplejson +import logging import os import sys import tempfile @@ -12,6 +13,40 @@ from pipenv.vendor import click, plette +def build_options( + audit_and_monitor=True, + exit_code=True, + output="screen", + save_json="", + policy_file="", + safety_project=None, + temp_requirements_name="", +): + options = [ + "--audit-and-monitor" if audit_and_monitor else "--disable-audit-and-monitor", + "--exit-code" if exit_code else "--continue-on-error", + ] + formats = {"full-report": "--full-report", "minimal": "--json"} + + if output in formats: + options.append(formats.get(output, "")) + elif output not in ["screen", "default"]: + options.append(f"--output={output}") + + if save_json: + options.append(f"--save-json={save_json}") + + if policy_file: + options.append(f"--policy-file={policy_file}") + + if safety_project: + options.append(f"--project={safety_project}") + + options.extend(["--file", temp_requirements_name]) + + return options + + def do_check( project, python=False, @@ -21,6 +56,7 @@ def do_check( output="screen", key=None, quiet=False, + verbose=False, exit_code=True, policy_file="", save_json="", @@ -32,6 +68,9 @@ def do_check( ): import json + if not verbose: + logging.getLogger("pipenv").setLevel(logging.WARN) + if not system: # Ensure that virtualenv is available. ensure_project( @@ -120,27 +159,6 @@ def do_check( else: ignored = [] - options = [ - "--audit-and-monitor" if audit_and_monitor else "--disable-audit-and-monitor", - "--exit-code" if exit_code else "--continue-on-error", - ] - - formats = {"full-report": "--full-report", "minimal": "--json"} - if output in formats: - options.append(formats.get(output, "")) - - elif output not in ["screen", "default"]: - options.append(f"--output={output}") - - if save_json: - options.append(f"--save-json={save_json}") - - if policy_file: - options.append(f"--policy-file={policy_file}") - - if safety_project: - options.append(f"--project={safety_project}") - if use_installed: target_venv_packages = run_command( _cmd + ["-m", "pip", "list", "--format=freeze"], @@ -165,7 +183,15 @@ def do_check( temp_requirements.write(target_venv_packages.stdout.strip()) temp_requirements.close() - options.extend(["--file", temp_requirements.name]) + options = build_options( + audit_and_monitor=audit_and_monitor, + exit_code=exit_code, + output=output, + save_json=save_json, + policy_file=policy_file, + safety_project=safety_project, + temp_requirements_name=temp_requirements.name, + ) cmd = _cmd + [safety_path, "check"] + options diff --git a/pipenv/routines/shell.py b/pipenv/routines/shell.py index 0d3089e8a2..a4a4df6218 100644 --- a/pipenv/routines/shell.py +++ b/pipenv/routines/shell.py @@ -9,7 +9,9 @@ from pipenv.vendor import click -def do_shell(project, python=False, fancy=False, shell_args=None, pypi_mirror=None): +def do_shell( + project, python=False, fancy=False, shell_args=None, pypi_mirror=None, quiet=False +): # Ensure that virtualenv is available. 
ensure_project( project, @@ -25,7 +27,8 @@ def do_shell(project, python=False, fancy=False, shell_args=None, pypi_mirror=No from pipenv.shells import choose_shell shell = choose_shell(project) - click.echo("Launching subshell in virtual environment...", err=True) + if not quiet: + click.echo("Launching subshell in virtual environment...", err=True) fork_args = ( project.virtualenv_location, diff --git a/pipenv/utils/dependencies.py b/pipenv/utils/dependencies.py index eae7f387c3..bc6fd1572d 100644 --- a/pipenv/utils/dependencies.py +++ b/pipenv/utils/dependencies.py @@ -1019,6 +1019,20 @@ def _file_path_from_pipfile(path_obj, pipfile_entry): return req_str +def normalize_vcs_url(vcs_url): + """Return vcs_url and possible vcs_ref from a given vcs_url.""" + # We have to handle the fact that some vcs urls have a ref in them + # and some have a netloc with a username and password in them, and some have both + vcs_ref = "" + if "@" in vcs_url: + parsed_url = urlparse(vcs_url) + if "@" in parsed_url.path: + url_parts = vcs_url.rsplit("@", 1) + vcs_url = url_parts[0] + vcs_ref = url_parts[1] + return vcs_url, vcs_ref + + def install_req_from_pipfile(name, pipfile): """Creates an InstallRequirement from a name and a pipfile entry. Handles VCS, local & remote paths, and regular named requirements. @@ -1045,14 +1059,7 @@ def install_req_from_pipfile(name, pipfile): subdirectory = _pipfile.get("subdirectory", "") if subdirectory: subdirectory = f"#subdirectory={subdirectory}" - fallback_ref = "" - if ("ssh://" in vcs_url and vcs_url.count("@") >= 2) or ( - "ssh://" not in vcs_url and "@" in vcs_url - ): - vcs_url_parts = vcs_url.rsplit("@", 1) - if re.match(r"^[\w\.]+$", vcs_url_parts[1]): - vcs_url = vcs_url_parts[0] - fallback_ref = vcs_url_parts[1] + vcs_url, fallback_ref = normalize_vcs_url(vcs_url) req_str = f"{vcs_url}@{_pipfile.get('ref', fallback_ref)}{extras_str}" if not req_str.lstrip().startswith(f"{vcs}+"): req_str = f"{vcs}+{req_str}" diff --git a/pipenv/utils/requirements.py b/pipenv/utils/requirements.py index 739944432b..44b9c14fec 100644 --- a/pipenv/utils/requirements.py +++ b/pipenv/utils/requirements.py @@ -1,7 +1,7 @@ import os import re -import urllib.parse from typing import Tuple +from urllib.parse import quote, unquote from pipenv.patched.pip._internal.network.session import PipSession from pipenv.patched.pip._internal.req import parse_requirements @@ -41,7 +41,7 @@ def redact_netloc(netloc: str) -> Tuple[str]: else: # If it is, leave it as is password = ":" + password - user = urllib.parse.quote(user) + user = quote(user) return (f"{user}{password}@{netloc}",) @@ -86,7 +86,7 @@ def import_requirements(project, r=None, dev=False, categories=None): if package.editable: package_string = f"-e {package.link}" else: - package_string = urllib.parse.unquote( + package_string = unquote( redact_auth_from_url(package.original_link.url) ) @@ -143,7 +143,7 @@ def add_index_to_pipfile(project, index, trusted_hosts=None): def requirement_from_lockfile( package_name, package_info, include_hashes=True, include_markers=True ): - from pipenv.utils.dependencies import is_editable_path, is_star + from pipenv.utils.dependencies import is_editable_path, is_star, normalize_vcs_url # Handle string requirements if isinstance(package_info, str): @@ -166,36 +166,31 @@ def requirement_from_lockfile( # Handling vcs repositories for vcs in VCS_LIST: if vcs in package_info: - url = package_info[vcs] + vcs_url = package_info[vcs] ref = package_info.get("ref", "") - if ("ssh://" in url and url.count("@") >= 
2) or (
-                "ssh://" not in url and "@" in url
-            ):
-                url_parts = url.rsplit("@", 1)
-                # Check if the second part matches the criteria to be a ref (vcs URLs would likely have a /)
-                if re.match(r"^[\w\.]+$", url_parts[1]):
-                    url = url_parts[0]
-                    if not ref:
-                        ref = url_parts[1]
-
+            # A VCS URL may carry a trailing @ref, credentials in its netloc, or both;
+            # normalize_vcs_url splits off the ref when one is present.
+            vcs_url, fallback_ref = normalize_vcs_url(vcs_url)
+            if not ref:
+                ref = fallback_ref
             extras = (
                 "[{}]".format(",".join(package_info.get("extras", [])))
                 if "extras" in package_info
                 else ""
             )
             subdirectory = package_info.get("subdirectory", "")
-            include_vcs = "" if f"{vcs}+" in url else f"{vcs}+"
-            egg_fragment = "" if "#egg=" in url else f"#egg={package_name}"
-            ref_str = "" if not ref or f"@{ref}" in url else f"@{ref}"
+            include_vcs = "" if f"{vcs}+" in vcs_url else f"{vcs}+"
+            egg_fragment = "" if "#egg=" in vcs_url else f"#egg={package_name}"
+            ref_str = "" if not ref or f"@{ref}" in vcs_url else f"@{ref}"
             if (
-                is_editable_path(url)
-                or "file://" in url
+                is_editable_path(vcs_url)
+                or "file://" in vcs_url
                 or package_info.get("editable", False)
             ):
-                pip_line = f"-e {include_vcs}{url}{ref_str}{egg_fragment}{extras}"
+                pip_line = f"-e {include_vcs}{vcs_url}{ref_str}{egg_fragment}{extras}"
                 pip_line += f"&subdirectory={subdirectory}" if subdirectory else ""
             else:
-                pip_line = f"{package_name}{extras}@ {include_vcs}{url}{ref_str}"
+                pip_line = f"{package_name}{extras}@ {include_vcs}{vcs_url}{ref_str}"
                 pip_line += f"#subdirectory={subdirectory}" if subdirectory else ""
             return pip_line
     # Handling file-sourced packages
diff --git a/pipenv/utils/resolver.py b/pipenv/utils/resolver.py
index da537b2e22..5376d379e0 100644
--- a/pipenv/utils/resolver.py
+++ b/pipenv/utils/resolver.py
@@ -147,7 +147,7 @@ def __repr__(self):
         )

     @staticmethod
-    @lru_cache()
+    @lru_cache
     def _get_pip_command():
         return InstallCommand(name="InstallCommand", summary="pip Install command.")

@@ -467,15 +467,29 @@ def _get_pipfile_markers(self, pipfile_entry):

         return " and ".join(combined_markers).strip()

-    def _fold_markers(self, dependency_tree, install_req):
+    def _fold_markers(self, dependency_tree, install_req, checked_dependencies=None):
+        if checked_dependencies is None:
+            checked_dependencies = set()
+
+        if install_req.name is None:
+            return None  # an unnamed requirement has no entry in the dependency tree
+
         comes_from = dependency_tree[install_req.name]
+
+        # Check for recursion loop
+        if install_req.name in checked_dependencies:
+            return None  # stop here rather than recursing forever on a dependency cycle
+
+        checked_dependencies.add(install_req.name)
+
         if comes_from == "Pipfile":
             pipfile_entry = self.pipfile_entries.get(install_req.name)
             if pipfile_entry and isinstance(pipfile_entry, dict):
                 return self._get_pipfile_markers(pipfile_entry)
         else:
-            markers = self._fold_markers(dependency_tree, comes_from)
+            markers = self._fold_markers(
+                dependency_tree, comes_from, checked_dependencies
+            )
             if markers:
                 self.markers_lookup[install_req.name] = markers
             return markers
@@ -934,7 +948,7 @@ def resolve_deps(
     return results, internal_resolver

-@lru_cache()
+@lru_cache
 def get_pipenv_sitedir() -> Optional[str]:
     site_dir = next(
         iter(d for d in pkg_resources.working_set if d.key.lower() == "pipenv"), None
diff --git a/pipenv/utils/shell.py b/pipenv/utils/shell.py
index dd7795aba0..50d2b9cfc5 100644
--- a/pipenv/utils/shell.py
+++ b/pipenv/utils/shell.py
@@ -19,7 +19,7 @@ from .processes import subprocess_run

-@lru_cache()
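Both call sites now delegate ref splitting to `normalize_vcs_url`, which only strips a trailing `@ref` when the `@` occurs in the URL path, so credentials embedded in the netloc survive untouched. A hedged sketch of that contract; the first URL comes from the new lockfile tests further down, the second is a made-up credentialed URL:

from pipenv.utils.dependencies import normalize_vcs_url

# A branch ref (even one containing a slash) is split off the path...
url, ref = normalize_vcs_url(
    "https://github.com/thehesiod/google-api-python-client.git@thehesiod/batch-retries2"
)
assert (url, ref) == (
    "https://github.com/thehesiod/google-api-python-client.git",
    "thehesiod/batch-retries2",
)

# ...while an "@" that only appears in the netloc is not treated as a ref.
url, ref = normalize_vcs_url("https://user:token@github.com/org/repo.git")
assert (url, ref) == ("https://user:token@github.com/org/repo.git", "")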
+@lru_cache def make_posix(path: str) -> str: """ Convert a path with possible windows-style separators to a posix-style path diff --git a/pipenv/vendor/click/LICENSE.md b/pipenv/vendor/click/LICENSE.rst similarity index 100% rename from pipenv/vendor/click/LICENSE.md rename to pipenv/vendor/click/LICENSE.rst diff --git a/pipenv/vendor/markupsafe/LICENSE.md b/pipenv/vendor/markupsafe/LICENSE.rst similarity index 100% rename from pipenv/vendor/markupsafe/LICENSE.md rename to pipenv/vendor/markupsafe/LICENSE.rst diff --git a/pipenv/vendor/pep517/__init__.py b/pipenv/vendor/pep517/__init__.py deleted file mode 100644 index 38ea0f5f11..0000000000 --- a/pipenv/vendor/pep517/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Wrappers to build Python packages using PEP 517 hooks -""" - -__version__ = '0.13.0' - -from .wrappers import * # noqa: F401, F403 diff --git a/pipenv/vendor/pep517/_compat.py b/pipenv/vendor/pep517/_compat.py deleted file mode 100644 index 78d94af98a..0000000000 --- a/pipenv/vendor/pep517/_compat.py +++ /dev/null @@ -1,8 +0,0 @@ -__all__ = ("tomllib",) - -import sys - -if sys.version_info >= (3, 11): - import tomllib -else: - from pipenv.patched.pip._vendor import tomli as tomllib diff --git a/pipenv/vendor/pep517/build.py b/pipenv/vendor/pep517/build.py deleted file mode 100644 index b30909c870..0000000000 --- a/pipenv/vendor/pep517/build.py +++ /dev/null @@ -1,126 +0,0 @@ -"""Build a project using PEP 517 hooks. -""" -import argparse -import logging -import os -import shutil -import tempfile - -from ._compat import tomllib -from .envbuild import BuildEnvironment -from .wrappers import Pep517HookCaller - -log = logging.getLogger(__name__) - - -def validate_system(system): - """ - Ensure build system has the requisite fields. - """ - required = {'requires', 'build-backend'} - if not (required <= set(system)): - message = "Missing required fields: {missing}".format( - missing=required-set(system), - ) - raise ValueError(message) - - -def load_system(source_dir): - """ - Load the build system from a source dir (pyproject.toml). - """ - pyproject = os.path.join(source_dir, 'pyproject.toml') - with open(pyproject, 'rb') as f: - pyproject_data = tomllib.load(f) - return pyproject_data['build-system'] - - -def compat_system(source_dir): - """ - Given a source dir, attempt to get a build system backend - and requirements from pyproject.toml. Fallback to - setuptools but only if the file was not found or a build - system was not indicated. 
- """ - try: - system = load_system(source_dir) - except (FileNotFoundError, KeyError): - system = {} - system.setdefault( - 'build-backend', - 'setuptools.build_meta:__legacy__', - ) - system.setdefault('requires', ['setuptools', 'wheel']) - return system - - -def _do_build(hooks, env, dist, dest): - get_requires_name = 'get_requires_for_build_{dist}'.format(**locals()) - get_requires = getattr(hooks, get_requires_name) - reqs = get_requires({}) - log.info('Got build requires: %s', reqs) - - env.pip_install(reqs) - log.info('Installed dynamic build dependencies') - - with tempfile.TemporaryDirectory() as td: - log.info('Trying to build %s in %s', dist, td) - build_name = 'build_{dist}'.format(**locals()) - build = getattr(hooks, build_name) - filename = build(td, {}) - source = os.path.join(td, filename) - shutil.move(source, os.path.join(dest, os.path.basename(filename))) - - -def build(source_dir, dist, dest=None, system=None): - system = system or load_system(source_dir) - dest = os.path.join(source_dir, dest or 'dist') - os.makedirs(dest, exist_ok=True) - - validate_system(system) - hooks = Pep517HookCaller( - source_dir, system['build-backend'], system.get('backend-path') - ) - - with BuildEnvironment() as env: - env.pip_install(system['requires']) - _do_build(hooks, env, dist, dest) - - -parser = argparse.ArgumentParser() -parser.add_argument( - 'source_dir', - help="A directory containing pyproject.toml", -) -parser.add_argument( - '--binary', '-b', - action='store_true', - default=False, -) -parser.add_argument( - '--source', '-s', - action='store_true', - default=False, -) -parser.add_argument( - '--out-dir', '-o', - help="Destination in which to save the builds relative to source dir", -) - - -def main(args): - log.warning('pep517.build is deprecated. ' - 'Consider switching to https://pypi.org/project/build/') - - # determine which dists to build - dists = list(filter(None, ( - 'sdist' if args.source or not args.binary else None, - 'wheel' if args.binary or not args.source else None, - ))) - - for dist in dists: - build(args.source_dir, dist, args.out_dir) - - -if __name__ == '__main__': - main(parser.parse_args()) diff --git a/pipenv/vendor/pep517/check.py b/pipenv/vendor/pep517/check.py deleted file mode 100644 index b79f6270b4..0000000000 --- a/pipenv/vendor/pep517/check.py +++ /dev/null @@ -1,207 +0,0 @@ -"""Check a project and backend by attempting to build using PEP 517 hooks. 
-""" -import argparse -import logging -import os -import shutil -import sys -import tarfile -import zipfile -from os.path import isfile -from os.path import join as pjoin -from subprocess import CalledProcessError -from tempfile import mkdtemp - -from ._compat import tomllib -from .colorlog import enable_colourful_output -from .envbuild import BuildEnvironment -from .wrappers import Pep517HookCaller - -log = logging.getLogger(__name__) - - -def check_build_sdist(hooks, build_sys_requires): - with BuildEnvironment() as env: - try: - env.pip_install(build_sys_requires) - log.info('Installed static build dependencies') - except CalledProcessError: - log.error('Failed to install static build dependencies') - return False - - try: - reqs = hooks.get_requires_for_build_sdist({}) - log.info('Got build requires: %s', reqs) - except Exception: - log.error('Failure in get_requires_for_build_sdist', exc_info=True) - return False - - try: - env.pip_install(reqs) - log.info('Installed dynamic build dependencies') - except CalledProcessError: - log.error('Failed to install dynamic build dependencies') - return False - - td = mkdtemp() - log.info('Trying to build sdist in %s', td) - try: - try: - filename = hooks.build_sdist(td, {}) - log.info('build_sdist returned %r', filename) - except Exception: - log.info('Failure in build_sdist', exc_info=True) - return False - - if not filename.endswith('.tar.gz'): - log.error( - "Filename %s doesn't have .tar.gz extension", filename) - return False - - path = pjoin(td, filename) - if isfile(path): - log.info("Output file %s exists", path) - else: - log.error("Output file %s does not exist", path) - return False - - if tarfile.is_tarfile(path): - log.info("Output file is a tar file") - else: - log.error("Output file is not a tar file") - return False - - finally: - shutil.rmtree(td) - - return True - - -def check_build_wheel(hooks, build_sys_requires): - with BuildEnvironment() as env: - try: - env.pip_install(build_sys_requires) - log.info('Installed static build dependencies') - except CalledProcessError: - log.error('Failed to install static build dependencies') - return False - - try: - reqs = hooks.get_requires_for_build_wheel({}) - log.info('Got build requires: %s', reqs) - except Exception: - log.error('Failure in get_requires_for_build_sdist', exc_info=True) - return False - - try: - env.pip_install(reqs) - log.info('Installed dynamic build dependencies') - except CalledProcessError: - log.error('Failed to install dynamic build dependencies') - return False - - td = mkdtemp() - log.info('Trying to build wheel in %s', td) - try: - try: - filename = hooks.build_wheel(td, {}) - log.info('build_wheel returned %r', filename) - except Exception: - log.info('Failure in build_wheel', exc_info=True) - return False - - if not filename.endswith('.whl'): - log.error("Filename %s doesn't have .whl extension", filename) - return False - - path = pjoin(td, filename) - if isfile(path): - log.info("Output file %s exists", path) - else: - log.error("Output file %s does not exist", path) - return False - - if zipfile.is_zipfile(path): - log.info("Output file is a zip file") - else: - log.error("Output file is not a zip file") - return False - - finally: - shutil.rmtree(td) - - return True - - -def check(source_dir): - pyproject = pjoin(source_dir, 'pyproject.toml') - if isfile(pyproject): - log.info('Found pyproject.toml') - else: - log.error('Missing pyproject.toml') - return False - - try: - with open(pyproject, 'rb') as f: - pyproject_data = tomllib.load(f) - # Ensure the 
mandatory data can be loaded - buildsys = pyproject_data['build-system'] - requires = buildsys['requires'] - backend = buildsys['build-backend'] - backend_path = buildsys.get('backend-path') - log.info('Loaded pyproject.toml') - except (tomllib.TOMLDecodeError, KeyError): - log.error("Invalid pyproject.toml", exc_info=True) - return False - - hooks = Pep517HookCaller(source_dir, backend, backend_path) - - sdist_ok = check_build_sdist(hooks, requires) - wheel_ok = check_build_wheel(hooks, requires) - - if not sdist_ok: - log.warning('Sdist checks failed; scroll up to see') - if not wheel_ok: - log.warning('Wheel checks failed') - - return sdist_ok - - -def main(argv=None): - log.warning('pep517.check is deprecated. ' - 'Consider switching to https://pypi.org/project/build/') - - ap = argparse.ArgumentParser() - ap.add_argument( - 'source_dir', - help="A directory containing pyproject.toml") - args = ap.parse_args(argv) - - enable_colourful_output() - - ok = check(args.source_dir) - - if ok: - print(ansi('Checks passed', 'green')) - else: - print(ansi('Checks failed', 'red')) - sys.exit(1) - - -ansi_codes = { - 'reset': '\x1b[0m', - 'bold': '\x1b[1m', - 'red': '\x1b[31m', - 'green': '\x1b[32m', -} - - -def ansi(s, attr): - if os.name != 'nt' and sys.stdout.isatty(): - return ansi_codes[attr] + str(s) + ansi_codes['reset'] - else: - return str(s) - - -if __name__ == '__main__': - main() diff --git a/pipenv/vendor/pep517/colorlog.py b/pipenv/vendor/pep517/colorlog.py deleted file mode 100644 index 66310a79a9..0000000000 --- a/pipenv/vendor/pep517/colorlog.py +++ /dev/null @@ -1,113 +0,0 @@ -"""Nicer log formatting with colours. - -Code copied from Tornado, Apache licensed. -""" -# Copyright 2012 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import logging -import sys - -try: - import curses -except ImportError: - curses = None - - -def _stderr_supports_color(): - color = False - if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty(): - try: - curses.setupterm() - if curses.tigetnum("colors") > 0: - color = True - except Exception: - pass - return color - - -class LogFormatter(logging.Formatter): - """Log formatter with colour support - """ - DEFAULT_COLORS = { - logging.INFO: 2, # Green - logging.WARNING: 3, # Yellow - logging.ERROR: 1, # Red - logging.CRITICAL: 1, - } - - def __init__(self, color=True, datefmt=None): - r""" - :arg bool color: Enables color support. - :arg string fmt: Log message format. - It will be applied to the attributes dict of log records. The - text between ``%(color)s`` and ``%(end_color)s`` will be colored - depending on the level if color support is on. - :arg dict colors: color mappings from logging level to terminal color - code - :arg string datefmt: Datetime format. - Used for formatting ``(asctime)`` placeholder in ``prefix_fmt``. - .. versionchanged:: 3.2 - Added ``fmt`` and ``datefmt`` arguments. 
- """ - logging.Formatter.__init__(self, datefmt=datefmt) - self._colors = {} - if color and _stderr_supports_color(): - # The curses module has some str/bytes confusion in - # python3. Until version 3.2.3, most methods return - # bytes, but only accept strings. In addition, we want to - # output these strings with the logging module, which - # works with unicode strings. The explicit calls to - # unicode() below are harmless in python2 but will do the - # right conversion in python 3. - fg_color = (curses.tigetstr("setaf") or - curses.tigetstr("setf") or "") - - for levelno, code in self.DEFAULT_COLORS.items(): - self._colors[levelno] = str( - curses.tparm(fg_color, code), "ascii") - self._normal = str(curses.tigetstr("sgr0"), "ascii") - - scr = curses.initscr() - self.termwidth = scr.getmaxyx()[1] - curses.endwin() - else: - self._normal = '' - # Default width is usually 80, but too wide is - # worse than too narrow - self.termwidth = 70 - - def formatMessage(self, record): - mlen = len(record.message) - right_text = '{initial}-{name}'.format(initial=record.levelname[0], - name=record.name) - if mlen + len(right_text) < self.termwidth: - space = ' ' * (self.termwidth - (mlen + len(right_text))) - else: - space = ' ' - - if record.levelno in self._colors: - start_color = self._colors[record.levelno] - end_color = self._normal - else: - start_color = end_color = '' - - return record.message + space + start_color + right_text + end_color - - -def enable_colourful_output(level=logging.INFO): - handler = logging.StreamHandler() - handler.setFormatter(LogFormatter()) - logging.root.addHandler(handler) - logging.root.setLevel(level) diff --git a/pipenv/vendor/pep517/dirtools.py b/pipenv/vendor/pep517/dirtools.py deleted file mode 100644 index 3eff4d801b..0000000000 --- a/pipenv/vendor/pep517/dirtools.py +++ /dev/null @@ -1,19 +0,0 @@ -import io -import os -import zipfile - - -def dir_to_zipfile(root): - """Construct an in-memory zip file for a directory.""" - buffer = io.BytesIO() - zip_file = zipfile.ZipFile(buffer, 'w') - for root, dirs, files in os.walk(root): - for path in dirs: - fs_path = os.path.join(root, path) - rel_path = os.path.relpath(fs_path, root) - zip_file.writestr(rel_path + '/', '') - for path in files: - fs_path = os.path.join(root, path) - rel_path = os.path.relpath(fs_path, root) - zip_file.write(fs_path, rel_path) - return zip_file diff --git a/pipenv/vendor/pep517/envbuild.py b/pipenv/vendor/pep517/envbuild.py deleted file mode 100644 index c0415c4d73..0000000000 --- a/pipenv/vendor/pep517/envbuild.py +++ /dev/null @@ -1,170 +0,0 @@ -"""Build wheels/sdists by installing build deps to a temporary environment. -""" - -import logging -import os -import shutil -import sys -from subprocess import check_call -from sysconfig import get_paths -from tempfile import mkdtemp - -from ._compat import tomllib -from .wrappers import LoggerWrapper, Pep517HookCaller - -log = logging.getLogger(__name__) - - -def _load_pyproject(source_dir): - with open( - os.path.join(source_dir, 'pyproject.toml'), - 'rb', - ) as f: - pyproject_data = tomllib.load(f) - buildsys = pyproject_data['build-system'] - return ( - buildsys['requires'], - buildsys['build-backend'], - buildsys.get('backend-path'), - ) - - -class BuildEnvironment: - """Context manager to install build deps in a simple temporary environment - - Based on code I wrote for pip, which is MIT licensed. 
- """ - # Copyright (c) 2008-2016 The pip developers (see AUTHORS.txt file) - # - # Permission is hereby granted, free of charge, to any person obtaining - # a copy of this software and associated documentation files (the - # "Software"), to deal in the Software without restriction, including - # without limitation the rights to use, copy, modify, merge, publish, - # distribute, sublicense, and/or sell copies of the Software, and to - # permit persons to whom the Software is furnished to do so, subject to - # the following conditions: - # - # The above copyright notice and this permission notice shall be - # included in all copies or substantial portions of the Software. - # - # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE - # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION - # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION - # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - path = None - - def __init__(self, cleanup=True): - self._cleanup = cleanup - - def __enter__(self): - self.path = mkdtemp(prefix='pep517-build-env-') - log.info('Temporary build environment: %s', self.path) - - self.save_path = os.environ.get('PATH', None) - self.save_pythonpath = os.environ.get('PYTHONPATH', None) - - install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' - install_dirs = get_paths(install_scheme, vars={ - 'base': self.path, - 'platbase': self.path, - }) - - scripts = install_dirs['scripts'] - if self.save_path: - os.environ['PATH'] = scripts + os.pathsep + self.save_path - else: - os.environ['PATH'] = scripts + os.pathsep + os.defpath - - if install_dirs['purelib'] == install_dirs['platlib']: - lib_dirs = install_dirs['purelib'] - else: - lib_dirs = install_dirs['purelib'] + os.pathsep + \ - install_dirs['platlib'] - if self.save_pythonpath: - os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \ - self.save_pythonpath - else: - os.environ['PYTHONPATH'] = lib_dirs - - return self - - def pip_install(self, reqs): - """Install dependencies into this env by calling pip in a subprocess""" - if not reqs: - return - log.info('Calling pip to install %s', reqs) - cmd = [ - sys.executable, '-m', 'pip', 'install', '--ignore-installed', - '--prefix', self.path] + list(reqs) - check_call( - cmd, - stdout=LoggerWrapper(log, logging.INFO), - stderr=LoggerWrapper(log, logging.ERROR), - ) - - def __exit__(self, exc_type, exc_val, exc_tb): - needs_cleanup = ( - self._cleanup and - self.path is not None and - os.path.isdir(self.path) - ) - if needs_cleanup: - shutil.rmtree(self.path) - - if self.save_path is None: - os.environ.pop('PATH', None) - else: - os.environ['PATH'] = self.save_path - - if self.save_pythonpath is None: - os.environ.pop('PYTHONPATH', None) - else: - os.environ['PYTHONPATH'] = self.save_pythonpath - - -def build_wheel(source_dir, wheel_dir, config_settings=None): - """Build a wheel from a source directory using PEP 517 hooks. - - :param str source_dir: Source directory containing pyproject.toml - :param str wheel_dir: Target directory to create wheel in - :param dict config_settings: Options to pass to build backend - - This is a blocking function which will run pip in a subprocess to install - build requirements. 
- """ - if config_settings is None: - config_settings = {} - requires, backend, backend_path = _load_pyproject(source_dir) - hooks = Pep517HookCaller(source_dir, backend, backend_path) - - with BuildEnvironment() as env: - env.pip_install(requires) - reqs = hooks.get_requires_for_build_wheel(config_settings) - env.pip_install(reqs) - return hooks.build_wheel(wheel_dir, config_settings) - - -def build_sdist(source_dir, sdist_dir, config_settings=None): - """Build an sdist from a source directory using PEP 517 hooks. - - :param str source_dir: Source directory containing pyproject.toml - :param str sdist_dir: Target directory to place sdist in - :param dict config_settings: Options to pass to build backend - - This is a blocking function which will run pip in a subprocess to install - build requirements. - """ - if config_settings is None: - config_settings = {} - requires, backend, backend_path = _load_pyproject(source_dir) - hooks = Pep517HookCaller(source_dir, backend, backend_path) - - with BuildEnvironment() as env: - env.pip_install(requires) - reqs = hooks.get_requires_for_build_sdist(config_settings) - env.pip_install(reqs) - return hooks.build_sdist(sdist_dir, config_settings) diff --git a/pipenv/vendor/pep517/in_process/__init__.py b/pipenv/vendor/pep517/in_process/__init__.py deleted file mode 100644 index 281a356cfe..0000000000 --- a/pipenv/vendor/pep517/in_process/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -"""This is a subpackage because the directory is on sys.path for _in_process.py - -The subpackage should stay as empty as possible to avoid shadowing modules that -the backend might import. -""" -from contextlib import contextmanager -from os.path import abspath, dirname -from os.path import join as pjoin - -try: - import importlib.resources as resources - try: - resources.files - except AttributeError: - # Python 3.8 compatibility - def _in_proc_script_path(): - return resources.path(__package__, '_in_process.py') - else: - def _in_proc_script_path(): - return resources.as_file( - resources.files(__package__).joinpath('_in_process.py')) -except ImportError: - # Python 3.6 compatibility - @contextmanager - def _in_proc_script_path(): - yield pjoin(dirname(abspath(__file__)), '_in_process.py') diff --git a/pipenv/vendor/pep517/in_process/_in_process.py b/pipenv/vendor/pep517/in_process/_in_process.py deleted file mode 100644 index ae4cf9e9ce..0000000000 --- a/pipenv/vendor/pep517/in_process/_in_process.py +++ /dev/null @@ -1,351 +0,0 @@ -"""This is invoked in a subprocess to call the build backend hooks. - -It expects: -- Command line args: hook_name, control_dir -- Environment variables: - PEP517_BUILD_BACKEND=entry.point:spec - PEP517_BACKEND_PATH=paths (separated with os.pathsep) -- control_dir/input.json: - - {"kwargs": {...}} - -Results: -- control_dir/output.json - - {"return_val": ...} -""" -import json -import os -import os.path -import re -import shutil -import sys -import traceback -from glob import glob -from importlib import import_module -from os.path import join as pjoin - -# This file is run as a script, and `import wrappers` is not zip-safe, so we -# include write_json() and read_json() from wrappers.py. 
- - -def write_json(obj, path, **kwargs): - with open(path, 'w', encoding='utf-8') as f: - json.dump(obj, f, **kwargs) - - -def read_json(path): - with open(path, encoding='utf-8') as f: - return json.load(f) - - -class BackendUnavailable(Exception): - """Raised if we cannot import the backend""" - def __init__(self, traceback): - self.traceback = traceback - - -class BackendInvalid(Exception): - """Raised if the backend is invalid""" - def __init__(self, message): - self.message = message - - -class HookMissing(Exception): - """Raised if a hook is missing and we are not executing the fallback""" - def __init__(self, hook_name=None): - super().__init__(hook_name) - self.hook_name = hook_name - - -def contained_in(filename, directory): - """Test if a file is located within the given directory.""" - filename = os.path.normcase(os.path.abspath(filename)) - directory = os.path.normcase(os.path.abspath(directory)) - return os.path.commonprefix([filename, directory]) == directory - - -def _build_backend(): - """Find and load the build backend""" - # Add in-tree backend directories to the front of sys.path. - backend_path = os.environ.get('PEP517_BACKEND_PATH') - if backend_path: - extra_pathitems = backend_path.split(os.pathsep) - sys.path[:0] = extra_pathitems - - ep = os.environ['PEP517_BUILD_BACKEND'] - mod_path, _, obj_path = ep.partition(':') - try: - obj = import_module(mod_path) - except ImportError: - raise BackendUnavailable(traceback.format_exc()) - - if backend_path: - if not any( - contained_in(obj.__file__, path) - for path in extra_pathitems - ): - raise BackendInvalid("Backend was not loaded from backend-path") - - if obj_path: - for path_part in obj_path.split('.'): - obj = getattr(obj, path_part) - return obj - - -def _supported_features(): - """Return the list of options features supported by the backend. - - Returns a list of strings. - The only possible value is 'build_editable'. - """ - backend = _build_backend() - features = [] - if hasattr(backend, "build_editable"): - features.append("build_editable") - return features - - -def get_requires_for_build_wheel(config_settings): - """Invoke the optional get_requires_for_build_wheel hook - - Returns [] if the hook is not defined. - """ - backend = _build_backend() - try: - hook = backend.get_requires_for_build_wheel - except AttributeError: - return [] - else: - return hook(config_settings) - - -def get_requires_for_build_editable(config_settings): - """Invoke the optional get_requires_for_build_editable hook - - Returns [] if the hook is not defined. - """ - backend = _build_backend() - try: - hook = backend.get_requires_for_build_editable - except AttributeError: - return [] - else: - return hook(config_settings) - - -def prepare_metadata_for_build_wheel( - metadata_directory, config_settings, _allow_fallback): - """Invoke optional prepare_metadata_for_build_wheel - - Implements a fallback by building a wheel if the hook isn't defined, - unless _allow_fallback is False in which case HookMissing is raised. 
- """ - backend = _build_backend() - try: - hook = backend.prepare_metadata_for_build_wheel - except AttributeError: - if not _allow_fallback: - raise HookMissing() - whl_basename = backend.build_wheel(metadata_directory, config_settings) - return _get_wheel_metadata_from_wheel(whl_basename, metadata_directory, - config_settings) - else: - return hook(metadata_directory, config_settings) - - -def prepare_metadata_for_build_editable( - metadata_directory, config_settings, _allow_fallback): - """Invoke optional prepare_metadata_for_build_editable - - Implements a fallback by building an editable wheel if the hook isn't - defined, unless _allow_fallback is False in which case HookMissing is - raised. - """ - backend = _build_backend() - try: - hook = backend.prepare_metadata_for_build_editable - except AttributeError: - if not _allow_fallback: - raise HookMissing() - try: - build_hook = backend.build_editable - except AttributeError: - raise HookMissing(hook_name='build_editable') - else: - whl_basename = build_hook(metadata_directory, config_settings) - return _get_wheel_metadata_from_wheel(whl_basename, - metadata_directory, - config_settings) - else: - return hook(metadata_directory, config_settings) - - -WHEEL_BUILT_MARKER = 'PEP517_ALREADY_BUILT_WHEEL' - - -def _dist_info_files(whl_zip): - """Identify the .dist-info folder inside a wheel ZipFile.""" - res = [] - for path in whl_zip.namelist(): - m = re.match(r'[^/\\]+-[^/\\]+\.dist-info/', path) - if m: - res.append(path) - if res: - return res - raise Exception("No .dist-info folder found in wheel") - - -def _get_wheel_metadata_from_wheel( - whl_basename, metadata_directory, config_settings): - """Extract the metadata from a wheel. - - Fallback for when the build backend does not - define the 'get_wheel_metadata' hook. - """ - from zipfile import ZipFile - with open(os.path.join(metadata_directory, WHEEL_BUILT_MARKER), 'wb'): - pass # Touch marker file - - whl_file = os.path.join(metadata_directory, whl_basename) - with ZipFile(whl_file) as zipf: - dist_info = _dist_info_files(zipf) - zipf.extractall(path=metadata_directory, members=dist_info) - return dist_info[0].split('/')[0] - - -def _find_already_built_wheel(metadata_directory): - """Check for a wheel already built during the get_wheel_metadata hook. - """ - if not metadata_directory: - return None - metadata_parent = os.path.dirname(metadata_directory) - if not os.path.isfile(pjoin(metadata_parent, WHEEL_BUILT_MARKER)): - return None - - whl_files = glob(os.path.join(metadata_parent, '*.whl')) - if not whl_files: - print('Found wheel built marker, but no .whl files') - return None - if len(whl_files) > 1: - print('Found multiple .whl files; unspecified behaviour. ' - 'Will call build_wheel.') - return None - - # Exactly one .whl file - return whl_files[0] - - -def build_wheel(wheel_directory, config_settings, metadata_directory=None): - """Invoke the mandatory build_wheel hook. - - If a wheel was already built in the - prepare_metadata_for_build_wheel fallback, this - will copy it rather than rebuilding the wheel. - """ - prebuilt_whl = _find_already_built_wheel(metadata_directory) - if prebuilt_whl: - shutil.copy2(prebuilt_whl, wheel_directory) - return os.path.basename(prebuilt_whl) - - return _build_backend().build_wheel(wheel_directory, config_settings, - metadata_directory) - - -def build_editable(wheel_directory, config_settings, metadata_directory=None): - """Invoke the optional build_editable hook. 
- - If a wheel was already built in the - prepare_metadata_for_build_editable fallback, this - will copy it rather than rebuilding the wheel. - """ - backend = _build_backend() - try: - hook = backend.build_editable - except AttributeError: - raise HookMissing() - else: - prebuilt_whl = _find_already_built_wheel(metadata_directory) - if prebuilt_whl: - shutil.copy2(prebuilt_whl, wheel_directory) - return os.path.basename(prebuilt_whl) - - return hook(wheel_directory, config_settings, metadata_directory) - - -def get_requires_for_build_sdist(config_settings): - """Invoke the optional get_requires_for_build_wheel hook - - Returns [] if the hook is not defined. - """ - backend = _build_backend() - try: - hook = backend.get_requires_for_build_sdist - except AttributeError: - return [] - else: - return hook(config_settings) - - -class _DummyException(Exception): - """Nothing should ever raise this exception""" - - -class GotUnsupportedOperation(Exception): - """For internal use when backend raises UnsupportedOperation""" - def __init__(self, traceback): - self.traceback = traceback - - -def build_sdist(sdist_directory, config_settings): - """Invoke the mandatory build_sdist hook.""" - backend = _build_backend() - try: - return backend.build_sdist(sdist_directory, config_settings) - except getattr(backend, 'UnsupportedOperation', _DummyException): - raise GotUnsupportedOperation(traceback.format_exc()) - - -HOOK_NAMES = { - 'get_requires_for_build_wheel', - 'prepare_metadata_for_build_wheel', - 'build_wheel', - 'get_requires_for_build_editable', - 'prepare_metadata_for_build_editable', - 'build_editable', - 'get_requires_for_build_sdist', - 'build_sdist', - '_supported_features', -} - - -def main(): - if len(sys.argv) < 3: - sys.exit("Needs args: hook_name, control_dir") - hook_name = sys.argv[1] - control_dir = sys.argv[2] - if hook_name not in HOOK_NAMES: - sys.exit("Unknown hook: %s" % hook_name) - hook = globals()[hook_name] - - hook_input = read_json(pjoin(control_dir, 'input.json')) - - json_out = {'unsupported': False, 'return_val': None} - try: - json_out['return_val'] = hook(**hook_input['kwargs']) - except BackendUnavailable as e: - json_out['no_backend'] = True - json_out['traceback'] = e.traceback - except BackendInvalid as e: - json_out['backend_invalid'] = True - json_out['backend_error'] = e.message - except GotUnsupportedOperation as e: - json_out['unsupported'] = True - json_out['traceback'] = e.traceback - except HookMissing as e: - json_out['hook_missing'] = True - json_out['missing_hook_name'] = e.hook_name or hook_name - - write_json(json_out, pjoin(control_dir, 'output.json'), indent=2) - - -if __name__ == '__main__': - main() diff --git a/pipenv/vendor/pep517/meta.py b/pipenv/vendor/pep517/meta.py deleted file mode 100644 index 4afc3c047a..0000000000 --- a/pipenv/vendor/pep517/meta.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Build metadata for a project using PEP 517 hooks. 
-""" -import argparse -import functools -import logging -import os -import shutil -import tempfile - -try: - import importlib.metadata as imp_meta -except ImportError: - import importlib_metadata as imp_meta - -try: - from zipfile import Path -except ImportError: - from zipp import Path - -from .build import compat_system, load_system, validate_system -from .dirtools import dir_to_zipfile -from .envbuild import BuildEnvironment -from .wrappers import Pep517HookCaller, quiet_subprocess_runner - -log = logging.getLogger(__name__) - - -def _prep_meta(hooks, env, dest): - reqs = hooks.get_requires_for_build_wheel({}) - log.info('Got build requires: %s', reqs) - - env.pip_install(reqs) - log.info('Installed dynamic build dependencies') - - with tempfile.TemporaryDirectory() as td: - log.info('Trying to build metadata in %s', td) - filename = hooks.prepare_metadata_for_build_wheel(td, {}) - source = os.path.join(td, filename) - shutil.move(source, os.path.join(dest, os.path.basename(filename))) - - -def build(source_dir='.', dest=None, system=None): - system = system or load_system(source_dir) - dest = os.path.join(source_dir, dest or 'dist') - os.makedirs(dest, exist_ok=True) - validate_system(system) - hooks = Pep517HookCaller( - source_dir, system['build-backend'], system.get('backend-path') - ) - - with hooks.subprocess_runner(quiet_subprocess_runner): - with BuildEnvironment() as env: - env.pip_install(system['requires']) - _prep_meta(hooks, env, dest) - - -def build_as_zip(builder=build): - with tempfile.TemporaryDirectory() as out_dir: - builder(dest=out_dir) - return dir_to_zipfile(out_dir) - - -def load(root): - """ - Given a source directory (root) of a package, - return an importlib.metadata.Distribution object - with metadata build from that package. 
- """ - root = os.path.expanduser(root) - system = compat_system(root) - builder = functools.partial(build, source_dir=root, system=system) - path = Path(build_as_zip(builder)) - return imp_meta.PathDistribution(path) - - -parser = argparse.ArgumentParser() -parser.add_argument( - 'source_dir', - help="A directory containing pyproject.toml", -) -parser.add_argument( - '--out-dir', '-o', - help="Destination in which to save the builds relative to source dir", -) - - -def main(): - args = parser.parse_args() - build(args.source_dir, args.out_dir) - - -if __name__ == '__main__': - main() diff --git a/pipenv/vendor/pep517/wrappers.py b/pipenv/vendor/pep517/wrappers.py deleted file mode 100644 index 987a62aaa9..0000000000 --- a/pipenv/vendor/pep517/wrappers.py +++ /dev/null @@ -1,362 +0,0 @@ -import json -import os -import sys -import tempfile -import threading -from contextlib import contextmanager -from os.path import abspath -from os.path import join as pjoin -from subprocess import STDOUT, check_call, check_output - -from .in_process import _in_proc_script_path - -__all__ = [ - 'BackendUnavailable', - 'BackendInvalid', - 'HookMissing', - 'UnsupportedOperation', - 'default_subprocess_runner', - 'quiet_subprocess_runner', - 'Pep517HookCaller', -] - - -def write_json(obj, path, **kwargs): - with open(path, 'w', encoding='utf-8') as f: - json.dump(obj, f, **kwargs) - - -def read_json(path): - with open(path, encoding='utf-8') as f: - return json.load(f) - - -class BackendUnavailable(Exception): - """Will be raised if the backend cannot be imported in the hook process.""" - def __init__(self, traceback): - self.traceback = traceback - - -class BackendInvalid(Exception): - """Will be raised if the backend is invalid.""" - def __init__(self, backend_name, backend_path, message): - self.backend_name = backend_name - self.backend_path = backend_path - self.message = message - - -class HookMissing(Exception): - """Will be raised on missing hooks.""" - def __init__(self, hook_name): - super().__init__(hook_name) - self.hook_name = hook_name - - -class UnsupportedOperation(Exception): - """May be raised by build_sdist if the backend indicates that it can't.""" - def __init__(self, traceback): - self.traceback = traceback - - -def default_subprocess_runner(cmd, cwd=None, extra_environ=None): - """The default method of calling the wrapper subprocess.""" - env = os.environ.copy() - if extra_environ: - env.update(extra_environ) - - check_call(cmd, cwd=cwd, env=env) - - -def quiet_subprocess_runner(cmd, cwd=None, extra_environ=None): - """A method of calling the wrapper subprocess while suppressing output.""" - env = os.environ.copy() - if extra_environ: - env.update(extra_environ) - - check_output(cmd, cwd=cwd, env=env, stderr=STDOUT) - - -def norm_and_check(source_tree, requested): - """Normalise and check a backend path. - - Ensure that the requested backend path is specified as a relative path, - and resolves to a location under the given source tree. - - Return an absolute version of the requested path. - """ - if os.path.isabs(requested): - raise ValueError("paths must be relative") - - abs_source = os.path.abspath(source_tree) - abs_requested = os.path.normpath(os.path.join(abs_source, requested)) - # We have to use commonprefix for Python 2.7 compatibility. 
So we - # normalise case to avoid problems because commonprefix is a character - # based comparison :-( - norm_source = os.path.normcase(abs_source) - norm_requested = os.path.normcase(abs_requested) - if os.path.commonprefix([norm_source, norm_requested]) != norm_source: - raise ValueError("paths must be inside source tree") - - return abs_requested - - -class Pep517HookCaller: - """A wrapper around a source directory to be built with a PEP 517 backend. - - :param source_dir: The path to the source directory, containing - pyproject.toml. - :param build_backend: The build backend spec, as per PEP 517, from - pyproject.toml. - :param backend_path: The backend path, as per PEP 517, from pyproject.toml. - :param runner: A callable that invokes the wrapper subprocess. - :param python_executable: The Python executable used to invoke the backend - - The 'runner', if provided, must expect the following: - - - cmd: a list of strings representing the command and arguments to - execute, as would be passed to e.g. 'subprocess.check_call'. - - cwd: a string representing the working directory that must be - used for the subprocess. Corresponds to the provided source_dir. - - extra_environ: a dict mapping environment variable names to values - which must be set for the subprocess execution. - """ - def __init__( - self, - source_dir, - build_backend, - backend_path=None, - runner=None, - python_executable=None, - ): - if runner is None: - runner = default_subprocess_runner - - self.source_dir = abspath(source_dir) - self.build_backend = build_backend - if backend_path: - backend_path = [ - norm_and_check(self.source_dir, p) for p in backend_path - ] - self.backend_path = backend_path - self._subprocess_runner = runner - if not python_executable: - python_executable = sys.executable - self.python_executable = python_executable - - @contextmanager - def subprocess_runner(self, runner): - """A context manager for temporarily overriding the default subprocess - runner. - """ - prev = self._subprocess_runner - self._subprocess_runner = runner - try: - yield - finally: - self._subprocess_runner = prev - - def _supported_features(self): - """Return the list of optional features supported by the backend.""" - return self._call_hook('_supported_features', {}) - - def get_requires_for_build_wheel(self, config_settings=None): - """Identify packages required for building a wheel - - Returns a list of dependency specifications, e.g.:: - - ["wheel >= 0.25", "setuptools"] - - This does not include requirements specified in pyproject.toml. - It returns the result of calling the equivalently named hook in a - subprocess. - """ - return self._call_hook('get_requires_for_build_wheel', { - 'config_settings': config_settings - }) - - def prepare_metadata_for_build_wheel( - self, metadata_directory, config_settings=None, - _allow_fallback=True): - """Prepare a ``*.dist-info`` folder with metadata for this project. - - Returns the name of the newly created folder. - - If the build backend defines a hook with this name, it will be called - in a subprocess. If not, the backend will be asked to build a wheel, - and the dist-info extracted from that (unless _allow_fallback is - False). - """ - return self._call_hook('prepare_metadata_for_build_wheel', { - 'metadata_directory': abspath(metadata_directory), - 'config_settings': config_settings, - '_allow_fallback': _allow_fallback, - }) - - def build_wheel( - self, wheel_directory, config_settings=None, - metadata_directory=None): - """Build a wheel from this project. 
- - Returns the name of the newly created file. - - In general, this will call the 'build_wheel' hook in the backend. - However, if that was previously called by - 'prepare_metadata_for_build_wheel', and the same metadata_directory is - used, the previously built wheel will be copied to wheel_directory. - """ - if metadata_directory is not None: - metadata_directory = abspath(metadata_directory) - return self._call_hook('build_wheel', { - 'wheel_directory': abspath(wheel_directory), - 'config_settings': config_settings, - 'metadata_directory': metadata_directory, - }) - - def get_requires_for_build_editable(self, config_settings=None): - """Identify packages required for building an editable wheel - - Returns a list of dependency specifications, e.g.:: - - ["wheel >= 0.25", "setuptools"] - - This does not include requirements specified in pyproject.toml. - It returns the result of calling the equivalently named hook in a - subprocess. - """ - return self._call_hook('get_requires_for_build_editable', { - 'config_settings': config_settings - }) - - def prepare_metadata_for_build_editable( - self, metadata_directory, config_settings=None, - _allow_fallback=True): - """Prepare a ``*.dist-info`` folder with metadata for this project. - - Returns the name of the newly created folder. - - If the build backend defines a hook with this name, it will be called - in a subprocess. If not, the backend will be asked to build an editable - wheel, and the dist-info extracted from that (unless _allow_fallback is - False). - """ - return self._call_hook('prepare_metadata_for_build_editable', { - 'metadata_directory': abspath(metadata_directory), - 'config_settings': config_settings, - '_allow_fallback': _allow_fallback, - }) - - def build_editable( - self, wheel_directory, config_settings=None, - metadata_directory=None): - """Build an editable wheel from this project. - - Returns the name of the newly created file. - - In general, this will call the 'build_editable' hook in the backend. - However, if that was previously called by - 'prepare_metadata_for_build_editable', and the same metadata_directory - is used, the previously built wheel will be copied to wheel_directory. - """ - if metadata_directory is not None: - metadata_directory = abspath(metadata_directory) - return self._call_hook('build_editable', { - 'wheel_directory': abspath(wheel_directory), - 'config_settings': config_settings, - 'metadata_directory': metadata_directory, - }) - - def get_requires_for_build_sdist(self, config_settings=None): - """Identify packages required for building a wheel - - Returns a list of dependency specifications, e.g.:: - - ["setuptools >= 26"] - - This does not include requirements specified in pyproject.toml. - It returns the result of calling the equivalently named hook in a - subprocess. - """ - return self._call_hook('get_requires_for_build_sdist', { - 'config_settings': config_settings - }) - - def build_sdist(self, sdist_directory, config_settings=None): - """Build an sdist from this project. - - Returns the name of the newly created file. - - This calls the 'build_sdist' backend hook in a subprocess. 
- """ - return self._call_hook('build_sdist', { - 'sdist_directory': abspath(sdist_directory), - 'config_settings': config_settings, - }) - - def _call_hook(self, hook_name, kwargs): - extra_environ = {'PEP517_BUILD_BACKEND': self.build_backend} - - if self.backend_path: - backend_path = os.pathsep.join(self.backend_path) - extra_environ['PEP517_BACKEND_PATH'] = backend_path - - with tempfile.TemporaryDirectory() as td: - hook_input = {'kwargs': kwargs} - write_json(hook_input, pjoin(td, 'input.json'), indent=2) - - # Run the hook in a subprocess - with _in_proc_script_path() as script: - python = self.python_executable - self._subprocess_runner( - [python, abspath(str(script)), hook_name, td], - cwd=self.source_dir, - extra_environ=extra_environ - ) - - data = read_json(pjoin(td, 'output.json')) - if data.get('unsupported'): - raise UnsupportedOperation(data.get('traceback', '')) - if data.get('no_backend'): - raise BackendUnavailable(data.get('traceback', '')) - if data.get('backend_invalid'): - raise BackendInvalid( - backend_name=self.build_backend, - backend_path=self.backend_path, - message=data.get('backend_error', '') - ) - if data.get('hook_missing'): - raise HookMissing(data.get('missing_hook_name') or hook_name) - return data['return_val'] - - -class LoggerWrapper(threading.Thread): - """ - Read messages from a pipe and redirect them - to a logger (see python's logging module). - """ - - def __init__(self, logger, level): - threading.Thread.__init__(self) - self.daemon = True - - self.logger = logger - self.level = level - - # create the pipe and reader - self.fd_read, self.fd_write = os.pipe() - self.reader = os.fdopen(self.fd_read) - - self.start() - - def fileno(self): - return self.fd_write - - @staticmethod - def remove_newline(msg): - return msg[:-1] if msg.endswith(os.linesep) else msg - - def run(self): - for line in self.reader: - self._write(self.remove_newline(line)) - - def _write(self, message): - self.logger.log(self.level, message) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index e4d86094c8..395d0c67fe 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -3,7 +3,6 @@ click==8.1.3 colorama==0.4.6 dparse==0.6.3 markupsafe==2.1.2 -pep517==0.13.0 pexpect==4.8.0 pipdeptree==2.8.0 plette==0.4.4 diff --git a/pyproject.toml b/pyproject.toml index f6cf6240f9..23e8c3d47d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,12 +12,11 @@ license = {file = "LICENSE"} authors = [ {name = "Pipenv maintainer team", email = "distutils-sig@python.org"}, ] -requires-python = ">=3.7" +requires-python = ">=3.8" classifiers=[ "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", diff --git a/run-tests.sh b/run-tests.sh index 8d589726f6..55e38dd363 100755 --- a/run-tests.sh +++ b/run-tests.sh @@ -13,7 +13,7 @@ export PIPENV_CACHE_DIR=`mktemp -d 2>/dev/null || mktemp -d -t 'pipenv_cache'` # on some Linux OS python is python3 PYTHON=${PYTHON:-"python"} -PIPENV_PYTHON="${PIPENV_PYTHON:-3.7}" +PIPENV_PYTHON="${PIPENV_PYTHON:-3.8}" PIP_CALL="${PIP_CALL:-${PYTHON} -m pip install --user}" diff --git a/tasks/vendoring/patches/patched/pip23-compatability-finder.patch b/tasks/vendoring/patches/patched/pip23-compatability-finder.patch index 4bfbcd29f1..7fb665de9b 100644 --- 
a/tasks/vendoring/patches/patched/pip23-compatability-finder.patch +++ b/tasks/vendoring/patches/patched/pip23-compatability-finder.patch @@ -1,5 +1,5 @@ diff --git a/pipenv/patched/pip/_internal/index/package_finder.py b/pipenv/patched/pip/_internal/index/package_finder.py -index e74e6623..cc81f35c 100644 +index 2121ca327..04352c7d1 100644 --- a/pipenv/patched/pip/_internal/index/package_finder.py +++ b/pipenv/patched/pip/_internal/index/package_finder.py @@ -125,6 +125,7 @@ class LinkEvaluator: @@ -43,7 +43,7 @@ index e74e6623..cc81f35c 100644 @@ -199,7 +203,7 @@ class LinkEvaluator: return (LinkType.different_project, reason) - supported_tags = self._target_python.get_tags() + supported_tags = self._target_python.get_unsorted_tags() - if not wheel.supported(supported_tags): + if not wheel.supported(supported_tags) and not self._ignore_compatibility: # Include the wheel's tags in the reason string to @@ -66,7 +66,7 @@ index e74e6623..cc81f35c 100644 + def _sort_key( + self, + candidate: InstallationCandidate, -+ ignore_compatibility: bool = True ++ ignore_compatibility: bool = True, + ) -> CandidateSortingKey: """ Function to pass as the `key` argument to a call to sorted() to sort diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index e16f783517..3a4c88f29b 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -104,14 +104,6 @@ def pytest_runtest_setup(item): pytest.skip('test not applicable on python 3.8') if item.get_closest_marker('skip_osx') is not None and sys.platform == 'darwin': pytest.skip('test does not apply on OSX') - if item.get_closest_marker('lte_py36') is not None and ( - sys.version_info >= (3, 7) - ): - pytest.skip('test only runs on python < 3.7') - if item.get_closest_marker('skip_py36') is not None and ( - sys.version_info[:2] == (3, 6) - ): - pytest.skip('test is skipped on python 3.6') if item.get_closest_marker('skip_windows') is not None and (os.name == 'nt'): pytest.skip('test does not run on windows') diff --git a/tests/integration/test_install_basic.py b/tests/integration/test_install_basic.py index 02d5b855d1..cfe231f0eb 100644 --- a/tests/integration/test_install_basic.py +++ b/tests/integration/test_install_basic.py @@ -526,6 +526,7 @@ def test_install_does_not_exclude_packaging(pipenv_instance_pypi): @pytest.mark.basic @pytest.mark.install @pytest.mark.needs_internet +@pytest.mark.skip(reason="pip 23.3 now vendors in truststore, so this test's assumptions are invalid") def test_install_will_supply_extra_pip_args(pipenv_instance_pypi): with pipenv_instance_pypi() as p: c = p.pipenv("""install -v dataclasses-json --extra-pip-args="--use-feature=truststore --proxy=test" """) @@ -556,3 +557,72 @@ def test_install_tarball_is_actually_installed(pipenv_instance_pypi): assert c.returncode == 0 c = p.pipenv("""run python -c "from dataclasses_json import dataclass_json" """) assert c.returncode == 0 + + +@pytest.mark.basic +@pytest.mark.install +def test_category_sorted_alphabetically_with_directive(pipenv_instance_private_pypi): + with pipenv_instance_private_pypi() as p: + with open(p.pipfile_path, "w") as f: + contents = """ +[pipenv] +sort_pipfile = true + +[packages] +atomicwrites = "*" +colorama = "*" + """.strip() + f.write(contents) + c = p.pipenv("install build") + assert c.returncode == 0 + assert "build" in p.pipfile["packages"] + assert list(p.pipfile["packages"].keys()) == ["atomicwrites", "build", "colorama"] + + +@pytest.mark.basic +@pytest.mark.install +def 
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index e16f783517..3a4c88f29b 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -104,14 +104,6 @@ def pytest_runtest_setup(item):
         pytest.skip('test not applicable on python 3.8')
     if item.get_closest_marker('skip_osx') is not None and sys.platform == 'darwin':
         pytest.skip('test does not apply on OSX')
-    if item.get_closest_marker('lte_py36') is not None and (
-        sys.version_info >= (3, 7)
-    ):
-        pytest.skip('test only runs on python < 3.7')
-    if item.get_closest_marker('skip_py36') is not None and (
-        sys.version_info[:2] == (3, 6)
-    ):
-        pytest.skip('test is skipped on python 3.6')
     if item.get_closest_marker('skip_windows') is not None and (os.name == 'nt'):
         pytest.skip('test does not run on windows')
 
diff --git a/tests/integration/test_install_basic.py b/tests/integration/test_install_basic.py
index 02d5b855d1..cfe231f0eb 100644
--- a/tests/integration/test_install_basic.py
+++ b/tests/integration/test_install_basic.py
@@ -526,6 +526,7 @@ def test_install_does_not_exclude_packaging(pipenv_instance_pypi):
 @pytest.mark.basic
 @pytest.mark.install
 @pytest.mark.needs_internet
+@pytest.mark.skip(reason="pip 23.3 now vendors in truststore, so the test assumptions are invalid")
 def test_install_will_supply_extra_pip_args(pipenv_instance_pypi):
     with pipenv_instance_pypi() as p:
         c = p.pipenv("""install -v dataclasses-json --extra-pip-args="--use-feature=truststore --proxy=test" """)
@@ -556,3 +557,72 @@ def test_install_tarball_is_actually_installed(pipenv_instance_pypi):
     assert c.returncode == 0
     c = p.pipenv("""run python -c "from dataclasses_json import dataclass_json" """)
     assert c.returncode == 0
+
+
+@pytest.mark.basic
+@pytest.mark.install
+def test_category_sorted_alphabetically_with_directive(pipenv_instance_private_pypi):
+    with pipenv_instance_private_pypi() as p:
+        with open(p.pipfile_path, "w") as f:
+            contents = """
+[pipenv]
+sort_pipfile = true
+
+[packages]
+atomicwrites = "*"
+colorama = "*"
+            """.strip()
+            f.write(contents)
+        c = p.pipenv("install build")
+        assert c.returncode == 0
+        assert "build" in p.pipfile["packages"]
+        assert list(p.pipfile["packages"].keys()) == ["atomicwrites", "build", "colorama"]
+
+
+@pytest.mark.basic
+@pytest.mark.install
+def test_sorting_handles_str_values_and_dict_values(pipenv_instance_private_pypi):
+    with pipenv_instance_private_pypi() as p:
+        with open(p.pipfile_path, "w") as f:
+            contents = """
+[pipenv]
+sort_pipfile = true
+
+[packages]
+zipp = {version = "*"}
+parse = "*"
+colorama = "*"
+atomicwrites = {version = "*"}
+            """.strip()
+            f.write(contents)
+        c = p.pipenv("install build")
+        assert c.returncode == 0
+        assert "build" in p.pipfile["packages"]
+        assert list(p.pipfile["packages"].keys()) == [
+            "atomicwrites",
+            "build",
+            "colorama",
+            "parse",
+            "zipp",
+        ]
+
+
+@pytest.mark.basic
+@pytest.mark.install
+def test_category_not_sorted_without_directive(pipenv_instance_private_pypi):
+    with pipenv_instance_private_pypi() as p:
+        with open(p.pipfile_path, "w") as f:
+            contents = """
+[packages]
+atomicwrites = "*"
+colorama = "*"
+            """.strip()
+            f.write(contents)
+        c = p.pipenv("install build")
+        assert c.returncode == 0
+        assert "build" in p.pipfile["packages"]
+        assert list(p.pipfile["packages"].keys()) == [
+            "atomicwrites",
+            "colorama",
+            "build",
+        ]
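The three tests above pin down the sort_pipfile contract: with the directive set, the category is re-sorted alphabetically after an install, and without it the new package is simply appended. A toy sketch of that rule over a plain dict (add_package is an illustrative helper; pipenv itself operates on its parsed Pipfile document, but the ordering logic is the same idea):

    def add_package(packages: dict, name: str, spec="*", sort_pipfile: bool = False) -> dict:
        """Insert a package into a category, optionally re-sorting it alphabetically."""
        packages[name] = spec
        if sort_pipfile:
            # Purely cosmetic: ordering in the Pipfile never affects resolution.
            return dict(sorted(packages.items()))
        return packages


    pkgs = {"atomicwrites": "*", "colorama": "*"}
    print(list(add_package(dict(pkgs), "build", sort_pipfile=True)))
    # -> ['atomicwrites', 'build', 'colorama']
    print(list(add_package(dict(pkgs), "build")))
    # -> ['atomicwrites', 'colorama', 'build']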
diff --git a/tests/integration/test_lockfile.py b/tests/integration/test_lockfile.py
new file mode 100644
index 0000000000..2617fa8839
--- /dev/null
+++ b/tests/integration/test_lockfile.py
@@ -0,0 +1,63 @@
+from collections import defaultdict
+import json
+import pytest
+
+from pipenv.project import Project
+from pipenv.utils import requirements
+
+
+@pytest.fixture
+def pypi_lockfile():
+    lockfile = defaultdict(dict)
+    lockfile["_meta"] = {
+        "sources": [
+            {
+                "name": "pypi",
+                "url": "https://pypi.org/simple",
+                "verify_ssl": True,
+            }
+        ]
+    }
+    yield lockfile
+
+
+def test_git_branch_contains_slashes(
+    pipenv_instance_pypi, pypi_lockfile
+):
+    pypi_lockfile["default"]["google-api-python-client"] = {
+        "git": "https://github.com/thehesiod/google-api-python-client.git@thehesiod/batch-retries2",
+        "markers": "python_version >= '3.7'",
+        "ref": "03803c21fc13a345e978f32775b2f2fa23c8e706"
+    }
+
+    with pipenv_instance_pypi() as p:
+        with open(p.lockfile_path, "w") as f:
+            json.dump(pypi_lockfile, f)
+
+        project = Project()
+        lockfile = project.load_lockfile(expand_env_vars=False)
+        deps = lockfile["default"]
+        pip_installable_lines = requirements.requirements_from_lockfile(
+            deps, include_hashes=False, include_markers=True
+        )
+        assert pip_installable_lines == ["google-api-python-client@ git+https://github.com/thehesiod/google-api-python-client.git@03803c21fc13a345e978f32775b2f2fa23c8e706"]
+
+
+def test_git_branch_contains_subdirectory_fragment(pipenv_instance_pypi, pypi_lockfile):
+    pypi_lockfile["default"]["pep508_package"] = {
+        "git": "https://github.com/techalchemy/test-project.git@master",
+        "subdirectory": "parent_folder/pep508-package",
+        "ref": "03803c21fc13a345e978f32775b2f2fa23c8e706"
+    }
+
+    with pipenv_instance_pypi() as p:
+        with open(p.lockfile_path, "w") as f:
+            json.dump(pypi_lockfile, f)
+
+        project = Project()
+        lockfile = project.load_lockfile(expand_env_vars=False)
+        deps = lockfile["default"]
+        pip_installable_lines = requirements.requirements_from_lockfile(
+            deps, include_hashes=False, include_markers=True
+        )
+        assert pip_installable_lines == ['pep508_package@ git+https://github.com/techalchemy/test-project.git@03803c21fc13a345e978f32775b2f2fa23c8e706#subdirectory=parent_folder/pep508-package']
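Both new tests encode the same conversion rule: the branch reference after the @ in the locked git URL (which may contain slashes or other special characters) is discarded in favour of the immutable ref, and a subdirectory key becomes a #subdirectory= fragment. A simplified re-implementation of that mapping, assuming only the shapes exercised above; the real helper is requirements_from_lockfile, which also handles hashes, markers, and more URL forms:

    def vcs_entry_to_pip_line(name: str, entry: dict) -> str:
        """Build a pip-installable requirement line from a lockfile VCS entry."""
        url = entry["git"]
        if "@" in url.split("://", 1)[-1]:
            # Drop the branch reference embedded in the URL; the locked ref is
            # what should actually be installed.
            url = url[: url.rindex("@")]
        line = f"{name}@ git+{url}@{entry['ref']}"
        if "subdirectory" in entry:
            line += f"#subdirectory={entry['subdirectory']}"
        return line


    entry = {
        "git": "https://github.com/techalchemy/test-project.git@master",
        "subdirectory": "parent_folder/pep508-package",
        "ref": "03803c21fc13a345e978f32775b2f2fa23c8e706",
    }
    print(vcs_entry_to_pip_line("pep508_package", entry))
    # -> matches the expected line asserted in the subdirectory test above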
"w") as f: + contents = """ +[pipenv] +sort_pipfile = true + +[packages] +zipp = "*" +six = 1.11 +colorama = "*" +atomicwrites = "*" + """.strip() + f.write(contents) + + package_name = "six" + c = p.pipenv(f"upgrade {package_name}") + assert c.returncode == 0 + assert list(p.pipfile["packages"].keys()) == [ + "atomicwrites", + "colorama", + "six", + "zipp", + ] + + +@pytest.mark.upgrade +def test_category_not_sorted_without_directive(pipenv_instance_private_pypi): + with pipenv_instance_private_pypi() as p: + with open(p.pipfile_path, "w") as f: + contents = """ +[packages] +zipp = "*" +six = 1.11 +colorama = "*" +atomicwrites = "*" + """.strip() + f.write(contents) + + package_name = "six" + c = p.pipenv(f"upgrade {package_name}") + assert c.returncode == 0 + assert list(p.pipfile["packages"].keys()) == [ + "zipp", + "six", + "colorama", + "atomicwrites", + ]