diff --git a/.flake8 b/.flake8
index e0107ae7ae9..15a4db83107 100644
--- a/.flake8
+++ b/.flake8
@@ -5,9 +5,6 @@ ban-relative-imports = true
# flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy
format-greedy = 1
inline-quotes = double
-# Allow omission of a return type hint for __init__ if at least one argument is annotated
-# used by flake8-annotations
-mypy-init-return = true
enable-extensions = TC, TC1
type-checking-exempt-modules = typing, typing-extensions
eradicate-whitelist-extend = ^-.*;
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
new file mode 100644
index 00000000000..73e701c41be
--- /dev/null
+++ b/.github/workflows/docs.yml
@@ -0,0 +1,75 @@
+name: "Documentation Preview"
+
+on:
+ pull_request:
+ # allow repository maintainers to modify and test workflow
+ paths-ignore:
+ - "**"
+ - "!.github/workflows/docs.yml"
+ pull_request_target:
+ # enable runs for this workflow when labeled as documentation only
+ # prevent execution when the workflow itself is modified from a fork
+ types:
+ - labeled
+ - synchronize
+ paths-ignore:
+ - "**"
+ - "!docs/**"
+
+jobs:
+ deploy:
+ name: Build & Deploy
+ runs-on: ubuntu-latest
+ if: |
+ (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'Documentation'))
+      || (github.event_name != 'pull_request_target' && github.event.pull_request.head.repo.full_name == github.repository)
+ steps:
+ - name: Checkout Website Source
+ uses: actions/checkout@v3
+ with:
+ repository: python-poetry/website
+
+ - name: Checkout Poetry Source
+ uses: actions/checkout@v3
+ with:
+ path: poetry
+ ref: ${{ github.event.pull_request.head.sha }}
+
+ - name: Set up Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: "3.9"
+
+ - name: Setup Node
+ uses: actions/setup-node@v2
+ with:
+ node-version: "14"
+
+ - name: Build Assets
+ run: npm ci && npm run prod
+
+ - name: Fetch Documentation
+ run: |
+ python -m pip install poetry
+ poetry install --no-dev
+ poetry run python bin/website build --local ./poetry
+
+ - name: Install Hugo
+ uses: peaceiris/actions-hugo@v2
+ with:
+ hugo-version: '0.83.1'
+
+ - name: Build
+ run: hugo -v --minify
+
+ - name: Deploy
+ uses: amondnet/vercel-action@v20
+ id: vercel-action
+ with:
+ vercel-token: ${{ secrets.VERCEL_TOKEN }}
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ vercel-org-id: ${{ secrets.VERCEL_ORG_ID }}
+ vercel-project-id: ${{ secrets.VERCEL_PROJECT_ID }}
+ scope: python-poetry
+ github-comment: true
+ working-directory: public
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index af810ff052e..fce06b435d6 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -5,6 +5,7 @@ on:
paths-ignore:
- 'docs/**'
- '.cirrus.yml'
+ - '.github/workflows/docs.yml'
branches:
- master
- develop
@@ -12,6 +13,7 @@ on:
paths-ignore:
- 'docs/**'
- '.cirrus.yml'
+ - '.github/workflows/docs.yml'
branches:
- '**'
@@ -29,11 +31,11 @@ jobs:
python-version: ["3.7", "3.8", "3.9", "3.10"]
include:
- os: Ubuntu
- image: ubuntu-latest
+ image: ubuntu-22.04
- os: Windows
image: windows-2022
- os: macOS
- image: macos-11
+ image: macos-12
fail-fast: false
defaults:
run:
@@ -79,12 +81,21 @@ jobs:
- name: Install dependencies
run: poetry install
+ - name: Run mypy
+ run: poetry run mypy
+
- name: Install pytest plugin
run: poetry run pip install pytest-github-actions-annotate-failures
- name: Run pytest
run: poetry run python -m pytest -p no:sugar -q tests/
+ - name: Run pytest (integration suite)
+ env:
+ POETRY_TEST_INTEGRATION_GIT_USERNAME: ${GITHUB_ACTOR}
+ POETRY_TEST_INTEGRATION_GIT_PASSWORD: ${{ secrets.GITHUB_TOKEN }}
+ run: poetry run python -m pytest -p no:sugar -q --integration tests/integration
+
- name: Get Plugin Version (poetry-plugin-export)
id: poetry-plugin-export-version
run: |
diff --git a/.github/workflows/skip.yml b/.github/workflows/skip.yml
index bda21acf365..aea1508ded7 100644
--- a/.github/workflows/skip.yml
+++ b/.github/workflows/skip.yml
@@ -7,6 +7,7 @@ on:
- '**'
- '!docs/**'
- '!.cirrus.yml'
+ - '!.github/workflows/docs.yml'
branches:
- master
- develop
@@ -15,6 +16,7 @@ on:
- '**'
- '!docs/**'
- '!.cirrus.yml'
+ - '!.github/workflows/docs.yml'
branches:
- '**'
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 989f19fa60d..f462275abab 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -24,7 +24,6 @@ repos:
hooks:
- id: python-check-mock-methods
- id: python-use-type-annotations
- - id: python-check-blanket-type-ignore
- id: python-check-blanket-noqa
- repo: https://github.com/asottile/yesqa
@@ -47,12 +46,18 @@ repos:
- pep8-naming==0.12.1
- repo: https://github.com/asottile/pyupgrade
- rev: v2.32.0
+ rev: v2.32.1
hooks:
- id: pyupgrade
args: [--py37-plus]
exclude: ^(install|get)-poetry.py$
+ - repo: https://github.com/hadialqattan/pycln
+ rev: v1.3.2
+ hooks:
+ - id: pycln
+ args: [--all]
+
- repo: https://github.com/pycqa/isort
rev: 5.10.1
hooks:
@@ -81,15 +86,7 @@ repos:
- id: flake8
additional_dependencies: *flake8_deps
- - repo: https://github.com/pre-commit/mirrors-mypy
- rev: v0.942
- hooks:
- - id: mypy
- pass_filenames: false
- additional_dependencies:
- - types-requests
-
- repo: https://github.com/pre-commit/pre-commit
- rev: v2.18.1
+ rev: v2.19.0
hooks:
- id: validate_manifest
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 73358148bfa..099130ea16d 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -55,7 +55,7 @@ Provide more context by answering these questions:
Include details about your configuration and environment:
* **Which version of Poetry are you using?** You can get the exact version by running `poetry -V` in your terminal.
-* **Which Python version Poetry has been installed for?** Execute the `debug:info` to get the information.
+* **Which Python version Poetry has been installed for?** Execute `poetry debug info` to get the information.
* **What's the name and version of the OS you're using**?
@@ -122,6 +122,13 @@ $ poetry install
$ poetry run pytest tests/
```
+Poetry uses [mypy](https://github.com/python/mypy) for typechecking, and the CI
+will fail if it finds any errors. To run mypy locally:
+
+```bash
+$ poetry run mypy
+```
+
Poetry uses the [black](https://github.com/psf/black) coding style and you must ensure that your
code follows it. If not, the CI will fail and your Pull Request will not be merged.
@@ -178,7 +185,7 @@ If you are helping with the triage of reported issues, this section provides som
#### Multiple versions
-Often times you would want to attempt to reproduce issues with multiple versions of `poetry` at the same time. For these use cases, the [pipx project](https://pipxproject.github.io/pipx/) is useful.
+Oftentimes you will want to attempt to reproduce issues with multiple versions of `poetry` at the same time. For these use cases, the [pipx project](https://pypa.github.io/pipx/) is useful.
You can set your environment up like so.
diff --git a/README.md b/README.md
index eaa72c12813..6fd3ae13740 100644
--- a/README.md
+++ b/README.md
@@ -97,72 +97,6 @@ It takes inspiration in tools that exist in other languages, like `composer` (PH
And, finally, I started `poetry` to bring another exhaustive dependency resolver to the Python community apart from
[Conda's](https://conda.io).
-### What about Pipenv?
-
-In short: I do not like the CLI it provides, or some of the decisions made,
-and I think we can make a better and more intuitive one. Here are a few things
-that I don't like.
-
-#### Dependency resolution
-
-The dependency resolution is erratic and will fail even if there is a solution. Let's take an example:
-
-```bash
-pipenv install oslo.utils==1.4.0
-```
-
-will fail with this error:
-
-```text
-Could not find a version that matches pbr!=0.7,!=2.1.0,<1.0,>=0.6,>=2.0.0
-```
-
-while Poetry will get you the right set of packages:
-
-```bash
-poetry add oslo.utils=1.4.0
-```
-
-results in :
-
-```text
- - Installing pytz (2018.3)
- - Installing netifaces (0.10.6)
- - Installing netaddr (0.7.19)
- - Installing oslo.i18n (2.1.0)
- - Installing iso8601 (0.1.12)
- - Installing six (1.11.0)
- - Installing babel (2.5.3)
- - Installing pbr (0.11.1)
- - Installing oslo.utils (1.4.0)
-```
-
-This is possible thanks to the efficient dependency resolver at the heart of Poetry.
-
-Here is a breakdown of what exactly happens here:
-
-`oslo.utils (1.4.0)` depends on:
-
-- `pbr (>=0.6,!=0.7,<1.0)`
-- `Babel (>=1.3)`
-- `six (>=1.9.0)`
-- `iso8601 (>=0.1.9)`
-- `oslo.i18n (>=1.3.0)`
-- `netaddr (>=0.7.12)`
-- `netifaces (>=0.10.4)`
-
-What interests us is `pbr (>=0.6,!=0.7,<1.0)`.
-
-At this point, poetry will choose `pbr==0.11.1` which is the latest version that matches the constraint.
-
-Next it will try to select `oslo.i18n==3.20.0` which is the latest version that matches `oslo.i18n (>=1.3.0)`.
-
-However this version requires `pbr (!=2.1.0,>=2.0.0)` which is incompatible with `pbr==0.11.1`,
-so `poetry` will try to find a version of `oslo.i18n` that satisfies `pbr (>=0.6,!=0.7,<1.0)`.
-
-By analyzing the releases of `oslo.i18n`, it will find `oslo.i18n==2.1.0` which requires `pbr (>=0.11,<2.0)`.
-At this point the rest of the resolution is straightforward since there is no more conflict.
-
## Resources
* [Official Website](https://python-poetry.org)
diff --git a/docs/_index.md b/docs/_index.md
index 9f959fd6f2c..3fc000fc248 100644
--- a/docs/_index.md
+++ b/docs/_index.md
@@ -43,9 +43,14 @@ curl -sSL https://install.python-poetry.org | python3 -
**windows powershell install instructions**
```powershell
-(Invoke-WebRequest -Uri https://install.python-poetry.org -UseBasicParsing).Content | python -
+(Invoke-WebRequest -Uri https://install.python-poetry.org -UseBasicParsing).Content | py -
```
+{{% note %}}
+If you have installed Python through the Microsoft Store, replace `py` with `python` in the command
+above.
+{{% /note %}}
+
{{% note %}}
Note that the installer does not support Python < 3.7.
{{% /note %}}
@@ -155,7 +160,7 @@ curl -sSL https://install.python-poetry.org | POETRY_UNINSTALL=1 python3 -
{{< tab tabID="installing-with-pipx" >}}
-Using [`pipx`](https://github.com/pipxproject/pipx) to install Poetry is also possible.
+Using [`pipx`](https://github.com/pypa/pipx) to install Poetry is also possible.
`pipx` is used to install Python CLI applications globally while still isolating them in virtual environments.
This allows for clean upgrades and uninstalls.
diff --git a/docs/cli.md b/docs/cli.md
index 1ce86fcc149..81768824df0 100644
--- a/docs/cli.md
+++ b/docs/cli.md
@@ -27,6 +27,8 @@ then `--help` combined with any of those can give you more information.
* `--no-ansi`: Disable ANSI output.
* `--version (-V)`: Display this application version.
* `--no-interaction (-n)`: Do not ask any interactive question.
+* `--no-plugins`: Disables plugins.
+* `--no-cache`: Disables Poetry source caches.
## new
@@ -231,6 +233,7 @@ option is used.
When `--only` is specified, `--with` and `--without` options are ignored.
{{% /note %}}
+
## update
In order to get the latest versions of the dependencies and to update the `poetry.lock` file,
@@ -354,11 +357,15 @@ If the package(s) you want to install provide extras, you can specify them
when adding the package:
```bash
-poetry add requests[security,socks]
+poetry add "requests[security,socks]"
poetry add "requests[security,socks]~=2.22.0"
poetry add "git+https://github.com/pallets/flask.git@1.1.1[dotenv,dev]"
```
+{{% warning %}}
+Some shells may treat square brackets (`[` and `]`) as special characters. It is suggested to always quote arguments containing these characters to prevent unexpected shell expansion.
+{{% /warning %}}
+
If you want to add a package to a specific group of dependencies, you can use the `--group (-G)` option:
```bash
@@ -437,6 +444,7 @@ required by
### Options
* `--without`: The dependency groups to ignore.
+* `--why`: Include reverse dependencies where applicable.
* `--with`: The optional dependency groups to include.
* `--only`: The only dependency groups to include.
* `--default`: Only include the main dependencies. (**Deprecated**)
@@ -505,6 +513,7 @@ See [Configuration]({{< relref "configuration" >}}) for all available settings.
* `--unset`: Remove the configuration element named by `setting-key`.
* `--list`: Show the list of current config variables.
+* `--local`: Set/Get settings that are specific to a project (in the local configuration file `poetry.toml`).
## run
@@ -591,26 +600,35 @@ This command shows the current version of the project or bumps the version of
the project and writes the new version back to `pyproject.toml` if a valid
bump rule is provided.
-The new version should ideally be a valid [semver](https://semver.org/) string or a valid bump rule:
-`patch`, `minor`, `major`, `prepatch`, `preminor`, `premajor`, `prerelease`.
+The new version should be a valid [PEP 440](https://peps.python.org/pep-0440/)
+string or a valid bump rule: `patch`, `minor`, `major`, `prepatch`, `preminor`,
+`premajor`, `prerelease`.
+
+{{% note %}}
+
+If you would like to use semantic versioning for your project, please see
+[here]({{< relref "libraries#versioning" >}}).
+
+{{% /note %}}
The table below illustrates the effect of these rules with concrete examples.
-| rule | before | after |
-| ---------- | ------------- | ------------- |
-| major | 1.3.0 | 2.0.0 |
-| minor | 2.1.4 | 2.2.0 |
-| patch | 4.1.1 | 4.1.2 |
-| premajor | 1.0.2 | 2.0.0-alpha.0 |
-| preminor | 1.0.2 | 1.1.0-alpha.0 |
-| prepatch | 1.0.2 | 1.0.3-alpha.0 |
-| prerelease | 1.0.2 | 1.0.3-alpha.0 |
-| prerelease | 1.0.3-alpha.0 | 1.0.3-alpha.1 |
-| prerelease | 1.0.3-beta.0 | 1.0.3-beta.1 |
+| rule | before | after |
+| ---------- |---------|---------|
+| major | 1.3.0 | 2.0.0 |
+| minor | 2.1.4 | 2.2.0 |
+| patch | 4.1.1 | 4.1.2 |
+| premajor | 1.0.2 | 2.0.0a0 |
+| preminor | 1.0.2 | 1.1.0a0 |
+| prepatch | 1.0.2 | 1.0.3a0 |
+| prerelease | 1.0.2 | 1.0.3a0 |
+| prerelease | 1.0.3a0 | 1.0.3a1 |
+| prerelease | 1.0.3b0 | 1.0.3b1 |
### Options
* `--short (-s)`: Output the version number only.
+* `--dry-run`: Do not update the `pyproject.toml` file; see the example below.
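+
+For example, to preview the effect of a bump rule without writing the new version back to `pyproject.toml`:
+
+```bash
+poetry version minor --dry-run
+```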
## export
diff --git a/docs/configuration.md b/docs/configuration.md
index b81edd26513..edd1c18aa3b 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -46,6 +46,8 @@ cache-dir = "/path/to/cache/directory"
virtualenvs.create = true
virtualenvs.in-project = null
virtualenvs.options.always-copy = true
+virtualenvs.options.no-pip = false
+virtualenvs.options.no-setuptools = false
virtualenvs.options.system-site-packages = false
virtualenvs.path = "{cache-dir}/virtualenvs" # /path/to/cache/directory/virtualenvs
virtualenvs.prefer-active-python = false
@@ -139,6 +141,54 @@ the number of maximum workers is still limited at `number_of_cores + 4`.
This configuration will be ignored when `installer.parallel` is set to false.
{{% /note %}}
+### `installer.no-binary`
+
+**Type**: string | bool
+
+*Introduced in 1.2.0*
+
+When set, this configuration allows users to configure the package distribution format policy for all
+or specific packages.
+
+| Configuration | Description |
+|------------------------|------------------------------------------------------------|
+| `:all:` or `true` | Disallow binary distributions for all packages. |
+| `:none:` or `false` | Allow binary distributions for all packages. |
+| `package[,package,..]` | Disallow binary distributions for specified packages only. |
+
+{{% note %}}
+This configuration is only respected when using the new installer. If you have disabled it, please
+consider re-enabling it.
+
+As with all configurations described here, this is a user specific configuration. This means that it
+is not taken into consideration when a lockfile is generated or dependencies are resolved. It is
+applied only when selecting which distribution of a dependency should be installed into a Poetry managed
+environment.
+{{% /note %}}
+
+{{% note %}}
+For project specific usage, it is recommended that this be configured with the `--local` option.
+
+```bash
+poetry config --local installer.no-binary :all:
+```
+{{% /note %}}
+
+{{% note %}}
+For CI or container environments, using an [environment variable](#using-environment-variables)
+to configure this might be useful.
+
+```bash
+export POETRY_INSTALLER_NO_BINARY=:all:
+```
+{{% /note %}}
+
+{{% warning %}}
+Unless this is required system-wide, configuring this globally could lead to slower install times
+across all your projects if set incorrectly.
+{{% /warning %}}
+
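+As a further illustration (the package names below are arbitrary examples), disallowing binary
+distributions for just a few packages in the current project could look like this:
+
+```bash
+poetry config --local installer.no-binary cryptography,pyyaml
+```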
+
### `virtualenvs.create`
**Type**: boolean
@@ -148,11 +198,6 @@ Defaults to `true`.
If set to `false`, poetry will install dependencies into the current python environment.
-{{% note %}}
-When setting this configuration to `false`, the Python environment used must have `pip`
-installed and available.
-{{% /note %}}
-
### `virtualenvs.in-project`
**Type**: boolean
@@ -181,6 +226,37 @@ Defaults to `{cache-dir}/virtualenvs` (`{cache-dir}\virtualenvs` on Windows).
If set to `true` the `--always-copy` parameter is passed to `virtualenv` on creation of the venv. Thus all needed files are copied into the venv instead of symlinked.
Defaults to `false`.
+### `virtualenvs.options.no-pip`
+
+**Type**: boolean
+
+If set to `true` the `--no-pip` parameter is passed to `virtualenv` on creation of the venv. This means when a new
+virtual environment is created, `pip` will not be installed in the environment.
+Defaults to `false`.
+
+{{% note %}}
+Poetry, for its internal operations, uses the `pip` wheel embedded in the `virtualenv` package installed as a dependency
+in Poetry's runtime environment. If a user runs `poetry run pip` when this option is set to `true`, the
+embedded instance of `pip` is used.
+
+You can safely set this, along with `no-setuptools`, to `true`, if you desire a virtual environment with no additional
+packages. This is desirable for production environments.
+{{% /note %}}
+
+### `virtualenvs.options.no-setuptools`
+
+**Type**: boolean
+
+If set to `true` the `--no-setuptools` parameter is passed to `virtualenv` on creation of the venv. This means when a new
+virtual environment is created, `setuptools` will not be installed in the environment. Poetry, for its internal operations,
+does not require `setuptools` and this can safely be set to `true`.
+Defaults to `false`.
+
+{{% warning %}}
+Some development tools, such as IDEs, assume that `setuptools` (and other) packages are always present and
+available within a virtual environment. This can cause some features in these tools to not work as expected.
+{{% /warning %}}
+
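+As a minimal sketch, assuming a Poetry version where these options are available, enabling both of
+them for the current user could look like this:
+
+```bash
+poetry config virtualenvs.options.no-pip true
+poetry config virtualenvs.options.no-setuptools true
+```
+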
### `virtualenvs.options.system-site-packages`
**Type**: boolean
diff --git a/docs/contributing.md b/docs/contributing.md
index d54359fde70..cc034dd5b81 100644
--- a/docs/contributing.md
+++ b/docs/contributing.md
@@ -66,7 +66,7 @@ Provide more context by answering these questions:
Include details about your configuration and environment:
* **Which version of Poetry are you using?** You can get the exact version by running `poetry -V` in your terminal.
-* **Which Python version Poetry has been installed for?** Execute the `debug:info` to get the information.
+* **Which Python version Poetry has been installed for?** Execute `poetry debug info` to get the information.
* **What's the name and version of the OS you're using**?
@@ -139,6 +139,13 @@ $ poetry install
$ poetry run pytest tests/
```
+Poetry uses [mypy](https://github.com/python/mypy) for typechecking, and the CI
+will fail if it finds any errors. To run mypy locally:
+
+```bash
+$ poetry run mypy
+```
+
Poetry uses the [black](https://github.com/psf/black) coding style and you must ensure that your
code follows it. If not, the CI will fail and your Pull Request will not be merged.
@@ -198,7 +205,7 @@ If you are helping with the triage of reported issues, this section provides som
#### Multiple versions
-Often times you would want to attempt to reproduce issues with multiple versions of `poetry` at the same time. For these use cases, the [pipx project](https://pipxproject.github.io/pipx/) is useful.
+Oftentimes you will want to attempt to reproduce issues with multiple versions of `poetry` at the same time. For these use cases, the [pipx project](https://pypa.github.io/pipx/) is useful.
You can set your environment up like so.
diff --git a/docs/dependency-specification.md b/docs/dependency-specification.md
index 0e381563aca..185e2eb4965 100644
--- a/docs/dependency-specification.md
+++ b/docs/dependency-specification.md
@@ -116,6 +116,34 @@ To use an SSH connection, for example in the case of private repositories, use t
requests = { git = "git@github.com:requests/requests.git" }
```
+To use HTTP basic authentication with your git repositories, you can configure credentials similar to
+how [repository credentials]({{< relref "repositories#configuring-credentials" >}}) are configured.
+
+```bash
+poetry config repositories.git-org-project https://github.com/org/project.git
+poetry config http-basic.git-org-project username token
+poetry add git+https://github.com/org/project.git
+```
+
+{{% note %}}
+With Poetry 1.2 releases, the default git client used is [Dulwich](https://www.dulwich.io/).
+
+We fall back to the legacy system git client implementation in cases where
+[gitcredentials](https://git-scm.com/docs/gitcredentials) is used. This fallback will be removed in
+a future release where `gitcredentials` helpers can be better supported natively.
+
+If you encounter issues with the default implementation for repositories that worked prior to
+Poetry 1.2, you may wish to explicitly configure the use of the system git client via a shell
+subprocess call.
+
+```bash
+poetry config experimental.system-git-client true
+```
+
+Keep in mind, however, that doing so will surface bugs that existed in versions prior to 1.2 that
+were caused by the use of the system git client.
+{{% /note %}}
+
## `path` dependencies
To depend on a library located in a local directory or file,
diff --git a/docs/faq.md b/docs/faq.md
index a986c16feff..445b0e6e505 100644
--- a/docs/faq.md
+++ b/docs/faq.md
@@ -106,3 +106,11 @@ The current project's Python requirement (>=3.7.0,<4.0.0) is not compatible with
Usually you will want to match the Python requirement of your project with the upper bound of the failing dependency.
Alternative you can tell Poetry to install this dependency [only for a specific range of Python versions](/docs/dependency-specification/#multiple-constraints-dependencies),
if you know that it's not needed in all versions.
+
+
+### Why does Poetry enforce PEP 440 versions?
+
+This is done to remain compliant with the broader Python ecosystem.
+
+For example, if Poetry builds a distribution for a project that uses a version that is not valid according to
+[PEP 440](https://peps.python.org/pep-0440), third party tools will be unable to parse the version correctly.
diff --git a/docs/libraries.md b/docs/libraries.md
index bd7d195bd6c..ab56979540b 100644
--- a/docs/libraries.md
+++ b/docs/libraries.md
@@ -17,11 +17,19 @@ This chapter will tell you how to make your library installable through Poetry.
## Versioning
-While Poetry does not enforce any convention regarding package versioning,
-it **strongly** recommends to follow [semantic versioning](https://semver.org).
+Poetry requires [PEP 440](https://peps.python.org/pep-0440)-compliant versions for all projects.
-This has many advantages for the end users and allows them to set appropriate
-[version constraints]({{< relref "dependency-specification#version-constraints" >}}).
+While Poetry does not enforce any release convention, it does encourage the use of
+[semantic versioning](https://semver.org/) within the scope of
+[PEP 440](https://peps.python.org/pep-0440/#semantic-versioning). This has many advantages for the end users
+and allows them to set appropriate [version constraints]({{< relref "dependency-specification#version-constraints" >}}).
+
+{{% note %}}
+
+As an example, `1.0.0-hotfix.1` is not compatible with [PEP 440](https://peps.python.org/pep-0440). You can instead
+choose to use `1.0.0-post1` or `1.0.0.post1`.
+
+{{% /note %}}
## Lock file
diff --git a/docs/plugins.md b/docs/plugins.md
index f8d27b75b5c..7dc51547182 100644
--- a/docs/plugins.md
+++ b/docs/plugins.md
@@ -67,11 +67,8 @@ from poetry.poetry import Poetry
class MyPlugin(Plugin):
def activate(self, poetry: Poetry, io: IO):
- version = self.get_custom_version()
- io.write_line(f"Setting package version to {version}")
- poetry.package.set_version(version)
-
- def get_custom_version(self) -> str:
+ io.write_line("Setting readme")
+ poetry.package.readme = "README.md"
...
```
diff --git a/docs/pyproject.md b/docs/pyproject.md
index bfaca98fb52..cc77e17a492 100644
--- a/docs/pyproject.md
+++ b/docs/pyproject.md
@@ -17,17 +17,35 @@ The `tool.poetry` section of the `pyproject.toml` file is composed of multiple s
The name of the package. **Required**
+```toml
+name = "my-package"
+```
+
## version
The version of the package. **Required**
-This should follow [semantic versioning](http://semver.org/). However it will not be enforced and you remain
-free to follow another specification.
+This should be a valid [PEP 440](https://peps.python.org/pep-0440/) string.
+
+```toml
+version = "0.1.0"
+```
+
+{{% note %}}
+
+If you would like to use semantic versioning for your project, please see
+[here]({{< relref "libraries#versioning" >}}).
+
+{{% /note %}}
## description
A short description of the package. **Required**
+```toml
+description = "A short description of the package."
+```
+
## license
The license of the package.
@@ -55,40 +73,76 @@ More identifiers are listed at the [SPDX Open Source License Registry](https://s
If your project is proprietary and does not use a specific licence, you can set this value as `Proprietary`.
{{% /note %}}
+```toml
+license = "MIT"
+```
+
## authors
The authors of the package. **Required**
This is a list of authors and should contain at least one author. Authors must be in the form `name <email>`.
+```toml
+authors = [
+    "Sébastien Eustace <sebastien@eustace.io>",
+]
+```
+
## maintainers
The maintainers of the package. **Optional**
This is a list of maintainers and should be distinct from authors. Maintainers may contain an email and be in the form `name <email>`.
+```toml
+maintainers = [
+    "Richard Brave <richard.brave@example.com>",
+]
+```
+
## readme
The readme file of the package. **Optional**
The file can be either `README.rst` or `README.md`.
+```toml
+readme = "README.md" # or "README.rst"
+```
+
## homepage
An URL to the website of the project. **Optional**
+```toml
+homepage = "https://python-poetry.org/"
+```
+
## repository
An URL to the repository of the project. **Optional**
+```toml
+repository = "https://github.com/python-poetry/poetry"
+```
+
## documentation
An URL to the documentation of the project. **Optional**
+```toml
+documentation = "https://python-poetry.org/docs/"
+```
+
## keywords
A list of keywords that the package is related to. **Optional**
+```toml
+keywords = ["packaging", "poetry"]
+```
+
## classifiers
A list of PyPI [trove classifiers](https://pypi.org/classifiers/) that describe the project. **Optional**
diff --git a/docs/repositories.md b/docs/repositories.md
index f9dd873b84f..490d78bf785 100644
--- a/docs/repositories.md
+++ b/docs/repositories.md
@@ -11,7 +11,8 @@ menu:
# Repositories
-## Using the PyPI repository
+Poetry supports the use of [PyPI](https://pypi.org) and private repositories for discovery of
+packages as well as for publishing your projects.
By default, Poetry is configured to use the [PyPI](https://pypi.org) repository,
for package installation and publishing.
@@ -21,24 +22,283 @@ on PyPI.
This represents most cases and will likely be enough for most users.
+### Private Repository Example
-## Using a private repository
+#### Installing from private package sources
+By default, Poetry discovers and installs packages from [PyPI](https://pypi.org). But what if you want
+to install a dependency for your project from a private [simple API repository](#simple-api-repository)?
+Let's do it.
-However, at times, you may need to keep your package private while still being
-able to share it with your teammates. In this case, you will need to use a private
-repository.
+First, [configure](#project-configuration) the repository as a [package source](#package-sources) for
+your project.
-### Adding a repository
+```bash
+poetry source add foo https://pypi.example.org/simple/
+```
+
+Then, assuming the repository requires authentication, configure credentials for it.
+
+```bash
+poetry config http-basic.foo username password
+```
+
+Once this is done, you can add dependencies to your project from this source.
+
+```bash
+poetry add --source foo private-package
+```
+
+#### Publishing to a private repository
+
+Great, now all that is left is to publish your package. Assuming you want to share it privately
+with your team, you can configure the
+[Upload API](https://warehouse.pypa.io/api-reference/legacy.html#upload-api) endpoint for your
+[publishable repository](#publishable-repositories).
+
+```bash
+poetry config repositories.foo https://pypi.example.org/legacy/
+```
+
+{{% note %}}
+
+If you need to use different credentials for your [package source](#package-sources), then it is
+recommended to use a different name for your publishing repository.
+
+```bash
+poetry config repositories.foo-pub https://pypi.example.org/legacy/
+poetry config http-basic.foo-pub username password
+```
+
+{{% /note %}}
+
+Now, all that is left is to build and publish your project using the
+[`publish`]({{< relref "cli#publish" >}}) command.
+
+```bash
+poetry publish --build --repository foo-pub
+```
+
+## Package Sources
+
+By default, Poetry is configured to use the Python ecosystem's canonical package index
+[PyPI](https://pypi.org).
+
+{{% note %}}
+
+With the exception of the implicitly configured source for [PyPI](https://pypi.org) named `pypi`,
+package sources are local to a project and must be configured within the project's
+[`pyproject.toml`]({{< relref "pyproject" >}}) file. This is **not** the same configuration used
+when publishing a package.
+
+{{% /note %}}
+
+### Project Configuration
+
+These package sources may be managed using the [`source`]({{< relref "cli#source" >}}) command for
+your project.
+
+```bash
+poetry source add foo https://foo.bar/simple/
+```
+
+This will generate the following configuration snippet in your
+[`pyproject.toml`]({{< relref "pyproject" >}}) file.
+
+```toml
+[[tool.poetry.source]]
+name = "foo"
+url = "https://foo.bar/simple/"
+default = false
+secondary = false
+```
+
+{{% warning %}}
+
+If package sources are defined for a project, these will take precedence over
+[PyPI](https://pypi.org). If you do not want this to be the case, you should declare **all** package
+sources to be [secondary](#secondary-package-sources).
+
+{{% /warning %}}
+
+See [Supported Package Sources](#supported-package-sources) for source type specific information.
+
+{{% note %}}
+
+If your package source requires [credentials](#configuring-credentials) or
+[certificates](#certificates), please refer to the relevant sections below.
+
+{{% /note %}}
+
+#### Default Package Source
+
+By default, Poetry configures [PyPI](https://pypi.org) as the default package source for your
+project. You can alter this behaviour and look up packages exclusively from the configured
+package sources by adding a **single** source with `default = true`.
+
+```bash
+poetry source add --default foo https://foo.bar/simple/
+```
+
+{{% warning %}}
+
+Configuring a custom package source as default will effectively disable [PyPI](https://pypi.org)
+as a package source for your project.
-Adding a new repository is easy with the `config` command.
+{{% /warning %}}
+
+#### Secondary Package Sources
+
+If a package source is configured as secondary, it is given a lower priority when selecting a
+compatible package distribution that also exists in your default package
+source.
+
+You can configure a package source as a secondary source with `secondary = true` in your package
+source configuration.
```bash
-poetry config repositories.foo https://foo.bar/simple/
+poetry source add --secondary foo https://foo.bar/simple/
```
-This will set the url for repository `foo` to `https://foo.bar/simple/`.
+There can be more than one secondary package source.
+
+{{% note %}}
+
+All package sources (including secondary sources) will be searched during the package lookup
+process. These network requests will occur for all sources, regardless of whether the package is
+found at one or more of them.
-### Configuring credentials
+If you wish to avoid this, you may explicitly specify which source to search for a particular
+package.
+
+```bash
+poetry add --source pypi httpx
+```
+
+{{% /note %}}
+
+### Supported Package Sources
+
+#### Python Package Index (PyPI)
+
+Poetry interacts with [PyPI](https://pypi.org) via its
+[JSON API](https://warehouse.pypa.io/api-reference/json.html). This is used to retrieve a requested
+package's versions, metadata, files, etc.
+
+{{% note %}}
+
+If the package's published metadata is invalid, Poetry will download the available bdist/sdist to
+inspect it locally and identify the relevant metadata.
+
+{{% /note %}}
+
+If you want to explicitly select a package from [PyPI](https://pypi.org), you can use the `--source`
+option with the [`add`]({{< relref "cli#add" >}}) command, as shown below.
+
+```bash
+poetry add --source pypi httpx@^0.22.0
+```
+
+This will generate the following configuration snippet in your `pyproject.toml` file.
+
+```toml
+httpx = {version = "^0.22.0", source = "pypi"}
+```
+
+{{% warning %}}
+
+If any source within a project is configured with `default = true`, the implicit `pypi` source will
+be disabled and not used for any packages.
+
+{{% /warning %}}
+
+#### Simple API Repository
+
+Poetry can fetch and install package dependencies from public or private custom repositories that
+implement the simple repository API as described in [PEP 503](https://peps.python.org/pep-0503/).
+
+{{% warning %}}
+
+When using sources that distribute large wheels without providing a file checksum in file URLs,
+Poetry will download each candidate wheel at least once in order to generate the checksum. This can
+manifest as long dependency resolution times when adding packages from such sources.
+
+{{% /warning %}}
+
+These package sources may be configured via the following command in your project.
+
+```bash
+poetry source add testpypi https://test.pypi.org/simple/
+```
+
+{{% note %}}
+
+Note the trailing `/simple/`. This is important when configuring
+[PEP 503](https://peps.python.org/pep-0503/) compliant package sources.
+
+{{% /note %}}
+
+In addition to [PEP 503](https://peps.python.org/pep-0503/), Poetry can also handle simple API
+repositories that implement [PEP 658](https://peps.python.org/pep-0658/) (*Introduced in 1.2.0*).
+This is helpful in reducing dependency resolution time for packages from these sources, as Poetry can
+avoid having to download each candidate distribution in order to determine the associated metadata.
+
+{{% note %}}
+
+*Why does Poetry insist on downloading all candidate distributions for all platforms when metadata
+is not available?*
+
+The need for this stems from the fact that Poetry's lock file is platform-agnostic. This means, in
+order to resolve dependencies for a project, Poetry needs metadata for all platform specific
+distributions. And when this metadata is not readily available, downloading the distribution and
+inspecting it locally is the only remaining option.
+
+{{% /note %}}
+
+#### Single Page Link Source
+
+*Introduced in 1.2.0*
+
+Some projects choose to release their binary distributions via a single page link source that
+partially follows the structure of a package page in [PEP 503](https://peps.python.org/pep-0503/).
+
+These package sources may be configured via the following command in your project.
+
+```bash
+poetry source add jax https://storage.googleapis.com/jax-releases/jax_releases.html
+```
+
+{{% note %}}
+
+All caveats regarding slower resolution times described for simple API repositories do apply here as
+well.
+
+{{% /note %}}
+
+
+## Publishable Repositories
+
+Unlike [package sources](#package-sources), Poetry treats repositories to which you publish packages
+as user specific and not project specific configuration. Today, Poetry only supports the
+[Legacy Upload API](https://warehouse.pypa.io/api-reference/legacy.html#upload-api) when publishing
+your project.
+
+These are configured using the [`config`]({{< relref "cli#config" >}}) command, under the
+`repositories` key.
+
+```bash
+poetry config repositories.testpypi https://test.pypi.org/legacy/
+```
+
+{{% note %}}
+
+[Legacy Upload API](https://warehouse.pypa.io/api-reference/legacy.html#upload-api) URLs are
+typically different to the URLs a repository provides for its simple API. You'll note that in the
+example of [Test PyPI](https://test.pypi.org/), the upload path (`/legacy/`) is different to that of
+its simple API (`https://test.pypi.org/simple`).
+
+{{% /note %}}
+
+## Configuring Credentials
If you want to store your credentials for a specific repository, you can do so easily:
@@ -49,6 +309,7 @@ poetry config http-basic.foo username password
If you do not specify the password you will be prompted to write it.
{{% note %}}
+
To publish to PyPI, you can set your credentials for the repository named `pypi`.
Note that it is recommended to use [API tokens](https://pypi.org/help/#apitoken)
@@ -65,8 +326,8 @@ call to `config`.
```bash
poetry config http-basic.pypi username password
```
-{{% /note %}}
+{{% /note %}}
You can also specify the username and password when using the `publish` command
with the `--username` and `--password` options.
@@ -109,7 +370,10 @@ You can prevent this by adding double dashes to prevent any following argument f
poetry config -- http-basic.pypi myUsername -myPasswordStartingWithDash
```
-#### Custom certificate authority and mutual TLS authentication
+## Certificates
+
+### Custom certificate authority and mutual TLS authentication
+
Poetry supports repositories that are secured by a custom certificate authority as well as those that require
certificate-based client authentication. The following will configure the "foo" repository to validate the repository's
certificate using a custom certificate authority and use a client certificate (note that these config variables do not
@@ -120,53 +384,27 @@ poetry config certificates.foo.cert /path/to/ca.pem
poetry config certificates.foo.client-cert /path/to/client.pem
```
-### Install dependencies from a private repository
+## Caches
-Now that you can publish to your private repository, you need to be able to
-install dependencies from it.
+Poetry employs multiple caches for package sources in order to improve user experience and avoid duplicate network
+requests.
-For that, you have to edit your `pyproject.toml` file, like so
+The first-level cache is a [Cache-Control](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control)
+header-based cache for almost all HTTP requests.
-```toml
-[[tool.poetry.source]]
-name = "foo"
-url = "https://foo.bar/simple/"
-```
-
-From now on, Poetry will also look for packages in your private repository.
-
-{{% note %}}
-Any custom repository will have precedence over PyPI.
+Further, every HTTP-backed package source caches metadata associated with a package once it is fetched or generated.
+Additionally, downloaded files (package distributions) are also cached.
-If you still want PyPI to be your primary source for your packages
-you can declare custom repositories as secondary.
+## Debugging Issues
+If you encounter issues with package sources, one of the simplest steps you might take to debug an issue is rerunning
+your command with the `--no-cache` flag.
-```toml
-[[tool.poetry.source]]
-name = "foo"
-url = "https://foo.bar/simple/"
-secondary = true
+```bash
+poetry --no-cache add pycowsay
```
-{{% /note %}}
-
-If your private repository requires HTTP Basic Auth be sure to add the username and
-password to your `http-basic` configuration using the example above (be sure to use the
-same name that is in the `tool.poetry.source` section). If your repository requires either
-a custom certificate authority or client certificates, similarly refer to the example above to configure the
-`certificates` section. Poetry will use these values to authenticate to your private repository when downloading or
-looking for packages.
-
-
-### Disabling the PyPI repository
-
-If you want your packages to be exclusively looked up from a private
-repository, you can set it as the default one by using the `default` keyword
-```toml
-[[tool.poetry.source]]
-name = "foo"
-url = "https://foo.bar/simple/"
-default = true
-```
+If this solves your issue, you can consider clearing your cache using the [`cache`]({{< relref "cli#cache-clear" >}})
+command.
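+
+As a sketch, clearing the entire cache for the default `pypi` source could look like this:
+
+```bash
+poetry cache clear pypi --all
+```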
-A default source will also be the fallback source if you add other sources.
+Alternatively, you could also consider enabling very verbose logging (`-vvv`) along with `--no-cache` to see network
+requests being made in the logs.
diff --git a/poetry.lock b/poetry.lock
index 833e94621f9..c773098d0b9 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -22,7 +22,7 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>
[[package]]
name = "cachecontrol"
-version = "0.12.10"
+version = "0.12.11"
description = "httplib2 caching for requests"
category = "main"
optional = false
@@ -110,12 +110,15 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "coverage"
-version = "6.3.2"
+version = "6.3.3"
description = "Code coverage measurement for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
+[package.dependencies]
+tomli = {version = "*", optional = true, markers = "extra == \"toml\""}
+
[package.extras]
toml = ["tomli"]
@@ -129,7 +132,7 @@ python-versions = ">=3.6,<4.0"
[[package]]
name = "cryptography"
-version = "36.0.2"
+version = "37.0.2"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
category = "main"
optional = false
@@ -144,11 +147,11 @@ docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling
pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
sdist = ["setuptools_rust (>=0.11.4)"]
ssh = ["bcrypt (>=3.1.5)"]
-test = ["pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"]
+test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"]
[[package]]
name = "deepdiff"
-version = "5.8.0"
+version = "5.8.1"
description = "Deep Difference and Search of any Python object/data."
category = "dev"
optional = false
@@ -168,6 +171,25 @@ category = "main"
optional = false
python-versions = "*"
+[[package]]
+name = "dulwich"
+version = "0.20.38"
+description = "Python Git Library"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+certifi = "*"
+urllib3 = ">=1.24.1"
+
+[package.extras]
+fastimport = ["fastimport"]
+https = ["urllib3[secure] (>=1.24.1)"]
+paramiko = ["paramiko"]
+pgp = ["gpg"]
+watch = ["pyinotify"]
+
[[package]]
name = "entrypoints"
version = "0.3"
@@ -178,7 +200,7 @@ python-versions = ">=2.7"
[[package]]
name = "filelock"
-version = "3.6.0"
+version = "3.7.0"
description = "A platform independent file lock."
category = "main"
optional = false
@@ -224,7 +246,7 @@ python-versions = ">=3"
[[package]]
name = "identify"
-version = "2.4.12"
+version = "2.5.0"
description = "File identification library for Python"
category = "dev"
optional = false
@@ -312,6 +334,33 @@ category = "main"
optional = false
python-versions = "*"
+[[package]]
+name = "mypy"
+version = "0.950"
+description = "Optional static typing for Python"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+mypy-extensions = ">=0.4.3"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""}
+typing-extensions = ">=3.10"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+python2 = ["typed-ast (>=1.4.0,<2)"]
+reports = ["lxml"]
+
+[[package]]
+name = "mypy-extensions"
+version = "0.4.3"
+description = "Experimental type system extensions for programs checked with the mypy typechecker."
+category = "dev"
+optional = false
+python-versions = "*"
+
[[package]]
name = "nodeenv"
version = "1.6.0"
@@ -366,15 +415,15 @@ testing = ["coverage", "nose"]
[[package]]
name = "platformdirs"
-version = "2.5.1"
+version = "2.5.2"
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "main"
optional = false
python-versions = ">=3.7"
[package.extras]
-docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"]
-test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"]
+docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
+test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
[[package]]
name = "pluggy"
@@ -404,7 +453,7 @@ importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""}
[[package]]
name = "poetry-plugin-export"
-version = "1.0.1"
+version = "1.0.2"
description = "Poetry plugin to export the dependencies to various formats"
category = "main"
optional = false
@@ -415,7 +464,7 @@ poetry = ">=1.2.0b1dev0,<2.0.0"
[[package]]
name = "pre-commit"
-version = "2.18.1"
+version = "2.19.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
category = "dev"
optional = false
@@ -464,7 +513,7 @@ python-versions = "*"
[[package]]
name = "pyparsing"
-version = "3.0.8"
+version = "3.0.9"
description = "pyparsing module - Classes and methods to define and execute parsing grammars"
category = "main"
optional = false
@@ -475,11 +524,11 @@ diagrams = ["railroad-diagrams", "jinja2"]
[[package]]
name = "pytest"
-version = "6.2.5"
+version = "7.1.2"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.dependencies]
atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
@@ -490,23 +539,22 @@ iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
py = ">=1.8.2"
-toml = "*"
+tomli = ">=1.0.0"
[package.extras]
-testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
+testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
[[package]]
name = "pytest-cov"
-version = "2.12.1"
+version = "3.0.0"
description = "Pytest plugin for measuring coverage."
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.6"
[package.dependencies]
-coverage = ">=5.2.1"
+coverage = {version = ">=5.2.1", extras = ["toml"]}
pytest = ">=4.6"
-toml = "*"
[package.extras]
testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"]
@@ -585,7 +633,7 @@ requests = ">=2.0.1,<3.0.0"
[[package]]
name = "secretstorage"
-version = "3.3.1"
+version = "3.3.2"
description = "Python bindings to FreeDesktop.org Secret Service API"
category = "main"
optional = false
@@ -627,9 +675,17 @@ category = "dev"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+[[package]]
+name = "tomli"
+version = "2.0.1"
+description = "A lil' TOML parser"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
[[package]]
name = "tomlkit"
-version = "0.10.1"
+version = "0.10.2"
description = "Style preserving TOML library"
category = "main"
optional = false
@@ -658,13 +714,40 @@ virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,
docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"]
testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "psutil (>=5.6.1)", "pathlib2 (>=2.3.3)"]
+[[package]]
+name = "typed-ast"
+version = "1.5.3"
+description = "a fork of Python 2 and 3 ast modules with type comment support"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "types-requests"
+version = "2.27.26"
+description = "Typing stubs for requests"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+types-urllib3 = "<1.27"
+
+[[package]]
+name = "types-urllib3"
+version = "1.26.14"
+description = "Typing stubs for urllib3"
+category = "dev"
+optional = false
+python-versions = "*"
+
[[package]]
name = "typing-extensions"
-version = "4.1.1"
-description = "Backported and Experimental Type Hints for Python 3.6+"
+version = "4.2.0"
+description = "Backported and Experimental Type Hints for Python 3.7+"
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[[package]]
name = "urllib3"
@@ -721,7 +804,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
[metadata]
lock-version = "1.1"
python-versions = "^3.7"
-content-hash = "f74aedfd57d8aa47486cacfd4e2f5a24e952cfe1aee43c7b6a6d801eec5254ea"
+content-hash = "5421b8901d8d4589152de8dafcb79827eaefa00ae7c3af53092be08a0caed970"
[metadata.files]
atomicwrites = [
@@ -733,8 +816,8 @@ attrs = [
{file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
]
cachecontrol = [
- {file = "CacheControl-0.12.10-py2.py3-none-any.whl", hash = "sha256:b0d43d8f71948ef5ebdee5fe236b86c6ffc7799370453dccb0e894c20dfa487c"},
- {file = "CacheControl-0.12.10.tar.gz", hash = "sha256:d8aca75b82eec92d84b5d6eb8c8f66ea16f09d2adb09dbca27fe2d5fc8d3732d"},
+ {file = "CacheControl-0.12.11-py2.py3-none-any.whl", hash = "sha256:2c75d6a8938cb1933c75c50184549ad42728a27e9f6b92fd677c3151aa72555b"},
+ {file = "CacheControl-0.12.11.tar.gz", hash = "sha256:a5b9fcc986b184db101aa280b42ecdcdfc524892596f606858e0b7a8b4d9e144"},
]
cachy = [
{file = "cachy-0.3.0-py2.py3-none-any.whl", hash = "sha256:338ca09c8860e76b275aff52374330efedc4d5a5e45dc1c5b539c1ead0786fe7"},
@@ -813,89 +896,94 @@ colorama = [
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
]
coverage = [
- {file = "coverage-6.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b27d894748475fa858f9597c0ee1d4829f44683f3813633aaf94b19cb5453cf"},
- {file = "coverage-6.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37d1141ad6b2466a7b53a22e08fe76994c2d35a5b6b469590424a9953155afac"},
- {file = "coverage-6.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9987b0354b06d4df0f4d3e0ec1ae76d7ce7cbca9a2f98c25041eb79eec766f1"},
- {file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:26e2deacd414fc2f97dd9f7676ee3eaecd299ca751412d89f40bc01557a6b1b4"},
- {file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd8bafa458b5c7d061540f1ee9f18025a68e2d8471b3e858a9dad47c8d41903"},
- {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:46191097ebc381fbf89bdce207a6c107ac4ec0890d8d20f3360345ff5976155c"},
- {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6f89d05e028d274ce4fa1a86887b071ae1755082ef94a6740238cd7a8178804f"},
- {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:58303469e9a272b4abdb9e302a780072c0633cdcc0165db7eec0f9e32f901e05"},
- {file = "coverage-6.3.2-cp310-cp310-win32.whl", hash = "sha256:2fea046bfb455510e05be95e879f0e768d45c10c11509e20e06d8fcaa31d9e39"},
- {file = "coverage-6.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:a2a8b8bcc399edb4347a5ca8b9b87e7524c0967b335fbb08a83c8421489ddee1"},
- {file = "coverage-6.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f1555ea6d6da108e1999b2463ea1003fe03f29213e459145e70edbaf3e004aaa"},
- {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5f4e1edcf57ce94e5475fe09e5afa3e3145081318e5fd1a43a6b4539a97e518"},
- {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a15dc0a14008f1da3d1ebd44bdda3e357dbabdf5a0b5034d38fcde0b5c234b7"},
- {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b7745788866028adeb1e0eca3bf1101109e2dc58456cb49d2d9b99a8c516e6"},
- {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8ce257cac556cb03be4a248d92ed36904a59a4a5ff55a994e92214cde15c5bad"},
- {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b0be84e5a6209858a1d3e8d1806c46214e867ce1b0fd32e4ea03f4bd8b2e3359"},
- {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:acf53bc2cf7282ab9b8ba346746afe703474004d9e566ad164c91a7a59f188a4"},
- {file = "coverage-6.3.2-cp37-cp37m-win32.whl", hash = "sha256:8bdde1177f2311ee552f47ae6e5aa7750c0e3291ca6b75f71f7ffe1f1dab3dca"},
- {file = "coverage-6.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b31651d018b23ec463e95cf10070d0b2c548aa950a03d0b559eaa11c7e5a6fa3"},
- {file = "coverage-6.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07e6db90cd9686c767dcc593dff16c8c09f9814f5e9c51034066cad3373b914d"},
- {file = "coverage-6.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c6dbb42f3ad25760010c45191e9757e7dce981cbfb90e42feef301d71540059"},
- {file = "coverage-6.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76aeef1b95aff3905fb2ae2d96e319caca5b76fa41d3470b19d4e4a3a313512"},
- {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cf5cfcb1521dc3255d845d9dca3ff204b3229401994ef8d1984b32746bb45ca"},
- {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fbbdc8d55990eac1b0919ca69eb5a988a802b854488c34b8f37f3e2025fa90d"},
- {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ec6bc7fe73a938933d4178c9b23c4e0568e43e220aef9472c4f6044bfc6dd0f0"},
- {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9baff2a45ae1f17c8078452e9e5962e518eab705e50a0aa8083733ea7d45f3a6"},
- {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd9e830e9d8d89b20ab1e5af09b32d33e1a08ef4c4e14411e559556fd788e6b2"},
- {file = "coverage-6.3.2-cp38-cp38-win32.whl", hash = "sha256:f7331dbf301b7289013175087636bbaf5b2405e57259dd2c42fdcc9fcc47325e"},
- {file = "coverage-6.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:68353fe7cdf91f109fc7d474461b46e7f1f14e533e911a2a2cbb8b0fc8613cf1"},
- {file = "coverage-6.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b78e5afb39941572209f71866aa0b206c12f0109835aa0d601e41552f9b3e620"},
- {file = "coverage-6.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e21876082ed887baed0146fe222f861b5815455ada3b33b890f4105d806128d"},
- {file = "coverage-6.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34626a7eee2a3da12af0507780bb51eb52dca0e1751fd1471d0810539cefb536"},
- {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ebf730d2381158ecf3dfd4453fbca0613e16eaa547b4170e2450c9707665ce7"},
- {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd6fe30bd519694b356cbfcaca9bd5c1737cddd20778c6a581ae20dc8c04def2"},
- {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96f8a1cb43ca1422f36492bebe63312d396491a9165ed3b9231e778d43a7fca4"},
- {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dd035edafefee4d573140a76fdc785dc38829fe5a455c4bb12bac8c20cfc3d69"},
- {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ca5aeb4344b30d0bec47481536b8ba1181d50dbe783b0e4ad03c95dc1296684"},
- {file = "coverage-6.3.2-cp39-cp39-win32.whl", hash = "sha256:f5fa5803f47e095d7ad8443d28b01d48c0359484fec1b9d8606d0e3282084bc4"},
- {file = "coverage-6.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9548f10d8be799551eb3a9c74bbf2b4934ddb330e08a73320123c07f95cc2d92"},
- {file = "coverage-6.3.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:18d520c6860515a771708937d2f78f63cc47ab3b80cb78e86573b0a760161faf"},
- {file = "coverage-6.3.2.tar.gz", hash = "sha256:03e2a7826086b91ef345ff18742ee9fc47a6839ccd517061ef8fa1976e652ce9"},
+ {file = "coverage-6.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df32ee0f4935a101e4b9a5f07b617d884a531ed5666671ff6ac66d2e8e8246d8"},
+ {file = "coverage-6.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75b5dbffc334e0beb4f6c503fb95e6d422770fd2d1b40a64898ea26d6c02742d"},
+ {file = "coverage-6.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:114944e6061b68a801c5da5427b9173a0dd9d32cd5fcc18a13de90352843737d"},
+ {file = "coverage-6.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab88a01cd180b5640ccc9c47232e31924d5f9967ab7edd7e5c91c68eee47a69"},
+ {file = "coverage-6.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad8f9068f5972a46d50fe5f32c09d6ee11da69c560fcb1b4c3baea246ca4109b"},
+ {file = "coverage-6.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4cd696aa712e6cd16898d63cf66139dc70d998f8121ab558f0e1936396dbc579"},
+ {file = "coverage-6.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c1a9942e282cc9d3ed522cd3e3cab081149b27ea3bda72d6f61f84eaf88c1a63"},
+ {file = "coverage-6.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c06455121a089252b5943ea682187a4e0a5cf0a3fb980eb8e7ce394b144430a9"},
+ {file = "coverage-6.3.3-cp310-cp310-win32.whl", hash = "sha256:cb5311d6ccbd22578c80028c5e292a7ab9adb91bd62c1982087fad75abe2e63d"},
+ {file = "coverage-6.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:6d4a6f30f611e657495cc81a07ff7aa8cd949144e7667c5d3e680d73ba7a70e4"},
+ {file = "coverage-6.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:79bf405432428e989cad7b8bc60581963238f7645ae8a404f5dce90236cc0293"},
+ {file = "coverage-6.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:338c417613f15596af9eb7a39353b60abec9d8ce1080aedba5ecee6a5d85f8d3"},
+ {file = "coverage-6.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db094a6a4ae6329ed322a8973f83630b12715654c197dd392410400a5bfa1a73"},
+ {file = "coverage-6.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1414e8b124611bf4df8d77215bd32cba6e3425da8ce9c1f1046149615e3a9a31"},
+ {file = "coverage-6.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:93b16b08f94c92cab88073ffd185070cdcb29f1b98df8b28e6649145b7f2c90d"},
+ {file = "coverage-6.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fbc86ae8cc129c801e7baaafe3addf3c8d49c9c1597c44bdf2d78139707c3c62"},
+ {file = "coverage-6.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b5ba058610e8289a07db2a57bce45a1793ec0d3d11db28c047aae2aa1a832572"},
+ {file = "coverage-6.3.3-cp37-cp37m-win32.whl", hash = "sha256:8329635c0781927a2c6ae068461e19674c564e05b86736ab8eb29c420ee7dc20"},
+ {file = "coverage-6.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:e5af1feee71099ae2e3b086ec04f57f9950e1be9ecf6c420696fea7977b84738"},
+ {file = "coverage-6.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e814a4a5a1d95223b08cdb0f4f57029e8eab22ffdbae2f97107aeef28554517e"},
+ {file = "coverage-6.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:61f4fbf3633cb0713437291b8848634ea97f89c7e849c2be17a665611e433f53"},
+ {file = "coverage-6.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3401b0d2ed9f726fadbfa35102e00d1b3547b73772a1de5508ef3bdbcb36afe7"},
+ {file = "coverage-6.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8586b177b4407f988731eb7f41967415b2197f35e2a6ee1a9b9b561f6323c8e9"},
+ {file = "coverage-6.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:892e7fe32191960da559a14536768a62e83e87bbb867e1b9c643e7e0fbce2579"},
+ {file = "coverage-6.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:afb03f981fadb5aed1ac6e3dd34f0488e1a0875623d557b6fad09b97a942b38a"},
+ {file = "coverage-6.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cbe91bc84be4e5ef0b1480d15c7b18e29c73bdfa33e07d3725da7d18e1b0aff2"},
+ {file = "coverage-6.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:91502bf27cbd5c83c95cfea291ef387469f2387508645602e1ca0fd8a4ba7548"},
+ {file = "coverage-6.3.3-cp38-cp38-win32.whl", hash = "sha256:c488db059848702aff30aa1d90ef87928d4e72e4f00717343800546fdbff0a94"},
+ {file = "coverage-6.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:ceb6534fcdfb5c503affb6b1130db7b5bfc8a0f77fa34880146f7a5c117987d0"},
+ {file = "coverage-6.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cc692c9ee18f0dd3214843779ba6b275ee4bb9b9a5745ba64265bce911aefd1a"},
+ {file = "coverage-6.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:462105283de203df8de58a68c1bb4ba2a8a164097c2379f664fa81d6baf94b81"},
+ {file = "coverage-6.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc972d829ad5ef4d4c5fcabd2bbe2add84ce8236f64ba1c0c72185da3a273130"},
+ {file = "coverage-6.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06f54765cdbce99901871d50fe9f41d58213f18e98b170a30ca34f47de7dd5e8"},
+ {file = "coverage-6.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7835f76a081787f0ca62a53504361b3869840a1620049b56d803a8cb3a9eeea3"},
+ {file = "coverage-6.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6f5fee77ec3384b934797f1873758f796dfb4f167e1296dc00f8b2e023ce6ee9"},
+ {file = "coverage-6.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:baa8be8aba3dd1e976e68677be68a960a633a6d44c325757aefaa4d66175050f"},
+ {file = "coverage-6.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d06380e777dd6b35ee936f333d55b53dc4a8271036ff884c909cf6e94be8b6c"},
+ {file = "coverage-6.3.3-cp39-cp39-win32.whl", hash = "sha256:f8cabc5fd0091976ab7b020f5708335033e422de25e20ddf9416bdce2b7e07d8"},
+ {file = "coverage-6.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c9441d57b0963cf8340268ad62fc83de61f1613034b79c2b1053046af0c5284"},
+ {file = "coverage-6.3.3-pp36.pp37.pp38-none-any.whl", hash = "sha256:d522f1dc49127eab0bfbba4e90fa068ecff0899bbf61bf4065c790ddd6c177fe"},
+ {file = "coverage-6.3.3.tar.gz", hash = "sha256:2781c43bffbbec2b8867376d4d61916f5e9c4cc168232528562a61d1b4b01879"},
]
crashtest = [
{file = "crashtest-0.3.1-py3-none-any.whl", hash = "sha256:300f4b0825f57688b47b6d70c6a31de33512eb2fa1ac614f780939aa0cf91680"},
{file = "crashtest-0.3.1.tar.gz", hash = "sha256:42ca7b6ce88b6c7433e2ce47ea884e91ec93104a4b754998be498a8e6c3d37dd"},
]
cryptography = [
- {file = "cryptography-36.0.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:4e2dddd38a5ba733be6a025a1475a9f45e4e41139d1321f412c6b360b19070b6"},
- {file = "cryptography-36.0.2-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:4881d09298cd0b669bb15b9cfe6166f16fc1277b4ed0d04a22f3d6430cb30f1d"},
- {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea634401ca02367c1567f012317502ef3437522e2fc44a3ea1844de028fa4b84"},
- {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7be666cc4599b415f320839e36367b273db8501127b38316f3b9f22f17a0b815"},
- {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8241cac0aae90b82d6b5c443b853723bcc66963970c67e56e71a2609dc4b5eaf"},
- {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2d54e787a884ffc6e187262823b6feb06c338084bbe80d45166a1cb1c6c5bf"},
- {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:c2c5250ff0d36fd58550252f54915776940e4e866f38f3a7866d92b32a654b86"},
- {file = "cryptography-36.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ec6597aa85ce03f3e507566b8bcdf9da2227ec86c4266bd5e6ab4d9e0cc8dab2"},
- {file = "cryptography-36.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ca9f686517ec2c4a4ce930207f75c00bf03d94e5063cbc00a1dc42531511b7eb"},
- {file = "cryptography-36.0.2-cp36-abi3-win32.whl", hash = "sha256:f64b232348ee82f13aac22856515ce0195837f6968aeaa94a3d0353ea2ec06a6"},
- {file = "cryptography-36.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:53e0285b49fd0ab6e604f4c5d9c5ddd98de77018542e88366923f152dbeb3c29"},
- {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:32db5cc49c73f39aac27574522cecd0a4bb7384e71198bc65a0d23f901e89bb7"},
- {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b3d199647468d410994dbeb8cec5816fb74feb9368aedf300af709ef507e3e"},
- {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:da73d095f8590ad437cd5e9faf6628a218aa7c387e1fdf67b888b47ba56a17f0"},
- {file = "cryptography-36.0.2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:0a3bf09bb0b7a2c93ce7b98cb107e9170a90c51a0162a20af1c61c765b90e60b"},
- {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8897b7b7ec077c819187a123174b645eb680c13df68354ed99f9b40a50898f77"},
- {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82740818f2f240a5da8dfb8943b360e4f24022b093207160c77cadade47d7c85"},
- {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1f64a62b3b75e4005df19d3b5235abd43fa6358d5516cfc43d87aeba8d08dd51"},
- {file = "cryptography-36.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e167b6b710c7f7bc54e67ef593f8731e1f45aa35f8a8a7b72d6e42ec76afd4b3"},
- {file = "cryptography-36.0.2.tar.gz", hash = "sha256:70f8f4f7bb2ac9f340655cbac89d68c527af5bb4387522a8413e841e3e6628c9"},
+ {file = "cryptography-37.0.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:ef15c2df7656763b4ff20a9bc4381d8352e6640cfeb95c2972c38ef508e75181"},
+ {file = "cryptography-37.0.2-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3c81599befb4d4f3d7648ed3217e00d21a9341a9a688ecdd615ff72ffbed7336"},
+ {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2bd1096476aaac820426239ab534b636c77d71af66c547b9ddcd76eb9c79e004"},
+ {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:31fe38d14d2e5f787e0aecef831457da6cec68e0bb09a35835b0b44ae8b988fe"},
+ {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:093cb351031656d3ee2f4fa1be579a8c69c754cf874206be1d4cf3b542042804"},
+ {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59b281eab51e1b6b6afa525af2bd93c16d49358404f814fe2c2410058623928c"},
+ {file = "cryptography-37.0.2-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:0cc20f655157d4cfc7bada909dc5cc228211b075ba8407c46467f63597c78178"},
+ {file = "cryptography-37.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f8ec91983e638a9bcd75b39f1396e5c0dc2330cbd9ce4accefe68717e6779e0a"},
+ {file = "cryptography-37.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:46f4c544f6557a2fefa7ac8ac7d1b17bf9b647bd20b16decc8fbcab7117fbc15"},
+ {file = "cryptography-37.0.2-cp36-abi3-win32.whl", hash = "sha256:731c8abd27693323b348518ed0e0705713a36d79fdbd969ad968fbef0979a7e0"},
+ {file = "cryptography-37.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:471e0d70201c069f74c837983189949aa0d24bb2d751b57e26e3761f2f782b8d"},
+ {file = "cryptography-37.0.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a68254dd88021f24a68b613d8c51d5c5e74d735878b9e32cc0adf19d1f10aaf9"},
+ {file = "cryptography-37.0.2-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:a7d5137e556cc0ea418dca6186deabe9129cee318618eb1ffecbd35bee55ddc1"},
+ {file = "cryptography-37.0.2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aeaba7b5e756ea52c8861c133c596afe93dd716cbcacae23b80bc238202dc023"},
+ {file = "cryptography-37.0.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95e590dd70642eb2079d280420a888190aa040ad20f19ec8c6e097e38aa29e06"},
+ {file = "cryptography-37.0.2-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1b9362d34363f2c71b7853f6251219298124aa4cc2075ae2932e64c91a3e2717"},
+ {file = "cryptography-37.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e53258e69874a306fcecb88b7534d61820db8a98655662a3dd2ec7f1afd9132f"},
+ {file = "cryptography-37.0.2-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:1f3bfbd611db5cb58ca82f3deb35e83af34bb8cf06043fa61500157d50a70982"},
+ {file = "cryptography-37.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:419c57d7b63f5ec38b1199a9521d77d7d1754eb97827bbb773162073ccd8c8d4"},
+ {file = "cryptography-37.0.2-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:dc26bb134452081859aa21d4990474ddb7e863aa39e60d1592800a8865a702de"},
+ {file = "cryptography-37.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b8398b3d0efc420e777c40c16764d6870bcef2eb383df9c6dbb9ffe12c64452"},
+ {file = "cryptography-37.0.2.tar.gz", hash = "sha256:f224ad253cc9cea7568f49077007d2263efa57396a2f2f78114066fd54b5c68e"},
]
deepdiff = [
- {file = "deepdiff-5.8.0-py3-none-any.whl", hash = "sha256:6b71714a6a5fb4cd6ab74c97f53303029118f96128082422342a9129a5f25c8f"},
- {file = "deepdiff-5.8.0.tar.gz", hash = "sha256:7e641c0cd6429c9e1b64a07b8f7713382a5626afe18c72bcafa8a4343c05c701"},
+ {file = "deepdiff-5.8.1-py3-none-any.whl", hash = "sha256:e9aea49733f34fab9a0897038d8f26f9d94a97db1790f1b814cced89e9e0d2b7"},
+ {file = "deepdiff-5.8.1.tar.gz", hash = "sha256:8d4eb2c4e6cbc80b811266419cb71dd95a157094a3947ccf937a94d44943c7b8"},
]
distlib = [
{file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"},
{file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"},
]
+dulwich = [
+ {file = "dulwich-0.20.38.tar.gz", hash = "sha256:7346790d8735c86fbbc5b70b674f0ef94096c1e5099ba7273491628239817fc8"},
+]
entrypoints = [
{file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"},
{file = "entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"},
]
filelock = [
- {file = "filelock-3.6.0-py3-none-any.whl", hash = "sha256:f8314284bfffbdcfa0ff3d7992b023d4c628ced6feb957351d4c48d059f56bc0"},
- {file = "filelock-3.6.0.tar.gz", hash = "sha256:9cd540a9352e432c7246a48fe4e8712b10acb1df2ad1f30e8c070b82ae1fed85"},
+ {file = "filelock-3.7.0-py3-none-any.whl", hash = "sha256:c7b5fdb219b398a5b28c8e4c1893ef5f98ece6a38c6ab2c22e26ec161556fed6"},
+ {file = "filelock-3.7.0.tar.gz", hash = "sha256:b795f1b42a61bbf8ec7113c341dad679d772567b936fbd1bf43c9a238e673e20"},
]
flatdict = [
{file = "flatdict-4.0.1.tar.gz", hash = "sha256:cd32f08fd31ed21eb09ebc76f06b6bd12046a24f77beb1fd0281917e47f26742"},
@@ -908,8 +996,8 @@ httpretty = [
{file = "httpretty-1.1.4.tar.gz", hash = "sha256:20de0e5dd5a18292d36d928cc3d6e52f8b2ac73daec40d41eb62dee154933b68"},
]
identify = [
- {file = "identify-2.4.12-py2.py3-none-any.whl", hash = "sha256:5f06b14366bd1facb88b00540a1de05b69b310cbc2654db3c7e07fa3a4339323"},
- {file = "identify-2.4.12.tar.gz", hash = "sha256:3f3244a559290e7d3deb9e9adc7b33594c1bc85a9dd82e0f1be519bf12a1ec17"},
+ {file = "identify-2.5.0-py2.py3-none-any.whl", hash = "sha256:3acfe15a96e4272b4ec5662ee3e231ceba976ef63fd9980ed2ce9cc415df393f"},
+ {file = "identify-2.5.0.tar.gz", hash = "sha256:c83af514ea50bf2be2c4a3f2fb349442b59dc87284558ae9ff54191bff3541d2"},
]
idna = [
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
@@ -971,6 +1059,35 @@ msgpack = [
{file = "msgpack-1.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:f01b26c2290cbd74316990ba84a14ac3d599af9cebefc543d241a66e785cf17d"},
{file = "msgpack-1.0.3.tar.gz", hash = "sha256:51fdc7fb93615286428ee7758cecc2f374d5ff363bdd884c7ea622a7a327a81e"},
]
+mypy = [
+ {file = "mypy-0.950-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cf9c261958a769a3bd38c3e133801ebcd284ffb734ea12d01457cb09eacf7d7b"},
+ {file = "mypy-0.950-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5b5bd0ffb11b4aba2bb6d31b8643902c48f990cc92fda4e21afac658044f0c0"},
+ {file = "mypy-0.950-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e7647df0f8fc947388e6251d728189cfadb3b1e558407f93254e35abc026e22"},
+ {file = "mypy-0.950-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eaff8156016487c1af5ffa5304c3e3fd183edcb412f3e9c72db349faf3f6e0eb"},
+ {file = "mypy-0.950-cp310-cp310-win_amd64.whl", hash = "sha256:563514c7dc504698fb66bb1cf897657a173a496406f1866afae73ab5b3cdb334"},
+ {file = "mypy-0.950-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dd4d670eee9610bf61c25c940e9ade2d0ed05eb44227275cce88701fee014b1f"},
+ {file = "mypy-0.950-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ca75ecf2783395ca3016a5e455cb322ba26b6d33b4b413fcdedfc632e67941dc"},
+ {file = "mypy-0.950-cp36-cp36m-win_amd64.whl", hash = "sha256:6003de687c13196e8a1243a5e4bcce617d79b88f83ee6625437e335d89dfebe2"},
+ {file = "mypy-0.950-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4c653e4846f287051599ed8f4b3c044b80e540e88feec76b11044ddc5612ffed"},
+ {file = "mypy-0.950-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e19736af56947addedce4674c0971e5dceef1b5ec7d667fe86bcd2b07f8f9075"},
+ {file = "mypy-0.950-cp37-cp37m-win_amd64.whl", hash = "sha256:ef7beb2a3582eb7a9f37beaf38a28acfd801988cde688760aea9e6cc4832b10b"},
+ {file = "mypy-0.950-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0112752a6ff07230f9ec2f71b0d3d4e088a910fdce454fdb6553e83ed0eced7d"},
+ {file = "mypy-0.950-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ee0a36edd332ed2c5208565ae6e3a7afc0eabb53f5327e281f2ef03a6bc7687a"},
+ {file = "mypy-0.950-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77423570c04aca807508a492037abbd72b12a1fb25a385847d191cd50b2c9605"},
+ {file = "mypy-0.950-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5ce6a09042b6da16d773d2110e44f169683d8cc8687e79ec6d1181a72cb028d2"},
+ {file = "mypy-0.950-cp38-cp38-win_amd64.whl", hash = "sha256:5b231afd6a6e951381b9ef09a1223b1feabe13625388db48a8690f8daa9b71ff"},
+ {file = "mypy-0.950-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0384d9f3af49837baa92f559d3fa673e6d2652a16550a9ee07fc08c736f5e6f8"},
+ {file = "mypy-0.950-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1fdeb0a0f64f2a874a4c1f5271f06e40e1e9779bf55f9567f149466fc7a55038"},
+ {file = "mypy-0.950-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:61504b9a5ae166ba5ecfed9e93357fd51aa693d3d434b582a925338a2ff57fd2"},
+ {file = "mypy-0.950-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a952b8bc0ae278fc6316e6384f67bb9a396eb30aced6ad034d3a76120ebcc519"},
+ {file = "mypy-0.950-cp39-cp39-win_amd64.whl", hash = "sha256:eaea21d150fb26d7b4856766e7addcf929119dd19fc832b22e71d942835201ef"},
+ {file = "mypy-0.950-py3-none-any.whl", hash = "sha256:a4d9898f46446bfb6405383b57b96737dcfd0a7f25b748e78ef3e8c576bba3cb"},
+ {file = "mypy-0.950.tar.gz", hash = "sha256:1b333cfbca1762ff15808a0ef4f71b5d3eed8528b23ea1c3fb50543c867d68de"},
+]
+mypy-extensions = [
+ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
+ {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
+]
nodeenv = [
{file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"},
{file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"},
@@ -992,8 +1109,8 @@ pkginfo = [
{file = "pkginfo-1.8.2.tar.gz", hash = "sha256:542e0d0b6750e2e21c20179803e40ab50598d8066d51097a0e382cba9eb02bff"},
]
platformdirs = [
- {file = "platformdirs-2.5.1-py3-none-any.whl", hash = "sha256:bcae7cab893c2d310a711b70b24efb93334febe65f8de776ee320b517471e227"},
- {file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"},
+ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
+ {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
]
pluggy = [
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
@@ -1004,12 +1121,12 @@ poetry-core = [
{file = "poetry_core-1.1.0a7-py3-none-any.whl", hash = "sha256:724e8b5368f270461e622396305d0c2e760ec9d4c14d072e6b944da9384c67de"},
]
poetry-plugin-export = [
- {file = "poetry-plugin-export-1.0.1.tar.gz", hash = "sha256:e2a87bef5b05cb37eee67fb25d9e4f8e8d538ab64a7ec582ab207366722b9dbe"},
- {file = "poetry_plugin_export-1.0.1-py3-none-any.whl", hash = "sha256:83902686faa7820be7e445978562c852dda94568bf63cc48ec47f55b250f1f40"},
+ {file = "poetry-plugin-export-1.0.2.tar.gz", hash = "sha256:4b4edcfa3656c11e5529a8f365f084933402cf7f9306163fdfe44e6735d7cf16"},
+ {file = "poetry_plugin_export-1.0.2-py3-none-any.whl", hash = "sha256:f27209ee3c162757bc08a6d0f534f4bdf8d737e966aa07d84e0058c8ab3ed66e"},
]
pre-commit = [
- {file = "pre_commit-2.18.1-py2.py3-none-any.whl", hash = "sha256:02226e69564ebca1a070bd1f046af866aa1c318dbc430027c50ab832ed2b73f2"},
- {file = "pre_commit-2.18.1.tar.gz", hash = "sha256:5d445ee1fa8738d506881c5d84f83c62bb5be6b2838e32207433647e8e5ebe10"},
+ {file = "pre_commit-2.19.0-py2.py3-none-any.whl", hash = "sha256:10c62741aa5704faea2ad69cb550ca78082efe5697d6f04e5710c3c229afdd10"},
+ {file = "pre_commit-2.19.0.tar.gz", hash = "sha256:4233a1e38621c87d9dda9808c6606d7e7ba0e087cd56d3fe03202a01d2919615"},
]
ptyprocess = [
{file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"},
@@ -1028,16 +1145,16 @@ pylev = [
{file = "pylev-1.4.0.tar.gz", hash = "sha256:9e77e941042ad3a4cc305dcdf2b2dec1aec2fbe3dd9015d2698ad02b173006d1"},
]
pyparsing = [
- {file = "pyparsing-3.0.8-py3-none-any.whl", hash = "sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06"},
- {file = "pyparsing-3.0.8.tar.gz", hash = "sha256:7bf433498c016c4314268d95df76c81b842a4cb2b276fa3312cfb1e1d85f6954"},
+ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
+ {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},
]
pytest = [
- {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"},
- {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"},
+ {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"},
+ {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"},
]
pytest-cov = [
- {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"},
- {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"},
+ {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"},
+ {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"},
]
pytest-mock = [
{file = "pytest-mock-3.7.0.tar.gz", hash = "sha256:5112bd92cc9f186ee96e1a92efc84969ea494939c3aead39c50f421c4cc69534"},
@@ -1094,8 +1211,8 @@ requests-toolbelt = [
{file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"},
]
secretstorage = [
- {file = "SecretStorage-3.3.1-py3-none-any.whl", hash = "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f"},
- {file = "SecretStorage-3.3.1.tar.gz", hash = "sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195"},
+ {file = "SecretStorage-3.3.2-py3-none-any.whl", hash = "sha256:755dc845b6ad76dcbcbc07ea3da75ae54bb1ea529eb72d15f83d26499a5df319"},
+ {file = "SecretStorage-3.3.2.tar.gz", hash = "sha256:0a8eb9645b320881c222e827c26f4cfcf55363e8b374a021981ef886657a912f"},
]
shellingham = [
{file = "shellingham-1.4.0-py2.py3-none-any.whl", hash = "sha256:536b67a0697f2e4af32ab176c00a50ac2899c5a05e0d8e2dadac8e58888283f9"},
@@ -1112,17 +1229,55 @@ toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
+tomli = [
+ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
tomlkit = [
- {file = "tomlkit-0.10.1-py3-none-any.whl", hash = "sha256:3eba517439dcb2f84cf39f4f85fd2c3398309823a3c75ac3e73003638daf7915"},
- {file = "tomlkit-0.10.1.tar.gz", hash = "sha256:3c517894eadef53e9072d343d37e4427b8f0b6200a70b7c9a19b2ebd1f53b951"},
+ {file = "tomlkit-0.10.2-py3-none-any.whl", hash = "sha256:905cf92c2111ef80d355708f47ac24ad1b6fc2adc5107455940088c9bbecaedb"},
+ {file = "tomlkit-0.10.2.tar.gz", hash = "sha256:30d54c0b914e595f3d10a87888599eab5321a2a69abc773bbefff51599b72db6"},
]
tox = [
{file = "tox-3.25.0-py2.py3-none-any.whl", hash = "sha256:0805727eb4d6b049de304977dfc9ce315a1938e6619c3ab9f38682bb04662a5a"},
{file = "tox-3.25.0.tar.gz", hash = "sha256:37888f3092aa4e9f835fc8cc6dadbaaa0782651c41ef359e3a5743fcb0308160"},
]
+typed-ast = [
+ {file = "typed_ast-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ad3b48cf2b487be140072fb86feff36801487d4abb7382bb1929aaac80638ea"},
+ {file = "typed_ast-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:542cd732351ba8235f20faa0fc7398946fe1a57f2cdb289e5497e1e7f48cfedb"},
+ {file = "typed_ast-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc2c11ae59003d4a26dda637222d9ae924387f96acae9492df663843aefad55"},
+ {file = "typed_ast-1.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd5df1313915dbd70eaaa88c19030b441742e8b05e6103c631c83b75e0435ccc"},
+ {file = "typed_ast-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:e34f9b9e61333ecb0f7d79c21c28aa5cd63bec15cb7e1310d7d3da6ce886bc9b"},
+ {file = "typed_ast-1.5.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f818c5b81966d4728fec14caa338e30a70dfc3da577984d38f97816c4b3071ec"},
+ {file = "typed_ast-1.5.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3042bfc9ca118712c9809201f55355479cfcdc17449f9f8db5e744e9625c6805"},
+ {file = "typed_ast-1.5.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4fff9fdcce59dc61ec1b317bdb319f8f4e6b69ebbe61193ae0a60c5f9333dc49"},
+ {file = "typed_ast-1.5.3-cp36-cp36m-win_amd64.whl", hash = "sha256:8e0b8528838ffd426fea8d18bde4c73bcb4167218998cc8b9ee0a0f2bfe678a6"},
+ {file = "typed_ast-1.5.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ef1d96ad05a291f5c36895d86d1375c0ee70595b90f6bb5f5fdbee749b146db"},
+ {file = "typed_ast-1.5.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed44e81517364cb5ba367e4f68fca01fba42a7a4690d40c07886586ac267d9b9"},
+ {file = "typed_ast-1.5.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f60d9de0d087454c91b3999a296d0c4558c1666771e3460621875021bf899af9"},
+ {file = "typed_ast-1.5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9e237e74fd321a55c90eee9bc5d44be976979ad38a29bbd734148295c1ce7617"},
+ {file = "typed_ast-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ee852185964744987609b40aee1d2eb81502ae63ee8eef614558f96a56c1902d"},
+ {file = "typed_ast-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:27e46cdd01d6c3a0dd8f728b6a938a6751f7bd324817501c15fb056307f918c6"},
+ {file = "typed_ast-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d64dabc6336ddc10373922a146fa2256043b3b43e61f28961caec2a5207c56d5"},
+ {file = "typed_ast-1.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8cdf91b0c466a6c43f36c1964772918a2c04cfa83df8001ff32a89e357f8eb06"},
+ {file = "typed_ast-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:9cc9e1457e1feb06b075c8ef8aeb046a28ec351b1958b42c7c31c989c841403a"},
+ {file = "typed_ast-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e20d196815eeffb3d76b75223e8ffed124e65ee62097e4e73afb5fec6b993e7a"},
+ {file = "typed_ast-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:37e5349d1d5de2f4763d534ccb26809d1c24b180a477659a12c4bde9dd677d74"},
+ {file = "typed_ast-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f1a27592fac87daa4e3f16538713d705599b0a27dfe25518b80b6b017f0a6d"},
+ {file = "typed_ast-1.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8831479695eadc8b5ffed06fdfb3e424adc37962a75925668deeb503f446c0a3"},
+ {file = "typed_ast-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:20d5118e494478ef2d3a2702d964dae830aedd7b4d3b626d003eea526be18718"},
+ {file = "typed_ast-1.5.3.tar.gz", hash = "sha256:27f25232e2dd0edfe1f019d6bfaaf11e86e657d9bdb7b0956db95f560cceb2b3"},
+]
+types-requests = [
+ {file = "types-requests-2.27.26.tar.gz", hash = "sha256:a6a04c0274c0949fd0525f35d8b53ac34e77afecbeb3c4932ddc6ce675ac009c"},
+ {file = "types_requests-2.27.26-py3-none-any.whl", hash = "sha256:302137cb5bd482357398a155faf3ed095855fbc6994e952d0496c7fd50f44125"},
+]
+types-urllib3 = [
+ {file = "types-urllib3-1.26.14.tar.gz", hash = "sha256:2a2578e4b36341ccd240b00fccda9826988ff0589a44ba4a664bbd69ef348d27"},
+ {file = "types_urllib3-1.26.14-py3-none-any.whl", hash = "sha256:5d2388aa76395b1e3999ff789ea5b3283677dad8e9bcf3d9117ba19271fd35d9"},
+]
typing-extensions = [
- {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"},
- {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"},
+ {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"},
+ {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"},
]
urllib3 = [
{file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"},
diff --git a/pyproject.toml b/pyproject.toml
index 86976b24947..d7a1db7b42e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -35,7 +35,7 @@ generate-setup-file = false
python = "^3.7"
poetry-core = "^1.1.0a7"
-poetry-plugin-export = "^1.0"
+poetry-plugin-export = "^1.0.2"
cachecontrol = { version = "^0.12.9", extras = ["filecache"] }
cachy = "^0.3.0"
cleo = "^1.0.0a4"
@@ -43,12 +43,13 @@ crashtest = "^0.3.0"
entrypoints = "^0.3"
html5lib = "^1.0"
importlib-metadata = { version = ">=1.6.0", python = "<3.8" }
-# packaging uses calver, so version is unclamped
+# keyring uses calver, so version is unclamped
keyring = ">=21.2.0"
# packaging uses calver, so version is unclamped
packaging = ">=20.4"
pexpect = "^4.7.0"
pkginfo = "^1.5"
+platformdirs = "^2.5.2"
requests = "^2.18"
requests-toolbelt = "^0.9.1"
shellingham = "^1.1"
@@ -56,11 +57,12 @@ tomlkit = ">=0.7.0,<1.0.0"
# exclude 20.4.5 - 20.4.6 due to https://github.com/pypa/pip/issues/9953
virtualenv = "(>=20.4.3,<20.4.5 || >=20.4.7)"
urllib3 = "^1.26.0"
+dulwich = "^0.20.35"
[tool.poetry.dev-dependencies]
tox = "^3.18"
-pytest = "^6.2"
-pytest-cov = "^2.8"
+pytest = "^7.1"
+pytest-cov = "^3.0"
pytest-mock = "^3.5"
pytest-sugar = "^0.9"
pre-commit = "^2.6"
@@ -69,6 +71,8 @@ httpretty = "^1.0"
typing-extensions = { version = "^4.0.0", python = "<3.8" }
zipp = { version = "^3.4", python = "<3.8" }
flatdict = "^4.0.1"
+mypy = ">=0.950"
+types-requests = ">=2.27.11"
[tool.poetry.scripts]
poetry = "poetry.console.application:main"
@@ -99,13 +103,10 @@ force-exclude = '''
[tool.mypy]
-check_untyped_defs = true
-ignore_missing_imports = true
-show_error_codes = true
-warn_redundant_casts = true
-warn_unused_configs = true
-warn_unused_ignores = true
files = "src"
+show_error_codes = true
+strict = true
+enable_error_code = ["ignore-without-code"]
# The following whitelist is used to allow for incremental adoption
# of Mypy. Modules should be removed from this whitelist as and when
@@ -115,18 +116,39 @@ files = "src"
[[tool.mypy.overrides]]
module = [
- 'poetry.console.commands.init',
- 'poetry.inspection.info',
- 'poetry.installation.chef',
- 'poetry.installation.chooser',
- 'poetry.installation.executor',
- 'poetry.installation.installer',
- 'poetry.installation.pip_installer',
- 'poetry.repositories.installed_repository',
'poetry.utils.env',
]
ignore_errors = true
+# use of the importlib-metadata backport at python 3.7 makes it impossible to
+# satisfy mypy without some ignores: but we get a different set of ignores at
+# different python versions.
+#
+# meanwhile, suppress the unused-ignore warning for these
+# modules.
+[[tool.mypy.overrides]]
+module = [
+ 'poetry.installation.executor',
+ 'poetry.repositories.installed_repository',
+]
+warn_unused_ignores = false
+
+[[tool.mypy.overrides]]
+module = [
+ 'cachecontrol.*',
+ 'cachy.*',
+ 'cleo.*',
+ 'crashtest.*',
+ 'entrypoints.*',
+ 'html5lib.*',
+ 'jsonschema.*',
+ 'pexpect.*',
+ 'pkginfo.*',
+ 'poetry.core.*',
+ 'requests_toolbelt.*',
+ 'shellingham.*',
+]
+ignore_missing_imports = true
[tool.coverage.report]
exclude_lines = [
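
A note on the stricter mypy settings above: with strict mode plus enable_error_code = ["ignore-without-code"], the source changes below either attach an explicit error code to an ignore (for example the re.sub lambda in config.py) or avoid the ignore entirely by assigning to an annotated local such as exit_code: int. A minimal sketch of the strict-mode error both techniques address (function names are made up):

    from typing import Any

    def untyped() -> Any:  # stand-in for an untyped third-party call
        return 1

    def with_ignore() -> int:
        # strict mypy reports "Returning Any from function declared to return int";
        # ignore-without-code additionally requires the explicit error code.
        return untyped()  # type: ignore[no-any-return]

    def with_annotation() -> int:
        exit_code: int = untyped()  # annotate a local instead of suppressing
        return exit_code

    print(with_ignore(), with_annotation())
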
diff --git a/src/poetry/__version__.py b/src/poetry/__version__.py
index 2c9a2eac138..13fa08fc202 100644
--- a/src/poetry/__version__.py
+++ b/src/poetry/__version__.py
@@ -1,6 +1,16 @@
from __future__ import annotations
+from typing import TYPE_CHECKING
+
from poetry.utils._compat import metadata
-__version__ = metadata.version("poetry")
+if TYPE_CHECKING:
+ from collections.abc import Callable
+
+
+# The metadata.version that we import for Python 3.7 is untyped, work around
+# that.
+version: Callable[[str], str] = metadata.version
+
+__version__ = version("poetry")
diff --git a/src/poetry/config/config.py b/src/poetry/config/config.py
index ec6f34bb338..8bd4a5070b8 100644
--- a/src/poetry/config/config.py
+++ b/src/poetry/config/config.py
@@ -1,5 +1,7 @@
from __future__ import annotations
+import dataclasses
+import logging
import os
import re
@@ -7,13 +9,19 @@
from pathlib import Path
from typing import TYPE_CHECKING
from typing import Any
-from typing import Callable
+
+from poetry.core.toml import TOMLFile
+from poetry.core.utils.helpers import canonicalize_name
from poetry.config.dict_config_source import DictConfigSource
+from poetry.config.file_config_source import FileConfigSource
from poetry.locations import CACHE_DIR
+from poetry.locations import CONFIG_DIR
if TYPE_CHECKING:
+ from collections.abc import Callable
+
from poetry.config.config_source import ConfigSource
@@ -29,6 +37,73 @@ def int_normalizer(val: str) -> int:
return int(val)
+@dataclasses.dataclass
+class PackageFilterPolicy:
+ policy: dataclasses.InitVar[str | list[str] | None]
+ packages: list[str] = dataclasses.field(init=False)
+
+ def __post_init__(self, policy: str | list[str] | None) -> None:
+ if not policy:
+ policy = []
+ elif isinstance(policy, str):
+ policy = self.normalize(policy)
+ self.packages = policy
+
+ def allows(self, package_name: str) -> bool:
+ if ":all:" in self.packages:
+ return False
+
+ return (
+ not self.packages
+ or ":none:" in self.packages
+ or canonicalize_name(package_name) not in self.packages
+ )
+
+ @classmethod
+ def is_reserved(cls, name: str) -> bool:
+ return bool(re.match(r":(all|none):", name))
+
+ @classmethod
+ def normalize(cls, policy: str) -> list[str]:
+ if boolean_validator(policy):
+ if boolean_normalizer(policy):
+ return [":all:"]
+ else:
+ return [":none:"]
+
+ return list(
+ {
+ name.strip() if cls.is_reserved(name) else canonicalize_name(name)
+ for name in policy.strip().split(",")
+ if name
+ }
+ )
+
+ @classmethod
+ def validator(cls, policy: str) -> bool:
+ if boolean_validator(policy):
+ return True
+
+ names = policy.strip().split(",")
+
+ for name in names:
+ if (
+ not name
+ or (cls.is_reserved(name) and len(names) == 1)
+ or re.match(r"^[a-zA-Z\d_-]+$", name)
+ ):
+ continue
+ return False
+
+ return True
+
+
+logger = logging.getLogger(__name__)
+
+
+_default_config: Config | None = None
+
+
class Config:
default_config: dict[str, Any] = {
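
PackageFilterPolicy backs the new installer.no-binary setting registered in the config command further down. A small usage sketch of the class as defined above (package names are illustrative and assume this branch of Poetry is importable):

    from poetry.config.config import PackageFilterPolicy

    # ":all:" matches every package (allows() is always False); ":none:" or an
    # empty policy matches nothing.
    assert not PackageFilterPolicy(":all:").allows("requests")
    assert PackageFilterPolicy(":none:").allows("requests")
    assert PackageFilterPolicy(None).allows("requests")

    # A comma-separated list matches only the named packages; names are
    # canonicalized, so "Pillow" matches a "pillow" entry.
    policy = PackageFilterPolicy("pillow,grpcio")
    assert not policy.allows("Pillow")
    assert policy.allows("requests")
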
@@ -37,11 +112,20 @@ class Config:
"create": True,
"in-project": None,
"path": os.path.join("{cache-dir}", "virtualenvs"),
- "options": {"always-copy": False, "system-site-packages": False},
+ "options": {
+ "always-copy": False,
+ "system-site-packages": False,
+                # we default to False here in order to avoid breaking development
+                # environments such as IDEs, which commonly assume that a virtual
+                # environment has pip and setuptools available.
+ "no-pip": False,
+ "no-setuptools": False,
+ },
"prefer-active-python": False,
},
- "experimental": {"new-installer": True},
- "installer": {"parallel": True, "max-workers": None},
+ "experimental": {"new-installer": True, "system-git-client": False},
+ "installer": {"parallel": True, "max-workers": None, "no-binary": None},
}
def __init__(
@@ -54,7 +138,7 @@ def __init__(
self._auth_config_source: ConfigSource = DictConfigSource()
@property
- def config(self) -> dict:
+ def config(self) -> dict[str, Any]:
return self._config
@property
@@ -81,7 +165,7 @@ def merge(self, config: dict[str, Any]) -> None:
merge_dicts(self._config, config)
def all(self) -> dict[str, Any]:
- def _all(config: dict, parent_key: str = "") -> dict:
+ def _all(config: dict[str, Any], parent_key: str = "") -> dict[str, Any]:
all_ = {}
for key in config:
@@ -103,6 +187,20 @@ def _all(config: dict, parent_key: str = "") -> dict:
def raw(self) -> dict[str, Any]:
return self._config
+ @staticmethod
+ def _get_environment_repositories() -> dict[str, dict[str, str]]:
+ repositories = {}
+        pattern = re.compile(r"POETRY_REPOSITORIES_(?P<name>[A-Z_]+)_URL")
+
+ for env_key in os.environ.keys():
+ match = pattern.match(env_key)
+ if match:
+ repositories[match.group("name").lower().replace("_", "-")] = {
+ "url": os.environ[env_key]
+ }
+
+ return repositories
+
def get(self, setting_name: str, default: Any = None) -> Any:
"""
Retrieve a setting value.
@@ -112,6 +210,12 @@ def get(self, setting_name: str, default: Any = None) -> Any:
# Looking in the environment if the setting
# is set via a POETRY_* environment variable
if self._use_environment:
+ if setting_name == "repositories":
+ # repositories setting is special for now
+ repositories = self._get_environment_repositories()
+ if repositories:
+ return repositories
+
env = "POETRY_" + "_".join(k.upper().replace("-", "_") for k in keys)
env_value = os.getenv(env)
if env_value is not None:
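
Taken together, the two hunks above let repositories be configured purely through the environment. A rough sketch of the lookup, assuming use_environment is left at its default (the variable name and URL are made up):

    import os

    from poetry.config.config import Config

    os.environ["POETRY_REPOSITORIES_MY_MIRROR_URL"] = "https://example.com/simple/"

    config = Config()
    # POETRY_REPOSITORIES_<NAME>_URL is translated into a nested dict, with
    # underscores in <NAME> becoming dashes in the repository name.
    assert config.get("repositories")["my-mirror"] == {"url": "https://example.com/simple/"}
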
@@ -130,10 +234,14 @@ def process(self, value: Any) -> Any:
if not isinstance(value, str):
return value
- return re.sub(r"{(.+?)}", lambda m: self.get(m.group(1)), value)
+ return re.sub(
+ r"{(.+?)}",
+ lambda m: self.get(m.group(1)), # type: ignore[no-any-return]
+ value,
+ )
@staticmethod
- def _get_normalizer(name: str) -> Callable:
+ def _get_normalizer(name: str) -> Callable[[str], Any]:
if name in {
"virtualenvs.create",
"virtualenvs.in-project",
@@ -141,6 +249,7 @@ def _get_normalizer(name: str) -> Callable:
"virtualenvs.options.system-site-packages",
"virtualenvs.options.prefer-active-python",
"experimental.new-installer",
+ "experimental.system-git-client",
"installer.parallel",
}:
return boolean_normalizer
@@ -151,4 +260,32 @@ def _get_normalizer(name: str) -> Callable:
if name == "installer.max-workers":
return int_normalizer
+ if name == "installer.no-binary":
+ return PackageFilterPolicy.normalize
+
return lambda val: val
+
+ @classmethod
+ def create(cls, reload: bool = False) -> Config:
+ global _default_config
+
+ if _default_config is None or reload:
+ _default_config = cls()
+
+ # Load global config
+ config_file = TOMLFile(CONFIG_DIR / "config.toml")
+ if config_file.exists():
+ logger.debug("Loading configuration file %s", config_file.path)
+ _default_config.merge(config_file.read())
+
+ _default_config.set_config_source(FileConfigSource(config_file))
+
+ # Load global auth config
+ auth_config_file = TOMLFile(CONFIG_DIR / "auth.toml")
+ if auth_config_file.exists():
+ logger.debug("Loading configuration file %s", auth_config_file.path)
+ _default_config.merge(auth_config_file.read())
+
+ _default_config.set_auth_config_source(FileConfigSource(auth_config_file))
+
+ return _default_config
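
Config.create() gives call sites (such as the config command below) a lazily built, process-wide default configuration; reload=True rebuilds it from the global config.toml and auth.toml. A quick sketch of the caching behaviour:

    from poetry.config.config import Config

    first = Config.create()
    assert Config.create() is first      # memoized module-level default
    fresh = Config.create(reload=True)   # re-reads config.toml / auth.toml
    assert fresh is not first
    assert Config.create() is fresh      # the reloaded instance becomes the default
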
diff --git a/src/poetry/config/file_config_source.py b/src/poetry/config/file_config_source.py
index 5d8584a8c57..bbf5c568942 100644
--- a/src/poetry/config/file_config_source.py
+++ b/src/poetry/config/file_config_source.py
@@ -30,7 +30,8 @@ def file(self) -> TOMLFile:
return self._file
def add_property(self, key: str, value: Any) -> None:
- with self.secure() as config:
+ with self.secure() as toml:
+ config: dict[str, Any] = toml
keys = key.split(".")
for i, key in enumerate(keys):
@@ -44,7 +45,8 @@ def add_property(self, key: str, value: Any) -> None:
config = config[key]
def remove_property(self, key: str) -> None:
- with self.secure() as config:
+ with self.secure() as toml:
+ config: dict[str, Any] = toml
keys = key.split(".")
current_config = config
diff --git a/src/poetry/console/application.py b/src/poetry/console/application.py
index 9d9c7ed581a..c37cd5a677e 100644
--- a/src/poetry/console/application.py
+++ b/src/poetry/console/application.py
@@ -7,7 +7,6 @@
from importlib import import_module
from typing import TYPE_CHECKING
from typing import Any
-from typing import Callable
from typing import cast
from cleo.application import Application as BaseApplication
@@ -24,6 +23,8 @@
if TYPE_CHECKING:
+ from collections.abc import Callable
+
from cleo.events.console_command_event import ConsoleCommandEvent
from cleo.io.inputs.definition import Definition
from cleo.io.inputs.input import Input
@@ -37,12 +38,13 @@
from poetry.poetry import Poetry
-def load_command(name: str) -> Callable:
+def load_command(name: str) -> Callable[[], type[Command]]:
def _load() -> type[Command]:
words = name.split(" ")
module = import_module("poetry.console.commands." + ".".join(words))
command_class = getattr(module, "".join(c.title() for c in words) + "Command")
- return command_class()
+ command_type: type[Command] = command_class()
+ return command_type
return _load
@@ -89,13 +91,14 @@ def _load() -> type[Command]:
]
-class Application(BaseApplication):
+class Application(BaseApplication): # type: ignore[misc]
def __init__(self) -> None:
super().__init__("poetry", __version__)
self._poetry: Poetry | None = None
self._io: IO | None = None
self._disable_plugins = False
+ self._disable_cache = False
self._plugins_loaded = False
dispatcher = EventDispatcher()
@@ -117,14 +120,18 @@ def poetry(self) -> Poetry:
return self._poetry
self._poetry = Factory().create_poetry(
- Path.cwd(), io=self._io, disable_plugins=self._disable_plugins
+ Path.cwd(),
+ io=self._io,
+ disable_plugins=self._disable_plugins,
+ disable_cache=self._disable_cache,
)
return self._poetry
@property
def command_loader(self) -> CommandLoader:
- return self._command_loader
+ command_loader: CommandLoader = self._command_loader
+ return command_loader
def reset_poetry(self) -> None:
self._poetry = None
@@ -168,10 +175,12 @@ def render_error(self, error: Exception, io: IO) -> None:
def _run(self, io: IO) -> int:
self._disable_plugins = io.input.parameter_option("--no-plugins")
+ self._disable_cache = io.input.has_parameter_option("--no-cache")
self._load_plugins(io)
- return super()._run(io)
+ exit_code: int = super()._run(io)
+ return exit_code
def _configure_io(self, io: IO) -> None:
# We need to check if the command being run
@@ -207,7 +216,7 @@ def _configure_io(self, io: IO) -> None:
io.set_input(run_input)
- return super()._configure_io(io)
+ super()._configure_io(io)
def register_command_loggers(
self, event: ConsoleCommandEvent, event_name: str, _: Any
@@ -347,6 +356,12 @@ def _default_definition(self) -> Definition:
Option("--no-plugins", flag=True, description="Disables plugins.")
)
+ definition.add_option(
+ Option(
+ "--no-cache", flag=True, description="Disables Poetry source caches."
+ )
+ )
+
return definition
def _get_solution_provider_repository(self) -> SolutionProviderRepository:
@@ -365,7 +380,8 @@ def _get_solution_provider_repository(self) -> SolutionProviderRepository:
def main() -> int:
- return Application().run()
+ exit_code: int = Application().run()
+ return exit_code
if __name__ == "__main__":
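
The load_command() factory above derives both the module path and the command class name from the command's space-separated name. A standalone illustration of that mapping, with no Poetry import required:

    name = "env info"
    words = name.split(" ")
    module_path = "poetry.console.commands." + ".".join(words)
    class_name = "".join(word.title() for word in words) + "Command"

    # -> poetry.console.commands.env.info / EnvInfoCommand
    print(module_path, class_name)
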
diff --git a/src/poetry/console/command_loader.py b/src/poetry/console/command_loader.py
index 91485766f01..40f6b7f31bd 100644
--- a/src/poetry/console/command_loader.py
+++ b/src/poetry/console/command_loader.py
@@ -1,13 +1,21 @@
from __future__ import annotations
-from typing import Callable
+from typing import TYPE_CHECKING
from cleo.exceptions import LogicException
from cleo.loaders.factory_command_loader import FactoryCommandLoader
-class CommandLoader(FactoryCommandLoader):
- def register_factory(self, command_name: str, factory: Callable) -> None:
+if TYPE_CHECKING:
+ from collections.abc import Callable
+
+ from poetry.console.commands.command import Command
+
+
+class CommandLoader(FactoryCommandLoader): # type: ignore[misc]
+ def register_factory(
+ self, command_name: str, factory: Callable[[], Command]
+ ) -> None:
if command_name in self._factories:
raise LogicException(f'The command "{command_name}" already exists.')
diff --git a/src/poetry/console/commands/about.py b/src/poetry/console/commands/about.py
index b9af2ddc1cb..1841c5448c7 100644
--- a/src/poetry/console/commands/about.py
+++ b/src/poetry/console/commands/about.py
@@ -1,8 +1,14 @@
from __future__ import annotations
+from typing import TYPE_CHECKING
+
from poetry.console.commands.command import Command
+if TYPE_CHECKING:
+ from collections.abc import Callable
+
+
class AboutCommand(Command):
name = "about"
@@ -12,12 +18,16 @@ class AboutCommand(Command):
def handle(self) -> None:
from poetry.utils._compat import metadata
+ # The metadata.version that we import for Python 3.7 is untyped, work around
+ # that.
+ version: Callable[[str], str] = metadata.version
+
self.line(
f"""\
Poetry - Package Management for Python
-Version: {metadata.version('poetry')}
-Poetry-Core Version: {metadata.version('poetry-core')}
+Version: {version('poetry')}
+Poetry-Core Version: {version('poetry-core')}
Poetry is a dependency manager tracking local dependencies of your projects\
and libraries.
diff --git a/src/poetry/console/commands/add.py b/src/poetry/console/commands/add.py
index 5f8879ff05a..01d2e611bb6 100644
--- a/src/poetry/console/commands/add.py
+++ b/src/poetry/console/commands/add.py
@@ -2,10 +2,12 @@
import contextlib
+from typing import Any
from typing import cast
from cleo.helpers import argument
from cleo.helpers import option
+from tomlkit.toml_document import TOMLDocument
try:
@@ -114,7 +116,9 @@ def handle(self) -> int:
"You can only specify one package when using the --extras option"
)
- content = self.poetry.file.read()
+ # tomlkit types are awkward to work with, treat content as a mostly untyped
+ # dictionary.
+ content: dict[str, Any] = self.poetry.file.read()
poetry_content = content["tool"]["poetry"]
if group == MAIN_GROUP:
@@ -130,9 +134,10 @@ def handle(self) -> int:
groups = poetry_content["group"]
if group not in groups:
- group_table = parse_toml(
+ dependencies_toml: dict[str, Any] = parse_toml(
f"[tool.poetry.group.{group}.dependencies]\n\n"
- )["tool"]["poetry"]["group"][group]
+ )
+ group_table = dependencies_toml["tool"]["poetry"]["group"][group]
poetry_content["group"][group] = group_table
if "dependencies" not in poetry_content["group"][group]:
@@ -158,11 +163,13 @@ def handle(self) -> int:
)
for _constraint in requirements:
- if "version" in _constraint:
+ version = _constraint.get("version")
+ if version is not None:
# Validate version constraint
- parse_constraint(_constraint["version"])
+ assert isinstance(version, str)
+ parse_constraint(version)
- constraint = inline_table()
+ constraint: dict[str, Any] = inline_table()
for name, value in _constraint.items():
if name == "name":
continue
@@ -210,16 +217,18 @@ def handle(self) -> int:
if len(constraint) == 1 and "version" in constraint:
constraint = constraint["version"]
- section[_constraint["name"]] = constraint
+ constraint_name = _constraint["name"]
+ assert isinstance(constraint_name, str)
+ section[constraint_name] = constraint
with contextlib.suppress(ValueError):
self.poetry.package.dependency_group(group).remove_dependency(
- _constraint["name"]
+ constraint_name
)
self.poetry.package.add_dependency(
Factory.create_dependency(
- _constraint["name"],
+ constraint_name,
constraint,
groups=[group],
root_dir=self.poetry.file.parent,
@@ -247,12 +256,13 @@ def handle(self) -> int:
status = self._installer.run()
if status == 0 and not self.option("dry-run"):
+ assert isinstance(content, TOMLDocument)
self.poetry.file.write(content)
return status
def get_existing_packages_from_input(
- self, packages: list[str], section: dict
+ self, packages: list[str], section: dict[str, Any]
) -> list[str]:
existing_packages = []
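
The add command now treats the parsed pyproject content as a mostly untyped dict and only re-asserts the tomlkit document type right before writing it back. A tiny sketch of the same pattern against tomlkit directly (the TOML content is illustrative):

    from __future__ import annotations

    from typing import Any

    from tomlkit import parse
    from tomlkit.toml_document import TOMLDocument

    content: dict[str, Any] = parse('[tool.poetry.dependencies]\npython = "^3.7"\n')
    content["tool"]["poetry"]["dependencies"]["requests"] = "^2.28"

    assert isinstance(content, TOMLDocument)  # still a TOMLDocument at runtime
    print(content.as_string())
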
diff --git a/src/poetry/console/commands/build.py b/src/poetry/console/commands/build.py
index 9d5545326ca..a691aed0c99 100644
--- a/src/poetry/console/commands/build.py
+++ b/src/poetry/console/commands/build.py
@@ -3,6 +3,7 @@
from cleo.helpers import option
from poetry.console.commands.env_command import EnvCommand
+from poetry.utils.env import build_environment
class BuildCommand(EnvCommand):
@@ -23,11 +24,12 @@ class BuildCommand(EnvCommand):
def handle(self) -> None:
from poetry.core.masonry.builder import Builder
- fmt = self.option("format") or "all"
- package = self.poetry.package
- self.line(
- f"Building {package.pretty_name} ({package.version})"
- )
+ with build_environment(poetry=self.poetry, env=self.env, io=self.io) as env:
+ fmt = self.option("format") or "all"
+ package = self.poetry.package
+ self.line(
+ f"Building {package.pretty_name} ({package.version})"
+ )
- builder = Builder(self.poetry)
- builder.build(fmt, executable=self.env.python)
+ builder = Builder(self.poetry)
+ builder.build(fmt, executable=env.python)
diff --git a/src/poetry/console/commands/command.py b/src/poetry/console/commands/command.py
index 17ad542c155..4bc26ad567b 100644
--- a/src/poetry/console/commands/command.py
+++ b/src/poetry/console/commands/command.py
@@ -12,7 +12,7 @@
from poetry.poetry import Poetry
-class Command(BaseCommand):
+class Command(BaseCommand): # type: ignore[misc]
loggers: list[str] = []
_poetry: Poetry | None = None
@@ -28,7 +28,8 @@ def set_poetry(self, poetry: Poetry) -> None:
self._poetry = poetry
def get_application(self) -> Application:
- return self.application
+ application: Application = self.application
+ return application
def reset_poetry(self) -> None:
self.get_application().reset_poetry()
diff --git a/src/poetry/console/commands/config.py b/src/poetry/console/commands/config.py
index 058e0560284..8a7ae790a4b 100644
--- a/src/poetry/console/commands/config.py
+++ b/src/poetry/console/commands/config.py
@@ -10,6 +10,7 @@
from cleo.helpers import argument
from cleo.helpers import option
+from poetry.config.config import PackageFilterPolicy
from poetry.console.commands.command import Command
@@ -58,7 +59,7 @@ def unique_config_values(self) -> dict[str, tuple[Any, Any, Any]]:
"cache-dir": (
str,
lambda val: str(Path(val)),
- str(Path(CACHE_DIR) / "virtualenvs"),
+ str(CACHE_DIR / "virtualenvs"),
),
"virtualenvs.create": (boolean_validator, boolean_normalizer, True),
"virtualenvs.in-project": (boolean_validator, boolean_normalizer, False),
@@ -72,10 +73,20 @@ def unique_config_values(self) -> dict[str, tuple[Any, Any, Any]]:
boolean_normalizer,
False,
),
+ "virtualenvs.options.no-pip": (
+ boolean_validator,
+ boolean_normalizer,
+ False,
+ ),
+ "virtualenvs.options.no-setuptools": (
+ boolean_validator,
+ boolean_normalizer,
+ False,
+ ),
"virtualenvs.path": (
str,
lambda val: str(Path(val)),
- str(Path(CACHE_DIR) / "virtualenvs"),
+ str(CACHE_DIR / "virtualenvs"),
),
"virtualenvs.prefer-active-python": (
boolean_validator,
@@ -97,6 +108,11 @@ def unique_config_values(self) -> dict[str, tuple[Any, Any, Any]]:
int_normalizer,
None,
),
+ "installer.no-binary": (
+ PackageFilterPolicy.validator,
+ PackageFilterPolicy.normalize,
+ None,
+ ),
}
return unique_config_values
@@ -107,12 +123,12 @@ def handle(self) -> int | None:
from poetry.core.pyproject.exceptions import PyProjectException
from poetry.core.toml.file import TOMLFile
+ from poetry.config.config import Config
from poetry.config.file_config_source import FileConfigSource
- from poetry.factory import Factory
from poetry.locations import CONFIG_DIR
- config = Factory.create_config(self.io)
- config_file = TOMLFile(Path(CONFIG_DIR) / "config.toml")
+ config = Config.create()
+ config_file = TOMLFile(CONFIG_DIR / "config.toml")
try:
local_config_file = TOMLFile(self.poetry.file.parent / "poetry.toml")
@@ -308,7 +324,8 @@ def _list_configuration(
if isinstance(value, dict):
k += f"{key}."
- self._list_configuration(value, cast(dict, raw_val), k=k)
+ raw_val = cast("dict[str, Any]", raw_val)
+ self._list_configuration(value, raw_val, k=k)
k = orig_k
continue
@@ -328,50 +345,3 @@ def _list_configuration(
message = f"{k + key} = {json.dumps(value)}"
self.line(message)
-
- def _get_setting(
- self,
- contents: dict,
- setting: str | None = None,
- k: str | None = None,
- default: Any | None = None,
- ) -> list[tuple[str, str]]:
- orig_k = k
-
- if setting and setting.split(".")[0] not in contents:
- value = json.dumps(default)
-
- return [((k or "") + setting, value)]
- else:
- values = []
- for key, value in contents.items():
- if setting and key != setting.split(".")[0]:
- continue
-
- if isinstance(value, dict) or key == "repositories" and k is None:
- if k is None:
- k = ""
-
- k += re.sub(r"^config\.", "", key + ".")
- if setting and len(setting) > 1:
- setting = ".".join(setting.split(".")[1:])
-
- values += self._get_setting(
- cast(dict, value), k=k, setting=setting, default=default
- )
- k = orig_k
-
- continue
-
- if isinstance(value, list):
- value = ", ".join(
- json.dumps(val) if isinstance(val, list) else val
- for val in value
- )
- value = f"[{value}]"
-
- value = json.dumps(value)
-
- values.append(((k or "") + key, value))
-
- return values
diff --git a/src/poetry/console/commands/debug/info.py b/src/poetry/console/commands/debug/info.py
index bc062b118df..9f79f36be10 100644
--- a/src/poetry/console/commands/debug/info.py
+++ b/src/poetry/console/commands/debug/info.py
@@ -25,4 +25,5 @@ def handle(self) -> int:
)
command = self.application.get("env info")
- return command.run(self._io)
+ exit_code: int = command.run(self._io)
+ return exit_code
diff --git a/src/poetry/console/commands/debug/resolve.py b/src/poetry/console/commands/debug/resolve.py
index f28d246101d..5a5764c1d28 100644
--- a/src/poetry/console/commands/debug/resolve.py
+++ b/src/poetry/console/commands/debug/resolve.py
@@ -41,7 +41,7 @@ def handle(self) -> int:
from poetry.core.packages.project_package import ProjectPackage
from poetry.factory import Factory
- from poetry.puzzle import Solver
+ from poetry.puzzle.solver import Solver
from poetry.repositories.pool import Pool
from poetry.repositories.repository import Repository
from poetry.utils.env import EnvManager
@@ -68,6 +68,7 @@ def handle(self) -> int:
for constraint in requirements:
name = constraint.pop("name")
+ assert isinstance(name, str)
extras = []
for extra in self.option("extras"):
if " " in extra:
diff --git a/src/poetry/console/commands/group_command.py b/src/poetry/console/commands/group_command.py
index 1263dc6d356..4e84f0da5e1 100644
--- a/src/poetry/console/commands/group_command.py
+++ b/src/poetry/console/commands/group_command.py
@@ -15,8 +15,7 @@
if TYPE_CHECKING:
from cleo.io.inputs.option import Option
-
- from poetry.packages.project_package import ProjectPackage
+ from poetry.core.packages.project_package import ProjectPackage
class GroupCommand(EnvCommand):
diff --git a/src/poetry/console/commands/init.py b/src/poetry/console/commands/init.py
index b2271518ed9..7079a52da21 100644
--- a/src/poetry/console/commands/init.py
+++ b/src/poetry/console/commands/init.py
@@ -1,20 +1,20 @@
from __future__ import annotations
-import os
-import re
import sys
-import urllib.parse
from pathlib import Path
from typing import TYPE_CHECKING
from typing import Any
+from typing import Dict
from typing import Mapping
+from typing import Union
from cleo.helpers import option
from tomlkit import inline_table
from poetry.console.commands.command import Command
from poetry.console.commands.env_command import EnvCommand
+from poetry.utils.dependency_specification import parse_dependency_specification
from poetry.utils.helpers import canonicalize_name
@@ -24,6 +24,8 @@
from poetry.repositories import Pool
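+# A requirement maps a package name to a version constraint string or an inline table of options.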
+Requirements = Dict[str, Union[str, Mapping[str, Any]]]
+
class InitCommand(Command):
name = "init"
@@ -164,7 +166,7 @@ def handle(self) -> int:
if self.io.is_interactive():
self.line("")
- requirements = {}
+ requirements: Requirements = {}
if self.option("dependency"):
requirements = self._format_requirements(
self._determine_requirements(self.option("dependency"))
@@ -194,7 +196,7 @@ def handle(self) -> int:
if self.io.is_interactive():
self.line("")
- dev_requirements: dict[str, str] = {}
+ dev_requirements: Requirements = {}
if self.option("dev-dependency"):
dev_requirements = self._format_requirements(
self._determine_requirements(self.option("dev-dependency"))
@@ -266,9 +268,9 @@ def _determine_requirements(
requires: list[str],
allow_prereleases: bool = False,
source: str | None = None,
- ) -> list[dict[str, str | list[str]]]:
+ ) -> list[dict[str, Any]]:
if not requires:
- requires = []
+ result = []
package = self.ask(
"Search for package to add (or leave blank to continue):"
@@ -282,7 +284,7 @@ def _determine_requirements(
or "version" in constraint
):
self.line(f"Adding {package}")
- requires.append(constraint)
+ result.append(constraint)
package = self.ask("\nAdd a package:")
continue
@@ -339,16 +341,15 @@ def _determine_requirements(
constraint["version"] = package_constraint
if package is not False:
- requires.append(constraint)
+ result.append(constraint)
if self.io.is_interactive():
package = self.ask("\nAdd a package:")
- return requires
+ return result
- requires = self._parse_requirements(requires)
result = []
- for requirement in requires:
+ for requirement in self._parse_requirements(requires):
if "git" in requirement or "url" in requirement or "path" in requirement:
result.append(requirement)
continue
@@ -402,137 +403,22 @@ def _find_best_version_for_package(
def _parse_requirements(self, requirements: list[str]) -> list[dict[str, Any]]:
from poetry.core.pyproject.exceptions import PyProjectException
- from poetry.puzzle.provider import Provider
-
- result = []
-
try:
cwd = self.poetry.file.parent
except (PyProjectException, RuntimeError):
cwd = Path.cwd()
- for requirement in requirements:
- requirement = requirement.strip()
- extras = []
- extras_m = re.search(r"\[([\w\d,-_ ]+)\]$", requirement)
- if extras_m:
- extras = [e.strip() for e in extras_m.group(1).split(",")]
- requirement, _ = requirement.split("[")
-
- url_parsed = urllib.parse.urlparse(requirement)
- if url_parsed.scheme and url_parsed.netloc:
- # Url
- if url_parsed.scheme in ["git+https", "git+ssh"]:
- from poetry.core.vcs.git import Git
- from poetry.core.vcs.git import ParsedUrl
-
- parsed = ParsedUrl.parse(requirement)
- url = Git.normalize_url(requirement)
-
- pair = {"name": parsed.name, "git": url.url}
- if parsed.rev:
- pair["rev"] = url.revision
-
- if extras:
- pair["extras"] = extras
-
- package = Provider.get_package_from_vcs(
- "git", url.url, rev=pair.get("rev")
- )
- pair["name"] = package.name
- result.append(pair)
-
- continue
- elif url_parsed.scheme in ["http", "https"]:
- package = Provider.get_package_from_url(requirement)
-
- pair = {"name": package.name, "url": package.source_url}
- if extras:
- pair["extras"] = extras
-
- result.append(pair)
- continue
- elif (os.path.sep in requirement or "/" in requirement) and (
- cwd.joinpath(requirement).exists()
- or Path(requirement).expanduser().exists()
- and Path(requirement).expanduser().is_absolute()
- ):
- path = Path(requirement).expanduser()
- is_absolute = path.is_absolute()
-
- if not path.is_absolute():
- path = cwd.joinpath(requirement)
-
- if path.is_file():
- package = Provider.get_package_from_file(path.resolve())
- else:
- package = Provider.get_package_from_directory(path.resolve())
-
- result.append(
- dict(
- [
- ("name", package.name),
- (
- "path",
- path.relative_to(cwd).as_posix()
- if not is_absolute
- else path.as_posix(),
- ),
- ]
- + ([("extras", extras)] if extras else [])
- )
- )
-
- continue
-
- pair = re.sub(
-                "^([^@=: ]+)(?:@|==|(?<![<>~!])=|:| )(.*)$", "\\1 \\2", requirement
+ return [
+ parse_dependency_specification(
+ requirement=requirement,
+ env=self.env if isinstance(self, EnvCommand) and self.env else None,
+ cwd=cwd,
)
- pair = pair.strip()
-
- require: dict[str, str] = {}
- if " " in pair:
- name, version = pair.split(" ", 2)
- extras_m = re.search(r"\[([\w\d,-_]+)\]$", name)
- if extras_m:
- extras = [e.strip() for e in extras_m.group(1).split(",")]
- name, _ = name.split("[")
-
- require["name"] = name
- if version != "latest":
- require["version"] = version
- else:
- m = re.match(
- r"^([^><=!: ]+)((?:>=|<=|>|<|!=|~=|~|\^).*)$", requirement.strip()
- )
- if m:
- name, constraint = m.group(1), m.group(2)
- extras_m = re.search(r"\[([\w\d,-_]+)\]$", name)
- if extras_m:
- extras = [e.strip() for e in extras_m.group(1).split(",")]
- name, _ = name.split("[")
-
- require["name"] = name
- require["version"] = constraint
- else:
- extras_m = re.search(r"\[([\w\d,-_]+)\]$", pair)
- if extras_m:
- extras = [e.strip() for e in extras_m.group(1).split(",")]
- pair, _ = pair.split("[")
-
- require["name"] = pair
-
- if extras:
- require["extras"] = extras
-
- result.append(require)
-
- return result
+ for requirement in requirements
+ ]
- def _format_requirements(
- self, requirements: list[dict[str, str]]
- ) -> Mapping[str, str | Mapping[str, str]]:
- requires = {}
+ def _format_requirements(self, requirements: list[dict[str, str]]) -> Requirements:
+ requires: Requirements = {}
for requirement in requirements:
name = requirement.pop("name")
constraint: str | InlineTable
diff --git a/src/poetry/console/commands/install.py b/src/poetry/console/commands/install.py
index a377120d4b1..8af7cbdc420 100644
--- a/src/poetry/console/commands/install.py
+++ b/src/poetry/console/commands/install.py
@@ -73,7 +73,7 @@ class InstallCommand(InstallerCommand):
def handle(self) -> int:
from poetry.core.masonry.utils.module import ModuleOrPackageNotFound
- from poetry.masonry.builders import EditableBuilder
+ from poetry.masonry.builders.editable import EditableBuilder
self._installer.use_executor(
self.poetry.config.get("experimental.new-installer", False)
diff --git a/src/poetry/console/commands/plugin/add.py b/src/poetry/console/commands/plugin/add.py
index 6fc45c28c2a..ea41fbb1f71 100644
--- a/src/poetry/console/commands/plugin/add.py
+++ b/src/poetry/console/commands/plugin/add.py
@@ -2,6 +2,7 @@
import os
+from typing import Any
from typing import cast
from cleo.helpers import argument
@@ -61,11 +62,11 @@ def handle(self) -> int:
from cleo.io.inputs.string_input import StringInput
from cleo.io.io import IO
+ from poetry.core.packages.project_package import ProjectPackage
from poetry.core.pyproject.toml import PyProjectTOML
from poetry.core.semver.helpers import parse_constraint
from poetry.factory import Factory
- from poetry.packages.project_package import ProjectPackage
from poetry.repositories.installed_repository import InstalledRepository
from poetry.utils.env import EnvManager
@@ -78,7 +79,7 @@ def handle(self) -> int:
# We check for the plugins existence first.
if env_dir.joinpath("pyproject.toml").exists():
- pyproject = tomlkit.loads(
+ pyproject: dict[str, Any] = tomlkit.loads(
env_dir.joinpath("pyproject.toml").read_text(encoding="utf-8")
)
poetry_content = pyproject["tool"]["poetry"]
@@ -115,21 +116,23 @@ def handle(self) -> int:
break
- root_package.python_versions = ".".join( # type: ignore[union-attr]
+ assert root_package is not None
+
+ root_package.python_versions = ".".join(
str(v) for v in system_env.version_info[:3]
)
# We create a `pyproject.toml` file based on all the information
# we have about the current environment.
if not env_dir.joinpath("pyproject.toml").exists():
Factory.create_pyproject_from_package(
- root_package, # type: ignore[arg-type]
+ root_package,
env_dir,
)
# We add the plugins to the dependencies section of the previously
# created `pyproject.toml` file
- pyproject = PyProjectTOML(env_dir.joinpath("pyproject.toml"))
- poetry_content = pyproject.poetry_config
+ pyproject_toml = PyProjectTOML(env_dir.joinpath("pyproject.toml"))
+ poetry_content = pyproject_toml.poetry_config
poetry_dependency_section = poetry_content["dependencies"]
plugin_names = []
for plugin in plugins:
@@ -137,7 +140,7 @@ def handle(self) -> int:
# Validate version constraint
parse_constraint(plugin["version"])
- constraint = tomlkit.inline_table()
+ constraint: dict[str, Any] = tomlkit.inline_table()
for name, value in plugin.items():
if name == "name":
continue
@@ -150,7 +153,7 @@ def handle(self) -> int:
poetry_dependency_section[plugin["name"]] = constraint
plugin_names.append(plugin["name"])
- pyproject.save()
+ pyproject_toml.save()
# From this point forward, all the logic will be deferred to
# the update command, by using the previously created `pyproject.toml`
@@ -167,16 +170,17 @@ def handle(self) -> int:
if self.option("dry-run"):
argv.append("--dry-run")
- return update_command.run(
+ exit_code: int = update_command.run(
IO(
StringInput(" ".join(argv)),
self._io.output,
self._io.error_output,
)
)
+ return exit_code
def get_existing_packages_from_input(
- self, packages: list[str], poetry_content: dict, target_section: str
+ self, packages: list[str], poetry_content: dict[str, Any], target_section: str
) -> list[str]:
existing_packages = []
diff --git a/src/poetry/console/commands/plugin/remove.py b/src/poetry/console/commands/plugin/remove.py
index ae8db05e795..b7918feafa2 100644
--- a/src/poetry/console/commands/plugin/remove.py
+++ b/src/poetry/console/commands/plugin/remove.py
@@ -59,10 +59,11 @@ def handle(self) -> int:
if self.option("dry-run"):
argv.append("--dry-run")
- return remove_command.run(
+ exit_code: int = remove_command.run(
IO(
StringInput(" ".join(argv)),
self._io.output,
self._io.error_output,
)
)
+ return exit_code
diff --git a/src/poetry/console/commands/plugin/show.py b/src/poetry/console/commands/plugin/show.py
index 1a529200f02..f5d1bbaf4ea 100644
--- a/src/poetry/console/commands/plugin/show.py
+++ b/src/poetry/console/commands/plugin/show.py
@@ -1,16 +1,11 @@
from __future__ import annotations
from collections import defaultdict
-from typing import TYPE_CHECKING
-from typing import DefaultDict
+from typing import Any
from poetry.console.commands.command import Command
-if TYPE_CHECKING:
- from poetry.core.packages.package import Package
-
-
class PluginShowCommand(Command):
name = "plugin show"
@@ -26,7 +21,7 @@ def handle(self) -> int:
from poetry.utils.helpers import canonicalize_name
from poetry.utils.helpers import pluralize
- plugins: DefaultDict[str, dict[str, Package | list[str]]] = defaultdict(
+ plugins: dict[str, dict[str, Any]] = defaultdict(
lambda: {
"package": None,
"plugins": [],
diff --git a/src/poetry/console/commands/remove.py b/src/poetry/console/commands/remove.py
index 07a1011c564..5dbf4499750 100644
--- a/src/poetry/console/commands/remove.py
+++ b/src/poetry/console/commands/remove.py
@@ -4,6 +4,7 @@
from cleo.helpers import argument
from cleo.helpers import option
+from tomlkit.toml_document import TOMLDocument
try:
@@ -50,7 +51,7 @@ def handle(self) -> int:
else:
group = self.option("group", self.default_group)
- content = self.poetry.file.read()
+ content: dict[str, Any] = self.poetry.file.read()
poetry_content = content["tool"]["poetry"]
if group is None:
@@ -114,6 +115,7 @@ def handle(self) -> int:
status = self._installer.run()
if not self.option("dry-run") and status == 0:
+ assert isinstance(content, TOMLDocument)
self.poetry.file.write(content)
return status
diff --git a/src/poetry/console/commands/self/update.py b/src/poetry/console/commands/self/update.py
index 32ffaff1cef..4f362db3d85 100644
--- a/src/poetry/console/commands/self/update.py
+++ b/src/poetry/console/commands/self/update.py
@@ -191,7 +191,7 @@ def _update(self, version: Version) -> None:
root,
NullLocker(self.data_dir.joinpath("poetry.lock"), {}),
self.pool,
- Config(),
+ config=Config.create(),
installed=installed,
)
installer.update(True)
diff --git a/src/poetry/console/commands/show.py b/src/poetry/console/commands/show.py
index 22e9b7b4f2c..8764595c3e3 100644
--- a/src/poetry/console/commands/show.py
+++ b/src/poetry/console/commands/show.py
@@ -1,9 +1,13 @@
from __future__ import annotations
from typing import TYPE_CHECKING
+from typing import cast
from cleo.helpers import argument
from cleo.helpers import option
+from poetry.core.packages.directory_dependency import DirectoryDependency
+from poetry.core.packages.file_dependency import FileDependency
+from poetry.core.packages.vcs_dependency import VCSDependency
from poetry.console.commands.group_command import GroupCommand
@@ -12,12 +16,23 @@
from cleo.io.io import IO
from poetry.core.packages.dependency import Dependency
from poetry.core.packages.package import Package
+ from poetry.core.packages.project_package import ProjectPackage
- from poetry.packages.project_package import ProjectPackage
from poetry.repositories.installed_repository import InstalledRepository
from poetry.repositories.repository import Repository
+def reverse_deps(pkg: Package, repo: Repository) -> dict[str, str]:
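+    """Map each package in repo that depends on pkg to the constraint it declares on it."""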
+ required_by = {}
+ for locked in repo.packages:
+ dependencies = {d.name: d.pretty_constraint for d in locked.requires}
+
+ if pkg.name in dependencies:
+ required_by[locked.pretty_name] = dependencies[pkg.name]
+
+ return required_by
+
+
class ShowCommand(GroupCommand):
name = "show"
@@ -32,6 +47,11 @@ class ShowCommand(GroupCommand):
"Do not list the development dependencies. (Deprecated)",
),
option("tree", "t", "List the dependencies as a tree."),
+ option(
+ "why",
+ None,
+ "When listing the tree for a single package, start from parents.",
+ ),
option("latest", "l", "Show the latest version."),
option(
"outdated",
@@ -65,6 +85,23 @@ def handle(self) -> int | None:
if self.option("tree"):
self.init_styles(self.io)
+ if self.option("why"):
+ if self.option("tree") and package is None:
+ self.line_error(
+ "Error: --why requires a package when combined with"
+ " --tree."
+ )
+
+ return 1
+
+ if not self.option("tree") and package:
+ self.line_error(
+ "Error: --why cannot be used without --tree when displaying"
+ " a single package."
+ )
+
+ return 1
+
if self.option("outdated"):
self._io.input.set_option("latest", True)
@@ -79,13 +116,13 @@ def handle(self) -> int | None:
root = self.project_with_activated_groups_only()
# Show tree view if requested
- if self.option("tree") and not package:
+ if self.option("tree") and package is None:
requires = root.all_requires
packages = locked_repo.packages
- for pkg in packages:
+ for p in packages:
for require in requires:
- if pkg.name == require.name:
- self.display_package_tree(self._io, pkg, locked_repo)
+ if p.name == require.name:
+ self.display_package_tree(self._io, p, locked_repo)
break
return 0
@@ -117,17 +154,38 @@ def handle(self) -> int | None:
if not pkg:
raise ValueError(f"Package {package} not found")
+ required_by = reverse_deps(pkg, locked_repo)
+
if self.option("tree"):
- self.display_package_tree(self.io, pkg, locked_repo)
+ if self.option("why"):
+                    # By default we query the subtree for pkg, but if any reverse
+                    # dependencies exist we query the subtree of each of them in turn.
+ packages = [pkg]
+ if required_by:
+ packages = [
+ p
+ for p in locked_packages
+ for r in required_by.keys()
+ if p.name == r
+ ]
+ else:
+ # if no rev-deps exist we'll make this clear as it can otherwise
+ # look very odd for packages that also have no or few direct
+ # dependencies
+ self._io.write_line(
+ f"Package {package} is a direct dependency."
+ )
- return 0
+ for p in packages:
+ self.display_package_tree(
+ self._io, p, locked_repo, why_package=pkg
+ )
- required_by = {}
- for locked in locked_packages:
- dependencies = {d.name: d.pretty_constraint for d in locked.requires}
+ else:
+ self.display_package_tree(self._io, pkg, locked_repo)
- if pkg.name in dependencies:
- required_by[locked.pretty_name] = dependencies[pkg.name]
+ return 0
rows = [
["name>", f" : {pkg.pretty_name}>"],
@@ -159,7 +217,7 @@ def handle(self) -> int | None:
show_all = self.option("all")
terminal = Terminal()
width = terminal.width
- name_length = version_length = latest_length = 0
+ name_length = version_length = latest_length = required_by_length = 0
latest_packages = {}
latest_statuses = {}
installed_repo = InstalledRepository.load(self.env)
@@ -204,6 +262,13 @@ def handle(self) -> int | None:
)
),
)
+
+ if self.option("why"):
+ required_by = reverse_deps(locked, locked_repo)
+ required_by_length = max(
+ required_by_length,
+ len(" from " + ",".join(required_by.keys())),
+ )
else:
name_length = max(name_length, current_length)
version_length = max(
@@ -215,9 +280,20 @@ def handle(self) -> int | None:
),
)
+ if self.option("why"):
+ required_by = reverse_deps(locked, locked_repo)
+ required_by_length = max(
+ required_by_length, len(" from " + ",".join(required_by.keys()))
+ )
+
write_version = name_length + version_length + 3 <= width
write_latest = name_length + version_length + latest_length + 3 <= width
- write_description = name_length + version_length + latest_length + 24 <= width
+
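+        # The "from ..." column is shown only when --why is requested and it still fits in the terminal.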
+ why_end_column = (
+ name_length + version_length + latest_length + required_by_length
+ )
+ write_why = self.option("why") and (why_end_column + 3) <= width
+ write_description = (why_end_column + 24) <= width
for locked in locked_packages:
color = "cyan"
@@ -269,9 +345,21 @@ def handle(self) -> int | None:
)
line += f" {version:{latest_length}}>"
+ if write_why:
+ required_by = reverse_deps(locked, locked_repo)
+ if required_by:
+ content = ",".join(required_by.keys())
+ # subtract 6 for ' from '
+ line += f" from {content:{required_by_length - 6}}"
+ else:
+ line += " " * required_by_length
+
if write_description:
description = locked.description
- remaining = width - name_length - version_length - 4
+ remaining = (
+ width - name_length - version_length - required_by_length - 4
+ )
+
if show_latest:
remaining -= latest_length
@@ -281,10 +369,15 @@ def handle(self) -> int | None:
line += " " + description
self.line(line)
+
return None
def display_package_tree(
- self, io: IO, package: Package, installed_repo: Repository
+ self,
+ io: IO,
+ package: Package,
+ installed_repo: Repository,
+ why_package: Package | None = None,
) -> None:
io.write(f"{package.pretty_name}")
description = ""
@@ -293,8 +386,15 @@ def display_package_tree(
io.write_line(f" {package.pretty_version}{description}")
- dependencies = package.requires
- dependencies = sorted(dependencies, key=lambda x: x.name)
+ if why_package is not None:
+ dependencies = [p for p in package.requires if p.name == why_package.name]
+ else:
+ dependencies = package.requires
+ dependencies = sorted(
+ dependencies,
+ key=lambda x: x.name, # type: ignore[no-any-return]
+ )
+
tree_bar = "├"
total = len(dependencies)
for i, dependency in enumerate(dependencies, 1):
@@ -334,7 +434,10 @@ def _display_tree(
break
- dependencies = sorted(dependencies, key=lambda x: x.name)
+ dependencies = sorted(
+ dependencies,
+ key=lambda x: x.name, # type: ignore[no-any-return]
+ )
tree_bar = previous_tree_bar + " ├"
total = len(dependencies)
for i, dependency in enumerate(dependencies, 1):
@@ -383,7 +486,7 @@ def init_styles(self, io: IO) -> None:
def find_latest_package(
self, package: Package, root: ProjectPackage
- ) -> Package | bool:
+ ) -> Package | None:
from cleo.io.null_io import NullIO
from poetry.puzzle.provider import Provider
@@ -398,10 +501,13 @@ def find_latest_package(
provider = Provider(root, self.poetry.pool, NullIO())
if dep.is_vcs():
+ dep = cast(VCSDependency, dep)
return provider.search_for_vcs(dep)[0]
if dep.is_file():
+ dep = cast(FileDependency, dep)
return provider.search_for_file(dep)[0]
if dep.is_directory():
+ dep = cast(DirectoryDependency, dep)
return provider.search_for_directory(dep)[0]
name = package.name
diff --git a/src/poetry/console/commands/version.py b/src/poetry/console/commands/version.py
index 0ae4302ac5e..28b3c6815f3 100644
--- a/src/poetry/console/commands/version.py
+++ b/src/poetry/console/commands/version.py
@@ -1,9 +1,11 @@
from __future__ import annotations
from typing import TYPE_CHECKING
+from typing import Any
from cleo.helpers import argument
from cleo.helpers import option
+from tomlkit.toml_document import TOMLDocument
from poetry.console.commands.command import Command
@@ -27,7 +29,14 @@ class VersionCommand(Command):
optional=True,
)
]
- options = [option("short", "s", "Output the version number only")]
+ options = [
+ option("short", "s", "Output the version number only"),
+ option(
+ "dry-run",
+ None,
+ "Do not update pyproject.toml file",
+ ),
+ ]
help = """\
The version command shows the current version of the project or bumps the version of
@@ -64,11 +73,13 @@ def handle(self) -> None:
f" to {version}>"
)
- content = self.poetry.file.read()
- poetry_content = content["tool"]["poetry"]
- poetry_content["version"] = version.text
+ if not self.option("dry-run"):
+ content: dict[str, Any] = self.poetry.file.read()
+ poetry_content = content["tool"]["poetry"]
+ poetry_content["version"] = version.text
- self.poetry.file.write(content)
+ assert isinstance(content, TOMLDocument)
+ self.poetry.file.write(content)
else:
if self.option("short"):
self.line(self.poetry.package.pretty_version)
@@ -100,7 +111,9 @@ def increment_version(self, version: str, rule: str) -> Version:
new = new.first_prerelease()
elif rule == "prerelease":
if parsed.is_unstable():
- new = Version(parsed.epoch, parsed.release, parsed.pre.next())
+ pre = parsed.pre
+ assert pre is not None
+ new = Version(parsed.epoch, parsed.release, pre.next())
else:
new = parsed.next_patch().first_prerelease()
else:
diff --git a/src/poetry/console/exceptions.py b/src/poetry/console/exceptions.py
index f72903f757a..84223c41923 100644
--- a/src/poetry/console/exceptions.py
+++ b/src/poetry/console/exceptions.py
@@ -3,6 +3,6 @@
from cleo.exceptions import CleoSimpleException
-class PoetrySimpleConsoleException(CleoSimpleException):
+class PoetrySimpleConsoleException(CleoSimpleException): # type: ignore[misc]
pass
diff --git a/src/poetry/console/io/inputs/run_argv_input.py b/src/poetry/console/io/inputs/run_argv_input.py
index 964d88c2c17..b27f19cab37 100644
--- a/src/poetry/console/io/inputs/run_argv_input.py
+++ b/src/poetry/console/io/inputs/run_argv_input.py
@@ -9,7 +9,7 @@
from cleo.io.inputs.definition import Definition
-class RunArgvInput(ArgvInput):
+class RunArgvInput(ArgvInput): # type: ignore[misc]
def __init__(
self,
argv: list[str] | None = None,
diff --git a/src/poetry/factory.py b/src/poetry/factory.py
index 5ff5be0b632..3057729641d 100644
--- a/src/poetry/factory.py
+++ b/src/poetry/factory.py
@@ -1,29 +1,47 @@
from __future__ import annotations
-from pathlib import Path
+import contextlib
+import logging
+import re
+import warnings
+
from typing import TYPE_CHECKING
+from typing import Any
+from typing import cast
from cleo.io.null_io import NullIO
from poetry.core.factory import Factory as BaseFactory
+from poetry.core.packages.project_package import ProjectPackage
from poetry.core.toml.file import TOMLFile
+from tomlkit.toml_document import TOMLDocument
from poetry.config.config import Config
-from poetry.config.file_config_source import FileConfigSource
-from poetry.locations import CONFIG_DIR
from poetry.packages.locker import Locker
-from poetry.packages.project_package import ProjectPackage
from poetry.plugins.plugin import Plugin
from poetry.plugins.plugin_manager import PluginManager
from poetry.poetry import Poetry
+try:
+ from poetry.core.packages.dependency_group import MAIN_GROUP
+except ImportError:
+ MAIN_GROUP = "default"
+
+
if TYPE_CHECKING:
+ from pathlib import Path
+
from cleo.io.io import IO
+ from poetry.core.packages.package import Package
from poetry.repositories.legacy_repository import LegacyRepository
+ from poetry.utils.dependency_specification import DependencySpec
+
+logger = logging.getLogger(__name__)
-class Factory(BaseFactory):
+
+class Factory(BaseFactory): # type: ignore[misc]
"""
Factory class to create various elements needed by Poetry.
"""
@@ -33,6 +51,7 @@ def create_poetry(
cwd: Path | None = None,
io: IO | None = None,
disable_plugins: bool = False,
+ disable_cache: bool = False,
) -> Poetry:
if io is None:
io = NullIO()
@@ -44,7 +63,12 @@ def create_poetry(
)
# Loading global configuration
- config = self.create_config(io)
+ with warnings.catch_warnings():
+            # this is preserved to ensure export plugin tests pass in CI;
+            # once poetry-plugin-export is updated to a version that does not
+            # use Factory.create_config(), this can be safely removed.
+ warnings.filterwarnings("ignore", category=DeprecationWarning)
+ config = self.create_config()
# Loading local configuration
local_config_file = TOMLFile(base_poetry.file.parent / "poetry.toml")
@@ -75,7 +99,11 @@ def create_poetry(
# Configuring sources
self.configure_sources(
- poetry, poetry.local_config.get("source", []), config, io
+ poetry,
+ poetry.local_config.get("source", []),
+ config,
+ io,
+ disable_cache=disable_cache,
)
plugin_manager = PluginManager(Plugin.group, disable_plugins=disable_plugins)
@@ -91,42 +119,30 @@ def get_package(cls, name: str, version: str) -> ProjectPackage:
@classmethod
def create_config(cls, io: IO | None = None) -> Config:
- if io is None:
- io = NullIO()
-
- config = Config()
- # Load global config
- config_file = TOMLFile(Path(CONFIG_DIR) / "config.toml")
- if config_file.exists():
- if io.is_debug():
- io.write_line(
- f"Loading configuration file {config_file.path}"
- )
-
- config.merge(config_file.read())
-
- config.set_config_source(FileConfigSource(config_file))
-
- # Load global auth config
- auth_config_file = TOMLFile(Path(CONFIG_DIR) / "auth.toml")
- if auth_config_file.exists():
- if io.is_debug():
- io.write_line(
- f"Loading configuration file {auth_config_file.path}"
- )
-
- config.merge(auth_config_file.read())
-
- config.set_auth_config_source(FileConfigSource(auth_config_file))
-
- return config
+ if io is not None:
+ logger.debug("Ignoring provided io when creating config.")
+ warnings.warn(
+ "Use of Factory.create_config() is deprecated, use Config.create() instead",
+ DeprecationWarning,
+ )
+ return Config.create()
@classmethod
def configure_sources(
- cls, poetry: Poetry, sources: list[dict[str, str]], config: Config, io: IO
+ cls,
+ poetry: Poetry,
+ sources: list[dict[str, str]],
+ config: Config,
+ io: IO,
+ disable_cache: bool = False,
) -> None:
+ if disable_cache:
+ logger.debug("Disabling source caches")
+
for source in sources:
- repository = cls.create_legacy_repository(source, config)
+ repository = cls.create_package_source(
+ source, config, disable_cache=disable_cache
+ )
is_default = bool(source.get("default", False))
is_secondary = bool(source.get("secondary", False))
if io.is_debug():
@@ -150,15 +166,16 @@ def configure_sources(
from poetry.repositories.pypi_repository import PyPiRepository
default = not poetry.pool.has_primary_repositories()
- poetry.pool.add_repository(PyPiRepository(), default, not default)
+ poetry.pool.add_repository(
+ PyPiRepository(disable_cache=disable_cache), default, not default
+ )
@classmethod
- def create_legacy_repository(
- cls, source: dict[str, str], auth_config: Config
+ def create_package_source(
+ cls, source: dict[str, str], auth_config: Config, disable_cache: bool = False
) -> LegacyRepository:
from poetry.repositories.legacy_repository import LegacyRepository
- from poetry.utils.helpers import get_cert
- from poetry.utils.helpers import get_client_cert
+ from poetry.repositories.single_page_repository import SinglePageRepository
if "url" not in source:
raise RuntimeError("Unsupported source specified")
@@ -169,54 +186,128 @@ def create_legacy_repository(
name = source["name"]
url = source["url"]
- return LegacyRepository(
+ repository_class = LegacyRepository
+
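+    # Source URLs that point at a single HTML page are handled by the single-page repository.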
+ if re.match(r".*\.(htm|html)$", url):
+ repository_class = SinglePageRepository
+
+ return repository_class(
name,
url,
config=auth_config,
- cert=get_cert(auth_config, name),
- client_cert=get_client_cert(auth_config, name),
+ disable_cache=disable_cache,
)
@classmethod
- def create_pyproject_from_package(cls, package: ProjectPackage, path: Path) -> None:
+ def create_pyproject_from_package(
+ cls, package: Package, path: Path | None = None
+ ) -> TOMLDocument:
import tomlkit
- from poetry.layouts.layout import POETRY_DEFAULT
+ from poetry.utils.dependency_specification import dependency_to_specification
+
+ pyproject: dict[str, Any] = tomlkit.document()
- pyproject = tomlkit.loads(POETRY_DEFAULT)
- content = pyproject["tool"]["poetry"]
+ tool_table = tomlkit.table()
+ tool_table._is_super_table = True
+ pyproject["tool"] = tool_table
+
+ content: dict[str, Any] = tomlkit.table()
+ pyproject["tool"]["poetry"] = content
content["name"] = package.name
content["version"] = package.version.text
content["description"] = package.description
content["authors"] = package.authors
+ content["license"] = package.license.id if package.license else ""
+
+ if package.classifiers:
+ content["classifiers"] = package.classifiers
+
+ for key, attr in {
+ ("documentation", "documentation_url"),
+ ("repository", "repository_url"),
+ ("homepage", "homepage"),
+ ("maintainers", "maintainers"),
+ ("keywords", "keywords"),
+ }:
+ value = getattr(package, attr, None)
+ if value:
+ content[key] = value
+
+ readmes = []
+
+ for readme in package.readmes:
+ readme_posix_path = readme.as_posix()
+
+ with contextlib.suppress(ValueError):
+ if package.root_dir:
+ readme_posix_path = readme.relative_to(package.root_dir).as_posix()
+
+ readmes.append(readme_posix_path)
- dependency_section = content["dependencies"]
+ if readmes:
+ content["readme"] = readmes
+
+ optional_dependencies = set()
+ extras_section = None
+
+ if package.extras:
+ extras_section = tomlkit.table()
+
+ for extra in package.extras:
+ _dependencies = []
+ for dependency in package.extras[extra]:
+ _dependencies.append(dependency.name)
+ optional_dependencies.add(dependency.name)
+
+ extras_section[extra] = _dependencies
+
+ optional_dependencies = set(optional_dependencies)
+ dependency_section = content["dependencies"] = tomlkit.table()
dependency_section["python"] = package.python_versions
- for dep in package.requires:
- constraint = tomlkit.inline_table()
- if dep.is_vcs():
- constraint[dep.vcs] = dep.source_url
+ for dep in package.all_requires:
+ constraint: DependencySpec | str = dependency_to_specification(
+ dep, tomlkit.inline_table()
+ )
- if dep.reference:
- constraint["rev"] = dep.reference
- elif dep.is_file() or dep.is_directory():
- constraint["path"] = dep.source_url
- else:
- constraint["version"] = dep.pretty_constraint
+ if not isinstance(constraint, str):
+ if dep.name in optional_dependencies:
+ constraint["optional"] = True
- if not dep.marker.is_any():
- constraint["markers"] = str(dep.marker)
+ if len(constraint) == 1 and "version" in constraint:
+ constraint = cast(str, constraint["version"])
+ elif not constraint:
+ constraint = "*"
- if dep.extras:
- constraint["extras"] = sorted(dep.extras)
+ for group in dep.groups:
+ if group == MAIN_GROUP:
+ dependency_section[dep.name] = constraint
+ else:
+ if "group" not in content:
+ _table = tomlkit.table()
+ _table._is_super_table = True
+ content["group"] = _table
- if len(constraint) == 1 and "version" in constraint:
- constraint = constraint["version"]
+ if group not in content["group"]:
+ _table = tomlkit.table()
+ _table._is_super_table = True
+ content["group"][group] = _table
- dependency_section[dep.name] = constraint
+ if "dependencies" not in content["group"][group]:
+ content["group"][group]["dependencies"] = tomlkit.table()
- path.joinpath("pyproject.toml").write_text(
- pyproject.as_string(), encoding="utf-8"
- )
+ content["group"][group]["dependencies"][dep.name] = constraint
+
+ if extras_section:
+ content["extras"] = extras_section
+
+ pyproject.add(tomlkit.nl()) # type: ignore[attr-defined]
+
+ if path:
+ path.joinpath("pyproject.toml").write_text(
+ pyproject.as_string(), encoding="utf-8" # type: ignore[attr-defined]
+ )
+
+ return cast(TOMLDocument, pyproject)
diff --git a/src/poetry/inspection/info.py b/src/poetry/inspection/info.py
index 656bbb63a47..ea4c56f88c9 100644
--- a/src/poetry/inspection/info.py
+++ b/src/poetry/inspection/info.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import functools
import glob
import logging
import os
@@ -8,7 +9,10 @@
from pathlib import Path
from typing import TYPE_CHECKING
+from typing import Any
+from typing import ContextManager
from typing import Iterator
+from typing import cast
import pkginfo
@@ -26,6 +30,8 @@
if TYPE_CHECKING:
+ from collections.abc import Callable
+
from poetry.core.packages.project_package import ProjectPackage
@@ -71,7 +77,7 @@ def __init__(
requires_python: str | None = None,
files: list[dict[str, str]] | None = None,
cache_version: str | None = None,
- ):
+ ) -> None:
self.name = name
self.version = version
self.summary = summary
@@ -80,9 +86,9 @@ def __init__(
self.requires_python = requires_python
self.files = files or []
self._cache_version = cache_version
- self._source_type = None
- self._source_url = None
- self._source_reference = None
+ self._source_type: str | None = None
+ self._source_url: str | None = None
+ self._source_reference: str | None = None
@property
def cache_version(self) -> str | None:
@@ -99,7 +105,7 @@ def update(self, other: PackageInfo) -> PackageInfo:
self._cache_version = other.cache_version or self._cache_version
return self
- def asdict(self) -> dict[str, str | list[str] | None]:
+ def asdict(self) -> dict[str, Any]:
"""
Helper method to convert package info into a dictionary used for caching.
"""
@@ -115,7 +121,7 @@ def asdict(self) -> dict[str, str | list[str] | None]:
}
@classmethod
- def load(cls, data: dict[str, str | list[str] | None]) -> PackageInfo:
+ def load(cls, data: dict[str, Any]) -> PackageInfo:
"""
Helper method to load data from a dictionary produced by `PackageInfo.asdict()`.
@@ -168,7 +174,9 @@ def to_package(
if root_dir or (self._source_type in {"directory"} and self._source_url):
# this is a local poetry project, this means we can extract "richer"
# requirement information, eg: development requirements etc.
- poetry_package = self._get_poetry_package(path=root_dir or self._source_url)
+ poetry_package = self._get_poetry_package(
+ path=root_dir or Path(cast(str, self._source_url))
+ )
if poetry_package:
package.extras = poetry_package.extras
for dependency in poetry_package.requires:
@@ -273,6 +281,7 @@ def _from_sdist_file(cls, path: Path) -> PackageInfo:
# So, we unpack and introspect
suffix = path.suffix
+ context: Callable[[str], ContextManager[zipfile.ZipFile | tarfile.TarFile]]
if suffix == ".zip":
context = zipfile.ZipFile
else:
@@ -285,8 +294,8 @@ def _from_sdist_file(cls, path: Path) -> PackageInfo:
context = tarfile.open
- with temporary_directory() as tmp:
- tmp = Path(tmp)
+ with temporary_directory() as tmp_str:
+ tmp = Path(tmp_str)
with context(path.as_posix()) as archive:
archive.extractall(tmp.as_posix())
@@ -393,7 +402,7 @@ def from_metadata(cls, path: Path) -> PackageInfo | None:
if path.suffix in {".dist-info", ".egg-info"}:
directories = [path]
else:
- directories = cls._find_dist_info(path=path)
+ directories = list(cls._find_dist_info(path=path))
for directory in directories:
try:
@@ -449,76 +458,6 @@ def _get_poetry_package(path: Path) -> ProjectPackage | None:
return Factory().create_poetry(path).package
return None
- @classmethod
- def _pep517_metadata(cls, path: Path) -> PackageInfo:
- """
- Helper method to use PEP-517 library to build and read package metadata.
-
- :param path: Path to package source to build and read metadata for.
- """
- info = None
- try:
- info = cls.from_setup_files(path)
- if all([info.version, info.name, info.requires_dist]):
- return info
- except PackageInfoError:
- pass
-
- with ephemeral_environment(
- with_pip=True, with_wheel=True, with_setuptools=True
- ) as venv:
- # TODO: cache PEP 517 build environment corresponding to each project venv
- dest_dir = venv.path.parent / "dist"
- dest_dir.mkdir()
-
- pep517_meta_build_script = PEP517_META_BUILD.format(
- source=path.as_posix(), dest=dest_dir.as_posix()
- )
-
- try:
- venv.run_pip(
- "install",
- "--disable-pip-version-check",
- "--ignore-installed",
- *PEP517_META_BUILD_DEPS,
- )
- venv.run(
- "python",
- "-",
- input_=pep517_meta_build_script,
- )
- return cls.from_metadata(dest_dir)
- except EnvCommandError as e:
- # something went wrong while attempting pep517 metadata build
- # fallback to egg_info if setup.py available
- cls._log(f"PEP517 build failed: {e}", level="debug")
- setup_py = path / "setup.py"
- if not setup_py.exists():
- raise PackageInfoError(
- path,
- e,
- "No fallback setup.py file was found to generate egg_info.",
- )
-
- cwd = Path.cwd()
- os.chdir(path.as_posix())
- try:
- venv.run("python", "setup.py", "egg_info")
- return cls.from_metadata(path)
- except EnvCommandError as fbe:
- raise PackageInfoError(
- path, "Fallback egg_info generation failed.", fbe
- )
- finally:
- os.chdir(cwd.as_posix())
-
- if info:
- cls._log(f"Falling back to parsed setup.py file for {path}", "debug")
- return info
-
- # if we reach here, everything has failed and all hope is lost
- raise PackageInfoError(path, "Exhausted all core metadata sources.")
-
@classmethod
def from_directory(cls, path: Path, disable_build: bool = False) -> PackageInfo:
"""
@@ -532,6 +471,7 @@ def from_directory(cls, path: Path, disable_build: bool = False) -> PackageInfo:
build is attempted in order to gather metadata.
"""
project_package = cls._get_poetry_package(path)
+ info: PackageInfo | None
if project_package:
info = cls.from_package(project_package)
else:
@@ -542,13 +482,14 @@ def from_directory(cls, path: Path, disable_build: bool = False) -> PackageInfo:
if disable_build:
info = cls.from_setup_files(path)
else:
- info = cls._pep517_metadata(path)
+ info = get_pep517_metadata(path)
except PackageInfoError:
if not info:
raise
# we discovered PkgInfo but no requirements were listed
+ assert info
info._source_type = "directory"
info._source_url = path.as_posix()
@@ -609,3 +550,74 @@ def from_path(cls, path: Path) -> PackageInfo:
return cls.from_bdist(path=path)
except PackageInfoError:
return cls.from_sdist(path=path)
+
+
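+# Cached because a PEP 517 metadata build is expensive and the same source path may be inspected several times.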
+@functools.lru_cache(maxsize=None)
+def get_pep517_metadata(path: Path) -> PackageInfo:
+ """
+ Helper method to use PEP-517 library to build and read package metadata.
+
+ :param path: Path to package source to build and read metadata for.
+ """
+ info = None
+ try:
+ info = PackageInfo.from_setup_files(path)
+ if all([info.version, info.name, info.requires_dist]):
+ return info
+ except PackageInfoError:
+ pass
+
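+    # The throwaway build environment keeps pip, setuptools and wheel available so the PEP 517 hooks can run.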
+ with ephemeral_environment(
+ flags={"no-pip": False, "no-setuptools": False, "no-wheel": False}
+ ) as venv:
+ # TODO: cache PEP 517 build environment corresponding to each project venv
+ dest_dir = venv.path.parent / "dist"
+ dest_dir.mkdir()
+
+ pep517_meta_build_script = PEP517_META_BUILD.format(
+ source=path.as_posix(), dest=dest_dir.as_posix()
+ )
+
+ try:
+ venv.run_pip(
+ "install",
+ "--disable-pip-version-check",
+ "--ignore-installed",
+ *PEP517_META_BUILD_DEPS,
+ )
+ venv.run(
+ "python",
+ "-",
+ input_=pep517_meta_build_script,
+ )
+ info = PackageInfo.from_metadata(dest_dir)
+ except EnvCommandError as e:
+ # something went wrong while attempting pep517 metadata build
+ # fallback to egg_info if setup.py available
+ logger.debug("PEP517 build failed: %s", e)
+ setup_py = path / "setup.py"
+ if not setup_py.exists():
+ raise PackageInfoError(
+ path,
+ e,
+ "No fallback setup.py file was found to generate egg_info.",
+ )
+
+ cwd = Path.cwd()
+ os.chdir(path.as_posix())
+ try:
+ venv.run("python", "setup.py", "egg_info")
+ info = PackageInfo.from_metadata(path)
+ except EnvCommandError as fbe:
+ raise PackageInfoError(
+ path, "Fallback egg_info generation failed.", fbe
+ )
+ finally:
+ os.chdir(cwd.as_posix())
+
+ if info:
+ logger.debug("Falling back to parsed setup.py file for %s", path)
+ return info
+
+ # if we reach here, everything has failed and all hope is lost
+ raise PackageInfoError(path, "Exhausted all core metadata sources.")
diff --git a/src/poetry/installation/__init__.py b/src/poetry/installation/__init__.py
index b7bc1c52e31..42ff15e3a35 100644
--- a/src/poetry/installation/__init__.py
+++ b/src/poetry/installation/__init__.py
@@ -1,3 +1,6 @@
from __future__ import annotations
from poetry.installation.installer import Installer
+
+
+__all__ = ["Installer"]
diff --git a/src/poetry/installation/chef.py b/src/poetry/installation/chef.py
index 0cf7b54e3b5..51cad799ab7 100644
--- a/src/poetry/installation/chef.py
+++ b/src/poetry/installation/chef.py
@@ -51,7 +51,7 @@ def get_cached_archive_for_link(self, link: Link) -> Link | None:
if not archives:
return link
- candidates = []
+ candidates: list[tuple[float | None, Link]] = []
for archive in archives:
if not archive.is_wheel:
candidates.append((float("inf"), archive))
diff --git a/src/poetry/installation/chooser.py b/src/poetry/installation/chooser.py
index e51610f455e..ebfbbdaa765 100644
--- a/src/poetry/installation/chooser.py
+++ b/src/poetry/installation/chooser.py
@@ -4,15 +4,19 @@
import re
from typing import TYPE_CHECKING
+from typing import Any
from packaging.tags import Tag
+from poetry.config.config import Config
+from poetry.config.config import PackageFilterPolicy
from poetry.utils.patterns import wheel_file_re
if TYPE_CHECKING:
from poetry.core.packages.package import Package
from poetry.core.packages.utils.link import Link
+ from poetry.core.semver.version import Version
from poetry.repositories.pool import Pool
from poetry.utils.env import Env
@@ -57,9 +61,13 @@ class Chooser:
A Chooser chooses an appropriate release archive for packages.
"""
- def __init__(self, pool: Pool, env: Env) -> None:
+ def __init__(self, pool: Pool, env: Env, config: Config | None = None) -> None:
self._pool = pool
self._env = env
+ self._config = config or Config.create()
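+        # Wheels are skipped entirely for packages matched by the installer.no-binary policy.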
+ self._no_binary_policy: PackageFilterPolicy = PackageFilterPolicy(
+ self._config.get("installer.no-binary", [])
+ )
def choose_for(self, package: Package) -> Link:
"""
@@ -67,15 +75,23 @@ def choose_for(self, package: Package) -> Link:
"""
links = []
for link in self._get_links(package):
- if link.is_wheel and not Wheel(link.filename).is_supported_by_environment(
- self._env
- ):
- logger.debug(
- "Skipping wheel %s as this is not supported by the current"
- " environment",
- link.filename,
- )
- continue
+ if link.is_wheel:
+ if not self._no_binary_policy.allows(package.name):
+ logger.debug(
+ "Skipping wheel for %s as requested in no binary policy for"
+ " package (%s)",
+ link.filename,
+ package.name,
+ )
+ continue
+
+ if not Wheel(link.filename).is_supported_by_environment(self._env):
+ logger.debug(
+ "Skipping wheel %s as this is not supported by the current"
+ " environment",
+ link.filename,
+ )
+ continue
if link.ext in {".egg", ".exe", ".msi", ".rpm", ".srpm"}:
logger.debug("Skipping unsupported distribution %s", link.filename)
@@ -130,7 +146,9 @@ def _get_links(self, package: Package) -> list[Link]:
return selected_links
- def _sort_key(self, package: Package, link: Link) -> tuple:
+ def _sort_key(
+ self, package: Package, link: Link
+ ) -> tuple[int, int, int, Version, tuple[Any, ...], int]:
"""
Function to pass as the `key` argument to a call to sorted() to sort
InstallationCandidates by preference.
@@ -154,7 +172,7 @@ def _sort_key(self, package: Package, link: Link) -> tuple:
comparison operators, but then different sdist links
with the same version, would have to be considered equal
"""
- build_tag = ()
+ build_tag: tuple[Any, ...] = ()
binary_preference = 0
if link.is_wheel:
wheel = Wheel(link.filename)
@@ -165,9 +183,11 @@ def _sort_key(self, package: Package, link: Link) -> tuple:
)
# TODO: Binary preference
- pri = -(wheel.get_minimum_supported_index(self._env.supported_tags))
+ pri = -(wheel.get_minimum_supported_index(self._env.supported_tags) or 0)
if wheel.build_tag is not None:
match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
+ if not match:
+ raise ValueError(f"Unable to parse build tag: {wheel.build_tag}")
build_tag_groups = match.groups()
build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
else: # sdist
diff --git a/src/poetry/installation/executor.py b/src/poetry/installation/executor.py
index 16c85006ba6..3d64507a191 100644
--- a/src/poetry/installation/executor.py
+++ b/src/poetry/installation/executor.py
@@ -12,6 +12,7 @@
from subprocess import CalledProcessError
from typing import TYPE_CHECKING
from typing import Any
+from typing import cast
from cleo.io.null_io import NullIO
from poetry.core.packages.file_dependency import FileDependency
@@ -21,24 +22,24 @@
from poetry.installation.chef import Chef
from poetry.installation.chooser import Chooser
+from poetry.installation.operations import Install
+from poetry.installation.operations import Uninstall
+from poetry.installation.operations import Update
from poetry.utils._compat import decode
from poetry.utils.authenticator import Authenticator
from poetry.utils.env import EnvCommandError
from poetry.utils.helpers import pluralize
-from poetry.utils.helpers import safe_rmtree
+from poetry.utils.helpers import remove_directory
from poetry.utils.pip import pip_install
if TYPE_CHECKING:
from cleo.io.io import IO
+ from cleo.io.outputs.section_output import SectionOutput
from poetry.core.packages.package import Package
from poetry.config.config import Config
- from poetry.installation.operations import OperationTypes
- from poetry.installation.operations.install import Install
from poetry.installation.operations.operation import Operation
- from poetry.installation.operations.uninstall import Uninstall
- from poetry.installation.operations.update import Update
from poetry.repositories import Pool
from poetry.utils.env import Env
@@ -50,7 +51,7 @@ def __init__(
pool: Pool,
config: Config,
io: IO,
- parallel: bool = None,
+ parallel: bool | None = None,
) -> None:
self._env = env
self._io = io
@@ -59,7 +60,7 @@ def __init__(
self._verbose = False
self._authenticator = Authenticator(config, self._io)
self._chef = Chef(config, self._env)
- self._chooser = Chooser(pool, self._env)
+ self._chooser = Chooser(pool, self._env, config)
if parallel is None:
parallel = config.get("installer.parallel", True)
@@ -76,7 +77,7 @@ def __init__(
self._executed_operations = 0
self._executed = {"install": 0, "update": 0, "uninstall": 0}
self._skipped = {"install": 0, "update": 0, "uninstall": 0}
- self._sections = {}
+ self._sections: dict[int, SectionOutput] = {}
self._lock = threading.Lock()
self._shutdown = False
self._hashes: dict[str, str] = {}
@@ -127,7 +128,7 @@ def pip_install(
return 0
- def execute(self, operations: list[OperationTypes]) -> int:
+ def execute(self, operations: list[Operation]) -> int:
self._total_operations = len(operations)
for job_type in self._executed:
self._executed[job_type] = 0
@@ -187,7 +188,7 @@ def _get_max_workers(desired_max_workers: int | None = None) -> int:
# (it raises a NotImplementedError), so, in this case, we assume
# that the system only has one CPU.
try:
- default_max_workers = os.cpu_count() + 4
+ default_max_workers = (os.cpu_count() or 1) + 4
except NotImplementedError:
default_max_workers = 5
@@ -195,7 +196,7 @@ def _get_max_workers(desired_max_workers: int | None = None) -> int:
return default_max_workers
return min(default_max_workers, desired_max_workers)
- def _write(self, operation: OperationTypes, line: str) -> None:
+ def _write(self, operation: Operation, line: str) -> None:
if not self.supports_fancy_output() or not self._should_write_operation(
operation
):
@@ -213,7 +214,7 @@ def _write(self, operation: OperationTypes, line: str) -> None:
section.clear()
section.write(line)
- def _execute_operation(self, operation: OperationTypes) -> None:
+ def _execute_operation(self, operation: Operation) -> None:
try:
op_message = self.get_operation_message(operation)
if self.supports_fancy_output():
@@ -290,7 +291,7 @@ def _execute_operation(self, operation: OperationTypes) -> None:
with self._lock:
self._shutdown = True
- def _do_execute_operation(self, operation: OperationTypes) -> int:
+ def _do_execute_operation(self, operation: Operation) -> int:
method = operation.job_type
operation_message = self.get_operation_message(operation)
@@ -313,7 +314,7 @@ def _do_execute_operation(self, operation: OperationTypes) -> int:
return 0
- result = getattr(self, f"_execute_{method}")(operation)
+ result: int = getattr(self, f"_execute_{method}")(operation)
if result != 0:
return result
@@ -326,9 +327,7 @@ def _do_execute_operation(self, operation: OperationTypes) -> int:
return result
- def _increment_operations_count(
- self, operation: OperationTypes, executed: bool
- ) -> None:
+ def _increment_operations_count(self, operation: Operation, executed: bool) -> None:
with self._lock:
if executed:
self._executed_operations += 1
@@ -353,7 +352,7 @@ def run_pip(self, *args: Any, **kwargs: Any) -> int:
def get_operation_message(
self,
- operation: OperationTypes,
+ operation: Operation,
done: bool = False,
error: bool = False,
warning: bool = False,
@@ -376,21 +375,21 @@ def get_operation_message(
source_operation_color += "_dark"
package_color += "_dark"
- if operation.job_type == "install":
+ if isinstance(operation, Install):
return (
f"<{base_tag}>Installing"
f" <{package_color}>{operation.package.name}{package_color}>"
f" (<{operation_color}>{operation.package.full_pretty_version}>)>"
)
- if operation.job_type == "uninstall":
+ if isinstance(operation, Uninstall):
return (
f"<{base_tag}>Removing"
f" <{package_color}>{operation.package.name}{package_color}>"
f" (<{operation_color}>{operation.package.full_pretty_version}>)>"
)
- if operation.job_type == "update":
+ if isinstance(operation, Update):
return (
f"<{base_tag}>Updating"
f" <{package_color}>{operation.initial_package.name}{package_color}> "
@@ -401,7 +400,7 @@ def get_operation_message(
)
return ""
- def _display_summary(self, operations: list[OperationTypes]) -> None:
+ def _display_summary(self, operations: list[Operation]) -> None:
installs = 0
updates = 0
uninstalls = 0
@@ -488,7 +487,7 @@ def _remove(self, operation: Uninstall) -> int:
if package.source_type == "git":
src_dir = self._env.path / "src" / package.name
if src_dir.exists():
- safe_rmtree(str(src_dir))
+ remove_directory(src_dir, force=True)
try:
return self.run_pip("uninstall", package.name, "-y")
@@ -576,7 +575,7 @@ def _install_directory(self, operation: Install | Update) -> int:
return self.pip_install(req, upgrade=True)
def _install_git(self, operation: Install | Update) -> int:
- from poetry.core.vcs import Git
+ from poetry.vcs.git import Git
package = operation.package
operation_message = self.get_operation_message(operation)
@@ -586,24 +585,15 @@ def _install_git(self, operation: Install | Update) -> int:
)
self._write(operation, message)
- src_dir = self._env.path / "src" / package.name
- if src_dir.exists():
- safe_rmtree(str(src_dir))
-
- src_dir.parent.mkdir(exist_ok=True)
-
- git = Git()
- git.clone(package.source_url, src_dir)
-
- reference = package.source_resolved_reference
- if not reference:
- reference = package.source_reference
-
- git.checkout(reference, src_dir)
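+        # Clone the repository under the environment's src directory at the resolved revision.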
+ source = Git.clone(
+ url=package.source_url,
+ source_root=self._env.path / "src",
+ revision=package.source_resolved_reference or package.source_reference,
+ )
# Now we just need to install from the source directory
original_url = package.source_url
- package._source_url = str(src_dir)
+ package._source_url = str(source.path)
status_code = self._install_directory(operation)
@@ -655,7 +645,7 @@ def _validate_archive_hash(archive: Path | Link, package: Package) -> str:
package.name,
archive_path,
)
- archive_hash = "sha256:" + file_dep.hash()
+ archive_hash: str = "sha256:" + file_dep.hash()
known_hashes = {f["hash"] for f in package.files}
if archive_hash not in known_hashes:
@@ -689,10 +679,11 @@ def _download_archive(self, operation: Install | Update, link: Link) -> Path:
if progress:
with self._lock:
+ self._sections[id(operation)].clear()
progress.start()
done = 0
- archive = self._chef.get_cache_directory_for_link(link) / link.filename
+ archive: Path = self._chef.get_cache_directory_for_link(link) / link.filename
archive.parent.mkdir(parents=True, exist_ok=True)
with archive.open("wb") as f:
for chunk in response.iter_content(chunk_size=4096):
@@ -716,7 +707,7 @@ def _download_archive(self, operation: Install | Update, link: Link) -> Path:
def _should_write_operation(self, operation: Operation) -> bool:
return not operation.skipped or self._dry_run or self._verbose
- def _save_url_reference(self, operation: OperationTypes) -> None:
+ def _save_url_reference(self, operation: Operation) -> None:
"""
Create and store a PEP-610 `direct_url.json` file, if needed.
"""
@@ -741,7 +732,7 @@ def _save_url_reference(self, operation: OperationTypes) -> None:
direct_url_json.unlink()
return
- url_reference = None
+ url_reference: dict[str, Any] | None = None
if package.source_type == "git":
url_reference = self._create_git_url_reference(package)
@@ -756,26 +747,16 @@ def _save_url_reference(self, operation: OperationTypes) -> None:
for dist in self._env.site_packages.distributions(
name=package.name, writable_only=True
):
- dist._path.joinpath("direct_url.json").write_text(
- json.dumps(url_reference),
- encoding="utf-8",
- )
+ dist_path = cast(Path, dist._path) # type: ignore[attr-defined]
+ url = dist_path / "direct_url.json"
+ url.write_text(json.dumps(url_reference), encoding="utf-8")
- record = dist._path.joinpath("RECORD")
+ record = dist_path / "RECORD"
if record.exists():
with record.open(mode="a", encoding="utf-8") as f:
writer = csv.writer(f)
- writer.writerow(
- [
- str(
- dist._path.joinpath("direct_url.json").relative_to(
- record.parent.parent
- )
- ),
- "",
- "",
- ]
- )
+ path = url.relative_to(record.parent.parent)
+ writer.writerow([str(path), "", ""])
def _create_git_url_reference(
self, package: Package
@@ -811,24 +792,20 @@ def _create_file_url_reference(
if package.name in self._hashes:
archive_info["hash"] = self._hashes[package.name]
- reference = {
+ return {
"url": Path(package.source_url).as_uri(),
"archive_info": archive_info,
}
- return reference
-
def _create_directory_url_reference(
self, package: Package
- ) -> dict[str, str | dict[str, str]]:
+ ) -> dict[str, str | dict[str, bool]]:
dir_info = {}
if package.develop:
dir_info["editable"] = True
- reference = {
+ return {
"url": Path(package.source_url).as_uri(),
"dir_info": dir_info,
}
-
- return reference
diff --git a/src/poetry/installation/installer.py b/src/poetry/installation/installer.py
index 4a63e0767aa..b033e71f53c 100644
--- a/src/poetry/installation/installer.py
+++ b/src/poetry/installation/installer.py
@@ -25,7 +25,6 @@
from poetry.config.config import Config
from poetry.installation.base_installer import BaseInstaller
- from poetry.installation.operations import OperationTypes
from poetry.installation.operations.operation import Operation
from poetry.packages import Locker
from poetry.utils.env import Env
@@ -42,7 +41,7 @@ def __init__(
config: Config,
installed: Repository | None = None,
executor: Executor | None = None,
- ):
+ ) -> None:
self._io = io
self._env = env
self._package = package
@@ -59,9 +58,9 @@ def __init__(
self._execute_operations = True
self._lock = False
- self._whitelist = []
+ self._whitelist: list[str] = []
- self._extras = []
+ self._extras: list[str] = []
if executor is None:
executor = Executor(self._env, self._pool, config, self._io)
@@ -172,7 +171,7 @@ def whitelist(self, packages: Iterable[str]) -> Installer:
return self
- def extras(self, extras: list) -> Installer:
+ def extras(self, extras: list[str]) -> Installer:
self._extras = extras
return self
@@ -183,7 +182,7 @@ def use_executor(self, use_executor: bool = True) -> Installer:
return self
def _do_refresh(self) -> int:
- from poetry.puzzle import Solver
+ from poetry.puzzle.solver import Solver
# Checking extras
for extra in self._extras:
@@ -199,7 +198,10 @@ def _do_refresh(self) -> int:
self._io,
)
- ops = solver.solve(use_latest=[]).calculate_operations()
+ with solver.provider.use_source_root(
+ source_root=self._env.path.joinpath("src")
+ ):
+ ops = solver.solve(use_latest=[]).calculate_operations()
local_repo = Repository()
self._populate_local_repo(local_repo, ops)
@@ -209,7 +211,7 @@ def _do_refresh(self) -> int:
return 0
def _do_install(self, local_repo: Repository) -> int:
- from poetry.puzzle import Solver
+ from poetry.puzzle.solver import Solver
locked_repository = Repository()
if self._update:
@@ -236,7 +238,10 @@ def _do_install(self, local_repo: Repository) -> int:
self._io,
)
- ops = solver.solve(use_latest=self._whitelist).calculate_operations()
+ with solver.provider.use_source_root(
+ source_root=self._env.path.joinpath("src")
+ ):
+ ops = solver.solve(use_latest=self._whitelist).calculate_operations()
else:
self._io.write_line("Installing dependencies from lock file>")
@@ -339,7 +344,7 @@ def _write_lock_file(self, repo: Repository, force: bool = True) -> None:
self._io.write_line("")
self._io.write_line("Writing lock file>")
- def _execute(self, operations: list[OperationTypes]) -> int:
+ def _execute(self, operations: list[Operation]) -> int:
if self._use_executor:
return self._executor.execute(operations)
@@ -470,9 +475,9 @@ def _populate_local_repo(
def _get_operations_from_lock(
self, locked_repository: Repository
- ) -> Sequence[Operation]:
+ ) -> list[Operation]:
installed_repo = self._installed_repository
- ops = []
+ ops: list[Operation] = []
extra_packages = self._get_extra_packages(locked_repository)
for locked in locked_repository.packages:
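
Illustration only, not part of the patch: the installer changes above wrap dependency solving in Provider.use_source_root(...) so that VCS checkouts triggered during resolution land under the environment's "src" directory. A minimal sketch of that context-manager pattern follows; _ProviderLike is a made-up stand-in, and the real implementation appears in the provider.py hunks later in this diff.

from __future__ import annotations

from contextlib import contextmanager
from pathlib import Path
from typing import Iterator


class _ProviderLike:
    def __init__(self) -> None:
        self._source_root: Path | None = None

    @contextmanager
    def use_source_root(self, source_root: Path) -> Iterator[_ProviderLike]:
        # Swap the attribute for the duration of the with-block, then restore it.
        previous = self._source_root
        self._source_root = source_root
        yield self
        self._source_root = previous


provider = _ProviderLike()
with provider.use_source_root(Path("/tmp/venv/src")):
    assert provider._source_root == Path("/tmp/venv/src")
assert provider._source_root is None  # restored on exit
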
diff --git a/src/poetry/installation/noop_installer.py b/src/poetry/installation/noop_installer.py
index 84416cf4207..0a994f1484b 100644
--- a/src/poetry/installation/noop_installer.py
+++ b/src/poetry/installation/noop_installer.py
@@ -12,7 +12,7 @@
class NoopInstaller(BaseInstaller):
def __init__(self) -> None:
self._installs: list[Package] = []
- self._updates: list[Package] = []
+ self._updates: list[tuple[Package, Package]] = []
self._removals: list[Package] = []
@property
@@ -20,7 +20,7 @@ def installs(self) -> list[Package]:
return self._installs
@property
- def updates(self) -> list[Package]:
+ def updates(self) -> list[tuple[Package, Package]]:
return self._updates
@property
diff --git a/src/poetry/installation/operations/__init__.py b/src/poetry/installation/operations/__init__.py
index d3c2db5cced..d579ac6b8a0 100644
--- a/src/poetry/installation/operations/__init__.py
+++ b/src/poetry/installation/operations/__init__.py
@@ -1,10 +1,8 @@
from __future__ import annotations
-from typing import Union
-
from poetry.installation.operations.install import Install
from poetry.installation.operations.uninstall import Uninstall
from poetry.installation.operations.update import Update
-OperationTypes = Union[Install, Uninstall, Update]
+__all__ = ["Install", "Uninstall", "Update"]
diff --git a/src/poetry/installation/operations/operation.py b/src/poetry/installation/operations/operation.py
index 99c12c39632..816618a1b8d 100644
--- a/src/poetry/installation/operations/operation.py
+++ b/src/poetry/installation/operations/operation.py
@@ -43,7 +43,8 @@ def package(self) -> Package:
raise NotImplementedError()
def format_version(self, package: Package) -> str:
- return package.full_pretty_version
+ version: str = package.full_pretty_version
+ return version
def skip(self: T, reason: str) -> T:
self._skipped = True
diff --git a/src/poetry/installation/pip_installer.py b/src/poetry/installation/pip_installer.py
index 86dbeea719c..ae54ff73af5 100644
--- a/src/poetry/installation/pip_installer.py
+++ b/src/poetry/installation/pip_installer.py
@@ -12,8 +12,9 @@
from poetry.core.pyproject.toml import PyProjectTOML
from poetry.installation.base_installer import BaseInstaller
+from poetry.repositories.http import HTTPRepository
from poetry.utils._compat import encode
-from poetry.utils.helpers import safe_rmtree
+from poetry.utils.helpers import remove_directory
from poetry.utils.pip import pip_install
@@ -57,27 +58,33 @@ def install(self, package: Package, update: bool = False) -> None:
)
args += ["--trusted-host", parsed.hostname]
- if repository.cert:
- args += ["--cert", str(repository.cert)]
+ if isinstance(repository, HTTPRepository):
+ if repository.cert:
+ args += ["--cert", str(repository.cert)]
- if repository.client_cert:
- args += ["--client-cert", str(repository.client_cert)]
+ if repository.client_cert:
+ args += ["--client-cert", str(repository.client_cert)]
- index_url = repository.authenticated_url
+ index_url = repository.authenticated_url
+
+ args += ["--index-url", index_url]
- args += ["--index-url", index_url]
if (
self._pool.has_default()
and repository.name != self._pool.repositories[0].name
):
- args += [
- "--extra-index-url",
- self._pool.repositories[0].authenticated_url,
- ]
+ first_repository = self._pool.repositories[0]
+
+ if isinstance(first_repository, HTTPRepository):
+ args += [
+ "--extra-index-url",
+ first_repository.authenticated_url,
+ ]
if update:
args.append("-U")
+ req: str | list[str]
if package.files and not package.source_url:
# Format as a requirements.txt
# We need to create a requirements.txt file
@@ -128,12 +135,12 @@ def remove(self, package: Package) -> None:
if package.source_type == "git":
src_dir = self._env.path / "src" / package.name
if src_dir.exists():
- safe_rmtree(str(src_dir))
+ remove_directory(src_dir, force=True)
- def run(self, *args: Any, **kwargs: Any) -> str:
+ def run(self, *args: Any, **kwargs: Any) -> int | str:
return self._env.run_pip(*args, **kwargs)
- def requirement(self, package: Package, formatted: bool = False) -> str:
+ def requirement(self, package: Package, formatted: bool = False) -> str | list[str]:
if formatted and not package.source_type:
req = f"{package.name}=={package.version}"
for f in package.files:
@@ -155,7 +162,7 @@ def requirement(self, package: Package, formatted: bool = False) -> str:
req = os.path.realpath(package.source_url)
if package.develop and package.source_type == "directory":
- req = ["-e", req]
+ return ["-e", req]
return req
@@ -166,7 +173,7 @@ def requirement(self, package: Package, formatted: bool = False) -> str:
)
if package.develop:
- req = ["-e", req]
+ return ["-e", req]
return req
@@ -177,9 +184,12 @@ def requirement(self, package: Package, formatted: bool = False) -> str:
def create_temporary_requirement(self, package: Package) -> str:
fd, name = tempfile.mkstemp("reqs.txt", f"{package.name}-{package.version}")
+ req = self.requirement(package, formatted=True)
+ if isinstance(req, list):
+ req = " ".join(req)
try:
- os.write(fd, encode(self.requirement(package, formatted=True)))
+ os.write(fd, encode(req))
finally:
os.close(fd)
@@ -231,7 +241,7 @@ def install_directory(self, package: Package) -> str | int:
with builder.setup_py():
if package.develop:
return pip_install(
- directory=req,
+ path=req,
environment=self._env,
upgrade=True,
editable=True,
@@ -242,33 +252,25 @@ def install_directory(self, package: Package) -> str | int:
if package.develop:
return pip_install(
- directory=req, environment=self._env, upgrade=True, editable=True
+ path=req, environment=self._env, upgrade=True, editable=True
)
return pip_install(path=req, environment=self._env, deps=False, upgrade=True)
def install_git(self, package: Package) -> None:
from poetry.core.packages.package import Package
- from poetry.core.vcs.git import Git
-
- src_dir = self._env.path / "src" / package.name
- if src_dir.exists():
- safe_rmtree(str(src_dir))
-
- src_dir.parent.mkdir(exist_ok=True)
-
- git = Git()
- git.clone(package.source_url, src_dir)
- reference = package.source_resolved_reference
- if not reference:
- reference = package.source_reference
+ from poetry.vcs.git import Git
- git.checkout(reference, src_dir)
+ source = Git.clone(
+ url=package.source_url,
+ source_root=self._env.path / "src",
+ revision=package.source_resolved_reference or package.source_reference,
+ )
# Now we just need to install from the source directory
pkg = Package(package.name, package.version)
pkg._source_type = "directory"
- pkg._source_url = str(src_dir)
+ pkg._source_url = str(source.path)
pkg.develop = package.develop
self.install_directory(pkg)
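
Because requirement() may now return either a plain requirement string or an ["-e", path] pair, callers such as create_temporary_requirement normalize the shape before writing it out. A small standalone sketch of that handling with hypothetical inputs (not the patch's code):

from __future__ import annotations


def normalize_requirement(req: str | list[str]) -> str:
    # Editable installs come back as ["-e", "<path>"]; join them into one
    # requirements.txt-style line, otherwise pass the string through unchanged.
    if isinstance(req, list):
        return " ".join(req)
    return req


assert normalize_requirement("demo==1.2.3") == "demo==1.2.3"
assert normalize_requirement(["-e", "/src/demo"]) == "-e /src/demo"
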
diff --git a/src/poetry/json/__init__.py b/src/poetry/json/__init__.py
index af8d3d6175d..d5914f04737 100644
--- a/src/poetry/json/__init__.py
+++ b/src/poetry/json/__init__.py
@@ -3,6 +3,8 @@
import json
import os
+from typing import Any
+
import jsonschema
@@ -14,7 +16,7 @@ class ValidationError(ValueError):
pass
-def validate_object(obj: dict, schema_name: str) -> list[str]:
+def validate_object(obj: dict[str, Any], schema_name: str) -> list[str]:
schema = os.path.join(SCHEMA_DIR, f"{schema_name}.json")
if not os.path.exists(schema):
@@ -24,7 +26,10 @@ def validate_object(obj: dict, schema_name: str) -> list[str]:
schema = json.loads(f.read())
validator = jsonschema.Draft7Validator(schema)
- validation_errors = sorted(validator.iter_errors(obj), key=lambda e: e.path)
+ validation_errors = sorted(
+ validator.iter_errors(obj),
+ key=lambda e: e.path, # type: ignore[no-any-return]
+ )
errors = []
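
For reference, a standalone sketch of the jsonschema pattern validate_object relies on: Draft7Validator.iter_errors yields every violation, and sorting by error path keeps the reported messages stable. The schema and data below are toy examples, not Poetry's bundled schema.

import jsonschema

schema = {
    "type": "object",
    "properties": {"name": {"type": "string"}},
    "required": ["name"],
}
validator = jsonschema.Draft7Validator(schema)

# Collect every violation instead of stopping at the first one.
errors = sorted(validator.iter_errors({"name": 42}), key=lambda e: list(e.path))
for error in errors:
    print(".".join(str(part) for part in error.path) or "<root>", "-", error.message)
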
diff --git a/src/poetry/layouts/layout.py b/src/poetry/layouts/layout.py
index 349dba4864f..c4a91506a35 100644
--- a/src/poetry/layouts/layout.py
+++ b/src/poetry/layouts/layout.py
@@ -2,11 +2,14 @@
from pathlib import Path
from typing import TYPE_CHECKING
+from typing import Any
+from typing import Mapping
from tomlkit import dumps
from tomlkit import inline_table
from tomlkit import loads
from tomlkit import table
+from tomlkit.toml_document import TOMLDocument
from poetry.utils.helpers import canonicalize_name
from poetry.utils.helpers import module_name
@@ -48,9 +51,9 @@ def __init__(
author: str | None = None,
license: str | None = None,
python: str = "*",
- dependencies: dict[str, str] | None = None,
- dev_dependencies: dict[str, str] | None = None,
- ):
+ dependencies: dict[str, str | Mapping[str, Any]] | None = None,
+ dev_dependencies: dict[str, str | Mapping[str, Any]] | None = None,
+ ) -> None:
self._project = canonicalize_name(project).replace(".", "-")
self._package_path_relative = Path(
*(module_name(part) for part in canonicalize_name(project).split("."))
@@ -88,11 +91,21 @@ def package_path(self) -> Path:
def get_package_include(self) -> InlineTable | None:
package = inline_table()
- include = self._package_path_relative.parts[0]
- package.append("include", include)
+ # If a project is created in the root directory (this is reasonable inside a
+ # docker container, e.g.)
+ # then parts will be empty.
+ parts = self._package_path_relative.parts
+ if not parts:
+ return None
+
+ include = parts[0]
+ package.append("include", include) # type: ignore[no-untyped-call]
if self.basedir != Path():
- package.append("from", self.basedir.as_posix())
+ package.append( # type: ignore[no-untyped-call]
+ "from",
+ self.basedir.as_posix(),
+ )
else:
if include == self._project:
# package include and package name are the same,
@@ -115,7 +128,7 @@ def create(self, path: Path, with_tests: bool = True) -> None:
def generate_poetry_content(self, original: PyProjectTOML | None = None) -> str:
template = POETRY_DEFAULT
- content = loads(template)
+ content: dict[str, Any] = loads(template)
poetry_content = content["tool"]["poetry"]
poetry_content["name"] = self._project
@@ -162,14 +175,15 @@ def generate_poetry_content(self, original: PyProjectTOML | None = None) -> str:
build_system.add("requires", ["poetry-core" + build_system_version])
build_system.add("build-backend", "poetry.core.masonry.api")
+ assert isinstance(content, TOMLDocument)
content.add("build-system", build_system)
- content = dumps(content)
+ text = dumps(content)
if original and original.file.exists():
- content = dumps(original.data) + "\n" + content
+ text = dumps(original.data) + "\n" + text
- return content
+ return text
def _create_default(self, path: Path, src: bool = True) -> None:
package_path = path / self.package_path
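
The new guard in get_package_include covers the case where the relative package path has no components at all (for example, a project created at the filesystem root). A quick illustration of why parts can be empty:

from pathlib import Path

assert Path("my_pkg").parts == ("my_pkg",)
assert Path().parts == ()  # nothing usable as a package include
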
diff --git a/src/poetry/locations.py b/src/poetry/locations.py
index 5312619a02e..33c5aa8c0ad 100644
--- a/src/poetry/locations.py
+++ b/src/poetry/locations.py
@@ -1,19 +1,42 @@
from __future__ import annotations
+import logging
import os
+import sys
from pathlib import Path
-from poetry.utils.appdirs import user_cache_dir
-from poetry.utils.appdirs import user_config_dir
-from poetry.utils.appdirs import user_data_dir
+from platformdirs import user_cache_path
+from platformdirs import user_config_path
+from platformdirs import user_data_path
-CACHE_DIR = user_cache_dir("pypoetry")
-DATA_DIR = user_data_dir("pypoetry")
-CONFIG_DIR = user_config_dir("pypoetry")
+logger = logging.getLogger(__name__)
-REPOSITORY_CACHE_DIR = Path(CACHE_DIR) / "cache" / "repositories"
+CACHE_DIR = user_cache_path("pypoetry", appauthor=False)
+CONFIG_DIR = user_config_path("pypoetry", appauthor=False, roaming=True)
+
+REPOSITORY_CACHE_DIR = CACHE_DIR / "cache" / "repositories"
+
+# platformdirs 2.0.0 corrected the OSX/macOS config directory from
+# /Users/<user>/Library/Application Support/<appname> to
+# /Users/<user>/Library/Preferences/<appname>.
+#
+# For now we only deprecate use of the old directory.
+if sys.platform == "darwin":
+ _LEGACY_CONFIG_DIR = CONFIG_DIR.parent.parent / "Application Support" / "pypoetry"
+ config_toml = _LEGACY_CONFIG_DIR / "config.toml"
+ auth_toml = _LEGACY_CONFIG_DIR / "auth.toml"
+
+ if any(file.exists() for file in (auth_toml, config_toml)):
+ logger.warning(
+ "Configuration file exists at %s, reusing this directory.\n\nConsider"
+ " moving configuration to %s, as support for the legacy directory will be"
+ " removed in an upcoming release.",
+ _LEGACY_CONFIG_DIR,
+ CONFIG_DIR,
+ )
+ CONFIG_DIR = _LEGACY_CONFIG_DIR
def data_dir() -> Path:
@@ -21,4 +44,4 @@ def data_dir() -> Path:
if poetry_home:
return Path(poetry_home).expanduser()
- return Path(user_data_dir("pypoetry", roaming=True))
+ return user_data_path("pypoetry", appauthor=False, roaming=True)
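
For context, a standalone sketch of how the platformdirs-based lookups above resolve, including the macOS legacy-directory fallback. This is an illustration only; the patch above is authoritative.

import sys

from platformdirs import user_cache_path, user_config_path

cache_dir = user_cache_path("pypoetry", appauthor=False)
config_dir = user_config_path("pypoetry", appauthor=False, roaming=True)

if sys.platform == "darwin":
    # Prefer the pre-platformdirs location only if configuration already exists there.
    legacy = config_dir.parent.parent / "Application Support" / "pypoetry"
    if any((legacy / name).exists() for name in ("config.toml", "auth.toml")):
        config_dir = legacy

print(cache_dir)
print(config_dir)
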
diff --git a/src/poetry/masonry/builders/__init__.py b/src/poetry/masonry/builders/__init__.py
index ea5ca59a998..61662422c39 100644
--- a/src/poetry/masonry/builders/__init__.py
+++ b/src/poetry/masonry/builders/__init__.py
@@ -1,3 +1,6 @@
from __future__ import annotations
from poetry.masonry.builders.editable import EditableBuilder
+
+
+__all__ = ["EditableBuilder"]
diff --git a/src/poetry/masonry/builders/editable.py b/src/poetry/masonry/builders/editable.py
index 188ece7c47e..de4ce1ca5c7 100644
--- a/src/poetry/masonry/builders/editable.py
+++ b/src/poetry/masonry/builders/editable.py
@@ -15,14 +15,15 @@
from poetry.utils._compat import WINDOWS
from poetry.utils._compat import decode
+from poetry.utils.env import build_environment
from poetry.utils.helpers import is_dir_writable
from poetry.utils.pip import pip_install
if TYPE_CHECKING:
from cleo.io.io import IO
- from poetry.core.poetry import Poetry
+ from poetry.poetry import Poetry
from poetry.utils.env import Env
SCRIPT_TEMPLATE = """\
@@ -39,7 +40,7 @@
"""
-class EditableBuilder(Builder):
+class EditableBuilder(Builder): # type: ignore[misc]
def __init__(self, poetry: Poetry, env: Env, io: IO) -> None:
super().__init__(poetry)
@@ -74,9 +75,10 @@ def build(self) -> None:
added_files += self._add_scripts()
self._add_dist_info(added_files)
- def _run_build_script(self, build_script: Path) -> None:
- self._debug(f" - Executing build script: {build_script}")
- self._env.run("python", str(self._path.joinpath(build_script)), call=True)
+ def _run_build_script(self, build_script: str) -> None:
+ with build_environment(poetry=self._poetry, env=self._env, io=self._io) as env:
+ self._debug(f" - Executing build script: {build_script}")
+ env.run("python", str(self._path.joinpath(build_script)), call=True)
def _setup_build(self) -> None:
builder = SdistBuilder(self._poetry)
diff --git a/src/poetry/mixology/__init__.py b/src/poetry/mixology/__init__.py
index f32e57aff19..88d9d174229 100644
--- a/src/poetry/mixology/__init__.py
+++ b/src/poetry/mixology/__init__.py
@@ -16,8 +16,8 @@
def resolve_version(
root: ProjectPackage,
provider: Provider,
- locked: dict[str, list[DependencyPackage]] = None,
- use_latest: list[str] = None,
+ locked: dict[str, list[DependencyPackage]] | None = None,
+ use_latest: list[str] | None = None,
) -> SolverResult:
solver = VersionSolver(root, provider, locked=locked, use_latest=use_latest)
diff --git a/src/poetry/mixology/failure.py b/src/poetry/mixology/failure.py
index d908c786c39..6c11350aba9 100644
--- a/src/poetry/mixology/failure.py
+++ b/src/poetry/mixology/failure.py
@@ -63,7 +63,7 @@ def write(self) -> str:
buffer.append("")
if isinstance(self._root.cause, ConflictCause):
- self._visit(self._root, {})
+ self._visit(self._root)
else:
self._write(self._root, f"Because {self._root}, version solving failed.")
@@ -108,7 +108,6 @@ def _write(
def _visit(
self,
incompatibility: Incompatibility,
- details_for_incompatibility: dict,
conclusion: bool = False,
) -> None:
numbered = conclusion or self._derivations[incompatibility] > 1
@@ -117,7 +116,6 @@ def _visit(
cause: ConflictCause = cast(ConflictCause, incompatibility.cause)
- details_for_cause: dict = {}
if isinstance(cause.conflict.cause, ConflictCause) and isinstance(
cause.other.cause, ConflictCause
):
@@ -126,7 +124,7 @@ def _visit(
if conflict_line is not None and other_line is not None:
reason = cause.conflict.and_to_string(
- cause.other, details_for_cause, conflict_line, other_line
+ cause.other, conflict_line, other_line
)
self._write(
incompatibility,
@@ -143,7 +141,7 @@ def _visit(
without_line = cause.conflict
line = other_line
- self._visit(without_line, details_for_cause)
+ self._visit(without_line)
self._write(
incompatibility,
f"{conjunction} because {with_line!s} ({line}),"
@@ -157,18 +155,18 @@ def _visit(
if single_line_other or single_line_conflict:
first = cause.conflict if single_line_other else cause.other
second = cause.other if single_line_other else cause.conflict
- self._visit(first, details_for_cause)
- self._visit(second, details_for_cause)
+ self._visit(first)
+ self._visit(second)
self._write(
incompatibility,
f"Thus, {incompatibility_string}.",
numbered=numbered,
)
else:
- self._visit(cause.conflict, {}, conclusion=True)
+ self._visit(cause.conflict, conclusion=True)
self._lines.append(("", None))
- self._visit(cause.other, details_for_cause)
+ self._visit(cause.other)
self._write(
incompatibility,
@@ -193,9 +191,7 @@ def _visit(
derived_line = self._line_numbers.get(derived)
if derived_line is not None:
- reason = ext.and_to_string(
- derived, details_for_cause, None, derived_line
- )
+ reason = ext.and_to_string(derived, None, derived_line)
self._write(
incompatibility,
f"Because {reason}, {incompatibility_string}.",
@@ -211,26 +207,22 @@ def _visit(
collapsed_ext = derived_cause.conflict
- details_for_cause = {}
-
- self._visit(collapsed_derived, details_for_cause)
- reason = collapsed_ext.and_to_string(ext, details_for_cause, None, None)
+ self._visit(collapsed_derived)
+ reason = collapsed_ext.and_to_string(ext, None, None)
self._write(
incompatibility,
f"{conjunction} because {reason}, {incompatibility_string}.",
numbered=numbered,
)
else:
- self._visit(derived, details_for_cause)
+ self._visit(derived)
self._write(
incompatibility,
f"{conjunction} because {ext!s}, {incompatibility_string}.",
numbered=numbered,
)
else:
- reason = cause.conflict.and_to_string(
- cause.other, details_for_cause, None, None
- )
+ reason = cause.conflict.and_to_string(cause.other, None, None)
self._write(
incompatibility,
f"Because {reason}, {incompatibility_string}.",
diff --git a/src/poetry/mixology/incompatibility.py b/src/poetry/mixology/incompatibility.py
index da232436110..4c08f7cef57 100644
--- a/src/poetry/mixology/incompatibility.py
+++ b/src/poetry/mixology/incompatibility.py
@@ -1,7 +1,6 @@
from __future__ import annotations
from typing import TYPE_CHECKING
-from typing import Callable
from typing import Iterator
from poetry.mixology.incompatibility_cause import ConflictCause
@@ -14,6 +13,8 @@
if TYPE_CHECKING:
+ from collections.abc import Callable
+
from poetry.mixology.incompatibility_cause import IncompatibilityCause
from poetry.mixology.term import Term
@@ -214,23 +215,18 @@ def __str__(self) -> str:
def and_to_string(
self,
other: Incompatibility,
- details: dict,
this_line: int | None,
other_line: int | None,
) -> str:
- requires_both = self._try_requires_both(other, details, this_line, other_line)
+ requires_both = self._try_requires_both(other, this_line, other_line)
if requires_both is not None:
return requires_both
- requires_through = self._try_requires_through(
- other, details, this_line, other_line
- )
+ requires_through = self._try_requires_through(other, this_line, other_line)
if requires_through is not None:
return requires_through
- requires_forbidden = self._try_requires_forbidden(
- other, details, this_line, other_line
- )
+ requires_forbidden = self._try_requires_forbidden(other, this_line, other_line)
if requires_forbidden is not None:
return requires_forbidden
@@ -248,7 +244,6 @@ def and_to_string(
def _try_requires_both(
self,
other: Incompatibility,
- details: dict,
this_line: int | None,
other_line: int | None,
) -> str | None:
@@ -298,7 +293,6 @@ def _try_requires_both(
def _try_requires_through(
self,
other: Incompatibility,
- details: dict,
this_line: int | None,
other_line: int | None,
) -> str | None:
@@ -380,7 +374,6 @@ def _try_requires_through(
def _try_requires_forbidden(
self,
other: Incompatibility,
- details: dict,
this_line: int | None,
other_line: int | None,
) -> str | None:
@@ -442,7 +435,8 @@ def _terse(self, term: Term, allow_every: bool = False) -> str:
return f"every version of {term.dependency.complete_name}"
if term.dependency.is_root:
- return term.dependency.pretty_name
+ pretty_name: str = term.dependency.pretty_name
+ return pretty_name
return f"{term.dependency.pretty_name} ({term.dependency.pretty_constraint})"
diff --git a/src/poetry/mixology/solutions/providers/__init__.py b/src/poetry/mixology/solutions/providers/__init__.py
index 9470041fd57..cfbd1873848 100644
--- a/src/poetry/mixology/solutions/providers/__init__.py
+++ b/src/poetry/mixology/solutions/providers/__init__.py
@@ -3,3 +3,6 @@
from poetry.mixology.solutions.providers.python_requirement_solution_provider import (
PythonRequirementSolutionProvider,
)
+
+
+__all__ = ["PythonRequirementSolutionProvider"]
diff --git a/src/poetry/mixology/solutions/providers/python_requirement_solution_provider.py b/src/poetry/mixology/solutions/providers/python_requirement_solution_provider.py
index c9eed19ec4c..dba0d58480e 100644
--- a/src/poetry/mixology/solutions/providers/python_requirement_solution_provider.py
+++ b/src/poetry/mixology/solutions/providers/python_requirement_solution_provider.py
@@ -13,7 +13,7 @@
from poetry.puzzle.exceptions import SolverProblemError
-class PythonRequirementSolutionProvider(HasSolutionsForException):
+class PythonRequirementSolutionProvider(HasSolutionsForException): # type: ignore[misc]
def can_solve(self, exception: Exception) -> bool:
from poetry.puzzle.exceptions import SolverProblemError
diff --git a/src/poetry/mixology/solutions/solutions/__init__.py b/src/poetry/mixology/solutions/solutions/__init__.py
index 51b8449071b..e78e9a53361 100644
--- a/src/poetry/mixology/solutions/solutions/__init__.py
+++ b/src/poetry/mixology/solutions/solutions/__init__.py
@@ -3,3 +3,6 @@
from poetry.mixology.solutions.solutions.python_requirement_solution import (
PythonRequirementSolution,
)
+
+
+__all__ = ["PythonRequirementSolution"]
diff --git a/src/poetry/mixology/solutions/solutions/python_requirement_solution.py b/src/poetry/mixology/solutions/solutions/python_requirement_solution.py
index 8d5ed5f0096..cfade7669c7 100644
--- a/src/poetry/mixology/solutions/solutions/python_requirement_solution.py
+++ b/src/poetry/mixology/solutions/solutions/python_requirement_solution.py
@@ -10,7 +10,7 @@
from poetry.puzzle.exceptions import SolverProblemError
-class PythonRequirementSolution(Solution):
+class PythonRequirementSolution(Solution): # type: ignore[misc]
def __init__(self, exception: SolverProblemError) -> None:
from poetry.core.semver.helpers import parse_constraint
diff --git a/src/poetry/mixology/term.py b/src/poetry/mixology/term.py
index 014d840ef85..260567b1b7f 100644
--- a/src/poetry/mixology/term.py
+++ b/src/poetry/mixology/term.py
@@ -9,7 +9,7 @@
if TYPE_CHECKING:
from poetry.core.packages.dependency import Dependency
- from poetry.core.semver.helpers import VersionTypes
+ from poetry.core.semver.version_constraint import VersionConstraint
class Term:
@@ -33,7 +33,7 @@ def dependency(self) -> Dependency:
return self._dependency
@property
- def constraint(self) -> VersionTypes:
+ def constraint(self) -> VersionConstraint:
return self._dependency.constraint
def is_positive(self) -> bool:
@@ -152,14 +152,15 @@ def difference(self, other: Term) -> Term | None:
return self.intersect(other.inverse)
def _compatible_dependency(self, other: Dependency) -> bool:
- return (
+ compatible: bool = (
self.dependency.is_root
or other.is_root
or other.is_same_package_as(self.dependency)
)
+ return compatible
def _non_empty_term(
- self, constraint: VersionTypes, is_positive: bool
+ self, constraint: VersionConstraint, is_positive: bool
) -> Term | None:
if constraint.is_empty():
return None
diff --git a/src/poetry/mixology/version_solver.py b/src/poetry/mixology/version_solver.py
index 75aa2d329d7..d39591b4b8f 100644
--- a/src/poetry/mixology/version_solver.py
+++ b/src/poetry/mixology/version_solver.py
@@ -22,7 +22,6 @@
if TYPE_CHECKING:
- from poetry.core.packages.package import Package
from poetry.core.packages.project_package import ProjectPackage
from poetry.puzzle.provider import Provider
@@ -40,20 +39,28 @@ class DependencyCache:
again.
"""
- def __init__(self, provider: Provider):
+ def __init__(self, provider: Provider) -> None:
self.provider = provider
- self.cache: dict[str, list[Package]] = {}
+ self.cache: dict[
+ tuple[str, str | None, str | None, str | None], list[DependencyPackage]
+ ] = {}
@functools.lru_cache(maxsize=128)
def search_for(self, dependency: Dependency) -> list[DependencyPackage]:
- complete_name = dependency.complete_name
- packages = self.cache.get(complete_name)
+ key = (
+ dependency.complete_name,
+ dependency.source_type,
+ dependency.source_url,
+ dependency.source_reference,
+ )
+
+ packages = self.cache.get(key)
if packages is None:
packages = self.provider.search_for(dependency)
else:
packages = [p for p in packages if dependency.constraint.allows(p.version)]
- self.cache[complete_name] = packages
+ self.cache[key] = packages
return packages
@@ -74,9 +81,9 @@ def __init__(
self,
root: ProjectPackage,
provider: Provider,
- locked: dict[str, list[Package]] = None,
- use_latest: list[str] = None,
- ):
+ locked: dict[str, list[DependencyPackage]] | None = None,
+ use_latest: list[str] | None = None,
+ ) -> None:
self._root = root
self._provider = provider
self._dependency_cache = DependencyCache(provider)
@@ -109,7 +116,7 @@ def solve(self) -> SolverResult:
)
try:
- next = self._root.name
+ next: str | None = self._root.name
while next is not None:
self._propagate(next)
next = self._choose_package_version()
@@ -213,7 +220,8 @@ def _propagate_incompatibility(
unsatisfied.dependency, not unsatisfied.is_positive(), incompatibility
)
- return unsatisfied.dependency.complete_name
+ complete_name: str = unsatisfied.dependency.complete_name
+ return complete_name
def _resolve_conflict(self, incompatibility: Incompatibility) -> Incompatibility:
"""
@@ -405,7 +413,8 @@ def _get_min(dependency: Dependency) -> tuple[bool, int]:
self._add_incompatibility(
Incompatibility([Term(dependency, True)], PackageNotFoundCause(e))
)
- return dependency.complete_name
+ complete_name: str = dependency.complete_name
+ return complete_name
package = None
if dependency.name not in self._use_latest:
@@ -428,7 +437,8 @@ def _get_min(dependency: Dependency) -> tuple[bool, int]:
Incompatibility([Term(dependency, True)], NoVersionsCause())
)
- return dependency.complete_name
+ complete_name = dependency.complete_name
+ return complete_name
else:
package = locked
@@ -450,12 +460,13 @@ def _get_min(dependency: Dependency) -> tuple[bool, int]:
)
if not conflict:
- self._solution.decide(package)
+ self._solution.decide(package.package)
self._log(
f"selecting {package.complete_name} ({package.full_pretty_version})"
)
- return dependency.complete_name
+ complete_name = dependency.complete_name
+ return complete_name
def _result(self) -> SolverResult:
"""
@@ -494,7 +505,7 @@ def _get_locked(
locked = self._locked.get(dependency.name, [])
for package in locked:
- if (allow_similar or dependency.is_same_package_as(package)) and (
+ if (allow_similar or dependency.is_same_package_as(package.package)) and (
dependency.constraint.allows(package.version)
or package.is_prerelease()
and dependency.constraint.allows(package.version.next_patch())
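
The widened DependencyCache key above keeps identically named dependencies apart when they differ in source. A toy sketch of the effect, with a made-up URL and hypothetical cache contents:

from __future__ import annotations

# Cache keyed the same way as DependencyCache:
# (name, source_type, source_url, source_reference).
cache: dict[tuple[str, str | None, str | None, str | None], list[str]] = {}

pypi_key = ("demo", None, None, None)
git_key = ("demo", "git", "https://example.com/demo.git", "main")

cache[pypi_key] = ["demo 1.0.0 (PyPI)"]
cache[git_key] = ["demo 1.1.0.dev0 (git)"]

# Keyed by complete_name alone, these two entries would have collided.
assert pypi_key != git_key and len(cache) == 2
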
diff --git a/src/poetry/packages/__init__.py b/src/poetry/packages/__init__.py
index 719e44d963b..b97ce25b635 100644
--- a/src/poetry/packages/__init__.py
+++ b/src/poetry/packages/__init__.py
@@ -3,3 +3,6 @@
from poetry.packages.dependency_package import DependencyPackage
from poetry.packages.locker import Locker
from poetry.packages.package_collection import PackageCollection
+
+
+__all__ = ["DependencyPackage", "Locker", "PackageCollection"]
diff --git a/src/poetry/packages/dependency_package.py b/src/poetry/packages/dependency_package.py
index f020ac5d130..1cdae4ba4a9 100644
--- a/src/poetry/packages/dependency_package.py
+++ b/src/poetry/packages/dependency_package.py
@@ -49,8 +49,9 @@ def __repr__(self) -> str:
def __hash__(self) -> int:
return hash(self._package)
- def __eq__(self, other: Package | DependencyPackage) -> bool:
+ def __eq__(self, other: object) -> bool:
if isinstance(other, DependencyPackage):
other = other.package
- return self._package == other
+ equal: bool = self._package == other
+ return equal
diff --git a/src/poetry/packages/locker.py b/src/poetry/packages/locker.py
index ee406e1830b..a12dc7476e2 100644
--- a/src/poetry/packages/locker.py
+++ b/src/poetry/packages/locker.py
@@ -9,18 +9,18 @@
from hashlib import sha256
from pathlib import Path
from typing import TYPE_CHECKING
+from typing import Any
from typing import Iterable
from typing import Iterator
from typing import Sequence
+from typing import cast
from poetry.core.packages.dependency import Dependency
-
-
-try:
- from poetry.core.packages.dependency_group import MAIN_GROUP
-except ImportError:
- MAIN_GROUP = "default"
+from poetry.core.packages.directory_dependency import DirectoryDependency
+from poetry.core.packages.file_dependency import FileDependency
from poetry.core.packages.package import Package
+from poetry.core.packages.url_dependency import URLDependency
+from poetry.core.packages.vcs_dependency import VCSDependency
from poetry.core.semver.helpers import parse_constraint
from poetry.core.semver.version import Version
from poetry.core.toml.file import TOMLFile
@@ -32,6 +32,8 @@
from tomlkit import item
from tomlkit import table
from tomlkit.exceptions import TOMLKitError
+from tomlkit.items import Array
+from tomlkit.items import Table
from poetry.packages import DependencyPackage
from poetry.utils.extras import get_extra_package_names
@@ -39,7 +41,6 @@
if TYPE_CHECKING:
from poetry.core.version.markers import BaseMarker
- from tomlkit.items import InlineTable
from tomlkit.toml_document import TOMLDocument
from poetry.repositories import Repository
@@ -54,10 +55,10 @@ class Locker:
_legacy_keys = ["dependencies", "source", "extras", "dev-dependencies"]
_relevant_keys = [*_legacy_keys, "group"]
- def __init__(self, lock: str | Path, local_config: dict) -> None:
+ def __init__(self, lock: str | Path, local_config: dict[str, Any]) -> None:
self._lock = TOMLFile(lock)
self._local_config = local_config
- self._lock_data = None
+ self._lock_data: TOMLDocument | None = None
self._content_hash = self._get_content_hash()
@property
@@ -88,7 +89,8 @@ def is_fresh(self) -> bool:
metadata = lock.get("metadata", {})
if "content-hash" in metadata:
- return self._content_hash == lock["metadata"]["content-hash"]
+ fresh: bool = self._content_hash == metadata["content-hash"]
+ return fresh
return False
@@ -104,7 +106,7 @@ def locked_repository(self) -> Repository:
lock_data = self.lock_data
packages = Repository()
- locked_packages = lock_data["package"]
+ locked_packages = cast("list[dict[str, Any]]", lock_data["package"])
if not locked_packages:
return packages
@@ -127,16 +129,16 @@ def locked_repository(self) -> Repository:
)
package.description = info.get("description", "")
package.category = info.get("category", "main")
- package.groups = info.get("groups", [MAIN_GROUP])
package.optional = info["optional"]
- if "hashes" in lock_data["metadata"]:
+ metadata = cast("dict[str, Any]", lock_data["metadata"])
+ name = info["name"]
+ if "hashes" in metadata:
# Old lock so we create dummy files from the hashes
- package.files = [
- {"name": h, "hash": h}
- for h in lock_data["metadata"]["hashes"][info["name"]]
- ]
+ hashes = cast("dict[str, Any]", metadata["hashes"])
+ package.files = [{"name": h, "hash": h} for h in hashes[name]]
else:
- package.files = lock_data["metadata"]["files"][info["name"]]
+ files = metadata["files"][name]
+ package.files = files
package.python_versions = info["python-versions"]
extras = info.get("extras", {})
@@ -182,6 +184,7 @@ def locked_repository(self) -> Repository:
if package.source_type == "directory":
# root dir should be the source of the package relative to the lock
# path
+ assert package.source_url is not None
root_dir = Path(package.source_url)
if isinstance(constraint, list):
@@ -304,7 +307,10 @@ def get_project_dependencies(
# Put higher versions first so that we prefer them.
for packages in packages_by_name.values():
- packages.sort(key=lambda package: package.version, reverse=True)
+ packages.sort(
+ key=lambda package: package.version, # type: ignore[no-any-return]
+ reverse=True,
+ )
nested_dependencies = cls.__walk_dependencies(
dependencies=project_requires,
@@ -368,10 +374,10 @@ def get_project_dependency_packages(
yield DependencyPackage(dependency=dependency, package=package)
def set_lock_data(self, root: Package, packages: list[Package]) -> bool:
- files = table()
- packages = self._lock_packages(packages)
+ files: dict[str, Any] = table()
+ package_specs = self._lock_packages(packages)
# Retrieving hashes
- for package in packages:
+ for package in package_specs:
if package["name"] not in files:
files[package["name"]] = []
@@ -383,12 +389,14 @@ def set_lock_data(self, root: Package, packages: list[Package]) -> bool:
files[package["name"]].append(file_metadata)
if files[package["name"]]:
- files[package["name"]] = item(files[package["name"]]).multiline(True)
+ package_files = item(files[package["name"]])
+ assert isinstance(package_files, Array)
+ files[package["name"]] = package_files.multiline(True)
del package["files"]
lock = document()
- lock["package"] = packages
+ lock["package"] = package_specs
if root.extras:
lock["extras"] = {
@@ -441,11 +449,12 @@ def _get_lock_data(self) -> TOMLDocument:
raise RuntimeError("No lockfile found. Unable to read locked packages")
try:
- lock_data = self._lock.read()
+ lock_data: TOMLDocument = self._lock.read()
except TOMLKitError as e:
raise RuntimeError(f"Unable to read the lock file ({e}).")
- lock_version = Version.parse(lock_data["metadata"].get("lock-version", "1.0"))
+ metadata = cast(Table, lock_data["metadata"])
+ lock_version = Version.parse(metadata.get("lock-version", "1.0"))
current_version = Version.parse(self._VERSION)
# We expect the locker to be able to read lock files
# from the same semantic versioning range
@@ -469,7 +478,7 @@ def _get_lock_data(self) -> TOMLDocument:
return lock_data
- def _lock_packages(self, packages: list[Package]) -> list:
+ def _lock_packages(self, packages: list[Package]) -> list[dict[str, Any]]:
locked = []
for package in sorted(packages, key=lambda x: (x.name, x.version)):
@@ -479,22 +488,34 @@ def _lock_packages(self, packages: list[Package]) -> list:
return locked
- def _dump_package(self, package: Package) -> dict:
- dependencies: dict[str, list[InlineTable]] = {}
- for dependency in sorted(package.requires, key=lambda d: d.name):
+ def _dump_package(self, package: Package) -> dict[str, Any]:
+ dependencies: dict[str, list[Any]] = {}
+ for dependency in sorted(
+ package.requires,
+ key=lambda d: d.name, # type: ignore[no-any-return]
+ ):
if dependency.pretty_name not in dependencies:
dependencies[dependency.pretty_name] = []
constraint = inline_table()
- if dependency.is_directory() or dependency.is_file():
+ if dependency.is_directory():
+ dependency = cast(DirectoryDependency, dependency)
constraint["path"] = dependency.path.as_posix()
- if dependency.is_directory() and dependency.develop:
+ if dependency.develop:
constraint["develop"] = True
+
+ elif dependency.is_file():
+ dependency = cast(FileDependency, dependency)
+ constraint["path"] = dependency.path.as_posix()
+
elif dependency.is_url():
+ dependency = cast(URLDependency, dependency)
constraint["url"] = dependency.url
+
elif dependency.is_vcs():
+ dependency = cast(VCSDependency, dependency)
constraint[dependency.vcs] = dependency.source
if dependency.branch:
@@ -519,23 +540,26 @@ def _dump_package(self, package: Package) -> dict:
# All the constraints should have the same type,
# but we want to simplify them if it's possible
- for dependency, constraints in tuple(dependencies.items()):
+ for dependency_name, constraints in dependencies.items():
if all(
len(constraint) == 1 and "version" in constraint
for constraint in constraints
):
- dependencies[dependency] = [
+ dependencies[dependency_name] = [
constraint["version"] for constraint in constraints
]
- data = {
+ data: dict[str, Any] = {
"name": package.pretty_name,
"version": package.pretty_version,
"description": package.description or "",
"category": package.category,
"optional": package.optional,
"python-versions": package.python_versions,
- "files": sorted(package.files, key=lambda x: x["file"]),
+ "files": sorted(
+ package.files,
+ key=lambda x: x["file"], # type: ignore[no-any-return]
+ ),
}
if dependencies:
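
One detail of _dump_package worth spelling out: when every recorded constraint for a dependency is just a version pin, the list collapses to plain version strings before being written to the lock file. A minimal sketch with hypothetical data:

constraints = [{"version": ">=1.0,<2.0"}, {"version": ">=2.0"}]

if all(len(c) == 1 and "version" in c for c in constraints):
    constraints = [c["version"] for c in constraints]

print(constraints)  # ['>=1.0,<2.0', '>=2.0']
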
diff --git a/src/poetry/packages/package_collection.py b/src/poetry/packages/package_collection.py
index 0a4213e9f43..909a61bc486 100644
--- a/src/poetry/packages/package_collection.py
+++ b/src/poetry/packages/package_collection.py
@@ -1,6 +1,7 @@
from __future__ import annotations
from typing import TYPE_CHECKING
+from typing import Sequence
from poetry.packages.dependency_package import DependencyPackage
@@ -10,11 +11,11 @@
from poetry.core.packages.package import Package
-class PackageCollection(list):
+class PackageCollection(list): # type: ignore[type-arg]
def __init__(
self,
dependency: Dependency,
- packages: list[Package | DependencyPackage] = None,
+ packages: Sequence[Package | DependencyPackage] | None = None,
) -> None:
self._dependency = dependency
diff --git a/src/poetry/packages/project_package.py b/src/poetry/packages/project_package.py
deleted file mode 100644
index 698c3aa1822..00000000000
--- a/src/poetry/packages/project_package.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-from poetry.core.packages.project_package import ProjectPackage as _ProjectPackage
-
-
-if TYPE_CHECKING:
- from poetry.core.semver.version import Version
-
-
-class ProjectPackage(_ProjectPackage):
- def set_version(
- self, version: str | Version, pretty_version: str | None = None
- ) -> None:
- from poetry.core.semver.version import Version
-
- if not isinstance(version, Version):
- self._version = Version.parse(version)
- self._pretty_version = pretty_version or version
- else:
- self._version = version
- self._pretty_version = pretty_version or version.text
diff --git a/src/poetry/plugins/plugin_manager.py b/src/poetry/plugins/plugin_manager.py
index 851d32de6f8..f219bb09b1a 100644
--- a/src/poetry/plugins/plugin_manager.py
+++ b/src/poetry/plugins/plugin_manager.py
@@ -33,7 +33,11 @@ def load_plugins(self) -> None:
self._load_plugin_entrypoint(entrypoint)
def get_plugin_entry_points(self) -> list[entrypoints.EntryPoint]:
- return entrypoints.get_group_all(self._group)
+
+ entry_points: list[entrypoints.EntryPoint] = entrypoints.get_group_all(
+ self._group
+ )
+ return entry_points
def add_plugin(self, plugin: Plugin) -> None:
if not isinstance(plugin, (Plugin, ApplicationPlugin)):
diff --git a/src/poetry/poetry.py b/src/poetry/poetry.py
index 706cec314be..6f571aeeaea 100644
--- a/src/poetry/poetry.py
+++ b/src/poetry/poetry.py
@@ -1,6 +1,7 @@
from __future__ import annotations
from typing import TYPE_CHECKING
+from typing import Any
from poetry.core.poetry import Poetry as BasePoetry
@@ -19,18 +20,18 @@
from poetry.repositories.pool import Pool
-class Poetry(BasePoetry):
+class Poetry(BasePoetry): # type: ignore[misc]
VERSION = __version__
def __init__(
self,
file: Path,
- local_config: dict,
+ local_config: dict[str, Any],
package: ProjectPackage,
locker: Locker,
config: Config,
- ):
+ ) -> None:
from poetry.repositories.pool import Pool
super().__init__(file, local_config, package)
diff --git a/src/poetry/publishing/__init__.py b/src/poetry/publishing/__init__.py
index c7aa27edb1c..2cb619a2acd 100644
--- a/src/poetry/publishing/__init__.py
+++ b/src/poetry/publishing/__init__.py
@@ -1,3 +1,6 @@
from __future__ import annotations
from poetry.publishing.publisher import Publisher
+
+
+__all__ = ["Publisher"]
diff --git a/src/poetry/publishing/publisher.py b/src/poetry/publishing/publisher.py
index 4fd6ad00584..95f79afdab1 100644
--- a/src/poetry/publishing/publisher.py
+++ b/src/poetry/publishing/publisher.py
@@ -69,14 +69,14 @@ def publish(
logger.debug(
f"Found authentication information for {repository_name}."
)
- username = auth["username"]
- password = auth["password"]
+ username = auth.username
+ password = auth.password
resolved_client_cert = client_cert or get_client_cert(
self._poetry.config, repository_name
)
# Requesting missing credentials but only if there is not a client cert defined.
- if not resolved_client_cert:
+ if not resolved_client_cert and hasattr(self._io, "ask"):
if username is None:
username = self._io.ask("Username:")
diff --git a/src/poetry/publishing/uploader.py b/src/poetry/publishing/uploader.py
index f5f57143f0a..fc60648a5aa 100644
--- a/src/poetry/publishing/uploader.py
+++ b/src/poetry/publishing/uploader.py
@@ -15,10 +15,10 @@
from requests import adapters
from requests.exceptions import ConnectionError
from requests.exceptions import HTTPError
-from requests.packages.urllib3 import util
from requests_toolbelt import user_agent
from requests_toolbelt.multipart import MultipartEncoder
from requests_toolbelt.multipart import MultipartEncoderMonitor
+from urllib3 import util
from poetry.__version__ import __version__
from poetry.utils.patterns import wheel_file_re
@@ -60,7 +60,8 @@ def __init__(self, poetry: Poetry, io: NullIO) -> None:
@property
def user_agent(self) -> str:
- return user_agent("poetry", __version__)
+ agent: str = user_agent("poetry", __version__)
+ return agent
@property
def adapter(self) -> adapters.HTTPAdapter:
@@ -327,7 +328,7 @@ def _register(self, session: requests.Session, url: str) -> requests.Response:
return resp
- def _prepare_data(self, data: dict) -> list[tuple[str, str]]:
+ def _prepare_data(self, data: dict[str, Any]) -> list[tuple[str, str]]:
data_to_send = []
for key, value in data.items():
if not isinstance(value, (list, tuple)):
diff --git a/src/poetry/puzzle/__init__.py b/src/poetry/puzzle/__init__.py
index 48280ac9bec..d5bc659574a 100644
--- a/src/poetry/puzzle/__init__.py
+++ b/src/poetry/puzzle/__init__.py
@@ -1,3 +1,6 @@
from __future__ import annotations
from poetry.puzzle.solver import Solver
+
+
+__all__ = ["Solver"]
diff --git a/src/poetry/puzzle/exceptions.py b/src/poetry/puzzle/exceptions.py
index 0386adff498..58087c4a78d 100644
--- a/src/poetry/puzzle/exceptions.py
+++ b/src/poetry/puzzle/exceptions.py
@@ -4,7 +4,10 @@
if TYPE_CHECKING:
+ from poetry.core.packages.dependency import Dependency
+
from poetry.mixology.failure import SolveFailure
+ from poetry.packages import DependencyPackage
class SolverProblemError(Exception):
@@ -19,9 +22,11 @@ def error(self) -> SolveFailure:
class OverrideNeeded(Exception):
- def __init__(self, *overrides: dict) -> None:
+ def __init__(
+ self, *overrides: dict[DependencyPackage, dict[str, Dependency]]
+ ) -> None:
self._overrides = overrides
@property
- def overrides(self) -> tuple[dict, ...]:
+ def overrides(self) -> tuple[dict[DependencyPackage, dict[str, Dependency]], ...]:
return self._overrides
diff --git a/src/poetry/puzzle/provider.py b/src/poetry/puzzle/provider.py
index c15ce47865c..9fcee1265f0 100644
--- a/src/poetry/puzzle/provider.py
+++ b/src/poetry/puzzle/provider.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import functools
import logging
import os
import re
@@ -10,17 +11,20 @@
from collections import defaultdict
from contextlib import contextmanager
from pathlib import Path
-from tempfile import mkdtemp
from typing import TYPE_CHECKING
from typing import Any
from typing import Iterable
from typing import Iterator
+from typing import cast
from cleo.ui.progress_indicator import ProgressIndicator
+from poetry.core.packages.directory_dependency import DirectoryDependency
+from poetry.core.packages.file_dependency import FileDependency
+from poetry.core.packages.url_dependency import URLDependency
from poetry.core.packages.utils.utils import get_python_constraint_from_marker
+from poetry.core.packages.vcs_dependency import VCSDependency
from poetry.core.semver.empty_constraint import EmptyConstraint
from poetry.core.semver.version import Version
-from poetry.core.vcs.git import Git
from poetry.core.version.markers import AnyMarker
from poetry.core.version.markers import MarkerUnion
@@ -34,16 +38,14 @@
from poetry.packages.package_collection import PackageCollection
from poetry.puzzle.exceptions import OverrideNeeded
from poetry.utils.helpers import download_file
-from poetry.utils.helpers import safe_rmtree
+from poetry.vcs.git import Git
if TYPE_CHECKING:
+ from collections.abc import Callable
+
from poetry.core.packages.dependency import Dependency
- from poetry.core.packages.directory_dependency import DirectoryDependency
- from poetry.core.packages.file_dependency import FileDependency
from poetry.core.packages.package import Package
- from poetry.core.packages.url_dependency import URLDependency
- from poetry.core.packages.vcs_dependency import VCSDependency
from poetry.core.semver.version_constraint import VersionConstraint
from poetry.core.version.markers import BaseMarker
@@ -54,30 +56,80 @@
logger = logging.getLogger(__name__)
-class Indicator(ProgressIndicator):
+class Indicator(ProgressIndicator): # type: ignore[misc]
+ CONTEXT: str | None = None
+
+ @staticmethod
+ @contextmanager
+ def context() -> Iterator[Callable[[str | None], None]]:
+ def _set_context(context: str | None) -> None:
+ Indicator.CONTEXT = context
+
+ yield _set_context
+
+ _set_context(None)
+
+ def _formatter_context(self) -> str:
+ if Indicator.CONTEXT is None:
+ return " "
+ else:
+ return f" {Indicator.CONTEXT}> "
+
def _formatter_elapsed(self) -> str:
elapsed = time.time() - self._start_time
return f"{elapsed:.1f}s"
+@functools.lru_cache(maxsize=None)
+def _get_package_from_git(
+ url: str,
+ branch: str | None = None,
+ tag: str | None = None,
+ rev: str | None = None,
+ source_root: Path | None = None,
+) -> Package:
+ source = Git.clone(
+ url=url,
+ source_root=source_root,
+ branch=branch,
+ tag=tag,
+ revision=rev,
+ clean=False,
+ )
+ revision = Git.get_revision(source)
+
+ package = Provider.get_package_from_directory(Path(source.path))
+ package._source_type = "git"
+ package._source_url = url
+ package._source_reference = rev or tag or branch or "HEAD"
+ package._source_resolved_reference = revision
+
+ return package
+
+
class Provider:
UNSAFE_PACKAGES: set[str] = set()
def __init__(
- self, package: Package, pool: Pool, io: Any, env: Env | None = None
+ self,
+ package: Package,
+ pool: Pool,
+ io: Any,
+ env: Env | None = None,
) -> None:
self._package = package
self._pool = pool
self._io = io
self._env = env
self._python_constraint = package.python_constraint
- self._is_debugging = self._io.is_debug() or self._io.is_very_verbose()
+ self._is_debugging: bool = self._io.is_debug() or self._io.is_very_verbose()
self._in_progress = False
self._overrides: dict[DependencyPackage, dict[str, Dependency]] = {}
self._deferred_cache: dict[Dependency, Package] = {}
self._load_deferred = True
+ self._source_root: Path | None = None
@property
def pool(self) -> Pool:
@@ -86,12 +138,23 @@ def pool(self) -> Pool:
def is_debugging(self) -> bool:
return self._is_debugging
- def set_overrides(self, overrides: dict) -> None:
+ def set_overrides(
+ self, overrides: dict[DependencyPackage, dict[str, Dependency]]
+ ) -> None:
self._overrides = overrides
def load_deferred(self, load_deferred: bool) -> None:
self._load_deferred = load_deferred
+ @contextmanager
+ def use_source_root(self, source_root: Path) -> Iterator[Provider]:
+ original_source_root = self._source_root
+ self._source_root = source_root
+
+ yield self
+
+ self._source_root = original_source_root
+
@contextmanager
def use_environment(self, env: Env) -> Iterator[Provider]:
original_env = self._env
@@ -105,6 +168,17 @@ def use_environment(self, env: Env) -> Iterator[Provider]:
self._env = original_env
self._python_constraint = original_python_constraint
+ @staticmethod
+ def validate_package_for_dependency(
+ dependency: Dependency, package: Package
+ ) -> None:
+ if dependency.name != package.name:
+ # For now, the dependency's name must match the actual package's name
+ raise RuntimeError(
+ f"The dependency name for {dependency.name} does not match the actual"
+ f" package's name: {package.name}"
+ )
+
def search_for(
self,
dependency: (
@@ -125,12 +199,16 @@ def search_for(
return PackageCollection(dependency, [self._package])
if dependency.is_vcs():
+ dependency = cast(VCSDependency, dependency)
packages = self.search_for_vcs(dependency)
elif dependency.is_file():
+ dependency = cast(FileDependency, dependency)
packages = self.search_for_file(dependency)
elif dependency.is_directory():
+ dependency = cast(DirectoryDependency, dependency)
packages = self.search_for_directory(dependency)
elif dependency.is_url():
+ dependency = cast(URLDependency, dependency)
packages = self.search_for_url(dependency)
else:
packages = self._pool.find_packages(dependency)
@@ -161,8 +239,12 @@ def search_for_vcs(self, dependency: VCSDependency) -> list[Package]:
branch=dependency.branch,
tag=dependency.tag,
rev=dependency.rev,
- name=dependency.name,
+ source_root=self._source_root
+ or (self._env.path.joinpath("src") if self._env else None),
)
+
+ self.validate_package_for_dependency(dependency=dependency, package=package)
+
package.develop = dependency.develop
dependency._constraint = package.version
@@ -176,48 +258,25 @@ def search_for_vcs(self, dependency: VCSDependency) -> list[Package]:
return [package]
- @classmethod
+ @staticmethod
def get_package_from_vcs(
- cls,
vcs: str,
url: str,
branch: str | None = None,
tag: str | None = None,
rev: str | None = None,
- name: str | None = None,
+ source_root: Path | None = None,
) -> Package:
if vcs != "git":
raise ValueError(f"Unsupported VCS dependency {vcs}")
- suffix = url.split("/")[-1].rstrip(".git")
- tmp_dir = Path(mkdtemp(prefix=f"pypoetry-git-{suffix}"))
-
- try:
- git = Git()
- git.clone(url, tmp_dir)
- reference = branch or tag or rev
- if reference is not None:
- git.checkout(reference, tmp_dir)
- else:
- reference = "HEAD"
-
- revision = git.rev_parse(reference, tmp_dir).strip()
-
- package = cls.get_package_from_directory(tmp_dir, name=name)
- package._source_type = "git"
- package._source_url = url
- package._source_reference = reference
- package._source_resolved_reference = revision
- except Exception:
- raise
- finally:
- safe_rmtree(str(tmp_dir))
-
- return package
+ return _get_package_from_git(
+ url=url, branch=branch, tag=tag, rev=rev, source_root=source_root
+ )
def search_for_file(self, dependency: FileDependency) -> list[Package]:
if dependency in self._deferred_cache:
- dependency, _package = self._deferred_cache[dependency]
+ _package = self._deferred_cache[dependency]
package = _package.clone()
else:
@@ -226,14 +285,9 @@ def search_for_file(self, dependency: FileDependency) -> list[Package]:
dependency._constraint = package.version
dependency._pretty_constraint = package.version.text
- self._deferred_cache[dependency] = (dependency, package)
+ self._deferred_cache[dependency] = package
- if dependency.name != package.name:
- # For now, the dependency's name must match the actual package's name
- raise RuntimeError(
- f"The dependency name for {dependency.name} does not match the actual"
- f" package's name: {package.name}"
- )
+ self.validate_package_for_dependency(dependency=dependency, package=package)
if dependency.base is not None:
package.root_dir = dependency.base
@@ -259,18 +313,18 @@ def get_package_from_file(cls, file_path: Path) -> Package:
def search_for_directory(self, dependency: DirectoryDependency) -> list[Package]:
if dependency in self._deferred_cache:
- dependency, _package = self._deferred_cache[dependency]
+ _package = self._deferred_cache[dependency]
package = _package.clone()
else:
- package = self.get_package_from_directory(
- dependency.full_path, name=dependency.name
- )
+ package = self.get_package_from_directory(dependency.full_path)
dependency._constraint = package.version
dependency._pretty_constraint = package.version.text
- self._deferred_cache[dependency] = (dependency, package)
+ self._deferred_cache[dependency] = package
+
+ self.validate_package_for_dependency(dependency=dependency, package=package)
package.develop = dependency.develop
@@ -280,21 +334,8 @@ def search_for_directory(self, dependency: DirectoryDependency) -> list[Package]
return [package]
@classmethod
- def get_package_from_directory(
- cls, directory: Path, name: str | None = None
- ) -> Package:
- package = PackageInfo.from_directory(path=directory).to_package(
- root_dir=directory
- )
-
- if name and name != package.name:
- # For now, the dependency's name must match the actual package's name
- raise RuntimeError(
- f"The dependency name for {name} does not match the actual package's"
- f" name: {package.name}"
- )
-
- return package
+ def get_package_from_directory(cls, directory: Path) -> Package:
+ return PackageInfo.from_directory(path=directory).to_package(root_dir=directory)
def search_for_url(self, dependency: URLDependency) -> list[Package]:
if dependency in self._deferred_cache:
@@ -302,12 +343,7 @@ def search_for_url(self, dependency: URLDependency) -> list[Package]:
package = self.get_package_from_url(dependency.url)
- if dependency.name != package.name:
- # For now, the dependency's name must match the actual package's name
- raise RuntimeError(
- f"The dependency name for {dependency.name} does not match the actual"
- f" package's name: {package.name}"
- )
+ self.validate_package_for_dependency(dependency=dependency, package=package)
for extra in dependency.extras:
if extra in package.extras:
@@ -514,59 +550,50 @@ def complete_package(self, package: DependencyPackage) -> DependencyPackage:
# An example of this is:
# - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6"
# - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6"
- duplicates: dict[str, list[Dependency]] = {}
+ #
+ # Additional care has to be taken to ensure that hidden constraints, such as
+ # source type and reference, are taken into consideration when duplicates
+ # are identified.
+ duplicates: dict[
+ tuple[str, str | None, str | None, str | None], list[Dependency]
+ ] = {}
for dep in dependencies:
- if dep.complete_name not in duplicates:
- duplicates[dep.complete_name] = []
-
- duplicates[dep.complete_name].append(dep)
+ key = (
+ dep.complete_name,
+ dep.source_type,
+ dep.source_url,
+ dep.source_reference,
+ )
+ if key not in duplicates:
+ duplicates[key] = []
+ duplicates[key].append(dep)
dependencies = []
- for dep_name, deps in duplicates.items():
+ for key, deps in duplicates.items():
if len(deps) == 1:
dependencies.append(deps[0])
continue
+ extra_keys = ", ".join(k for k in key[1:] if k is not None)
+ dep_name = f"{key[0]} ({extra_keys})" if extra_keys else key[0]
+
self.debug(f"Duplicate dependencies for {dep_name}")
deps = self._merge_dependencies_by_marker(deps)
+ deps = self._merge_dependencies_by_constraint(deps)
- # Merging dependencies by constraint
- by_constraint: dict[VersionConstraint, list[Dependency]] = defaultdict(list)
- for dep in deps:
- by_constraint[dep.constraint].append(dep)
- for constraint, _deps in by_constraint.items():
- new_markers = []
- for dep in _deps:
- marker = dep.marker.without_extras()
- if marker.is_any():
- # No marker or only extras
- continue
-
- new_markers.append(marker)
-
- if not new_markers:
- continue
-
- dep = _deps[0]
- dep.marker = dep.marker.union(MarkerUnion(*new_markers))
- by_constraint[constraint] = [dep]
-
- continue
-
- if len(by_constraint) == 1:
+ if len(deps) == 1:
self.debug(f"Merging requirements for {deps[0]!s}")
- dependencies.append(list(by_constraint.values())[0][0])
+ dependencies.append(deps[0])
continue
# We leave dependencies as-is if they have the same
# python/platform constraints.
# That way the resolver will pickup the conflict
# and display a proper error.
- _deps = [value[0] for value in by_constraint.values()]
seen = set()
- for _dep in _deps:
- pep_508_dep = _dep.to_pep_508(False)
+ for dep in deps:
+ pep_508_dep = dep.to_pep_508(False)
if ";" not in pep_508_dep:
_requirements = ""
else:
@@ -575,9 +602,9 @@ def complete_package(self, package: DependencyPackage) -> DependencyPackage:
if _requirements not in seen:
seen.add(_requirements)
- if len(_deps) != len(seen):
- for _dep in _deps:
- dependencies.append(_dep)
+ if len(deps) != len(seen):
+ for dep in deps:
+ dependencies.append(dep)
continue
@@ -592,7 +619,6 @@ def complete_package(self, package: DependencyPackage) -> DependencyPackage:
# with the following overrides:
# - {<dependency (>=2.0)>}
# - {<dependency (<2.0)>}
- _deps = [_dep[0] for _dep in by_constraint.values()]
def fmt_warning(d: Dependency) -> str:
marker = d.marker if not d.marker.is_any() else "*"
@@ -601,8 +627,8 @@ def fmt_warning(d: Dependency) -> str:
f" with markers {marker}"
)
- warnings = ", ".join(fmt_warning(d) for d in _deps[:-1])
- warnings += f" and {fmt_warning(_deps[-1])}"
+ warnings = ", ".join(fmt_warning(d) for d in deps[:-1])
+ warnings += f" and {fmt_warning(deps[-1])}"
self.debug(
f"Different requirements found for {warnings}."
)
@@ -624,8 +650,8 @@ def fmt_warning(d: Dependency) -> str:
# - foo (!= 1.2.1) ; python == 3.10
#
# the constraint for the second entry will become (!= 1.2.1, >= 1.2)
- any_markers_dependencies = [d for d in _deps if d.marker.is_any()]
- other_markers_dependencies = [d for d in _deps if not d.marker.is_any()]
+ any_markers_dependencies = [d for d in deps if d.marker.is_any()]
+ other_markers_dependencies = [d for d in deps if not d.marker.is_any()]
marker = other_markers_dependencies[0].marker
for other_dep in other_markers_dependencies[1:]:
@@ -639,7 +665,9 @@ def fmt_warning(d: Dependency) -> str:
dep_other.set_constraint(
dep_other.constraint.intersect(dep_any.constraint)
)
- elif not inverted_marker.is_empty():
+ elif not inverted_marker.is_empty() and self._python_constraint.allows_any(
+ get_python_constraint_from_marker(inverted_marker)
+ ):
# if there is no any marker dependency
# and the inverted marker is not empty,
# a dependency with the inverted union of all markers is required
@@ -651,22 +679,22 @@ def fmt_warning(d: Dependency) -> str:
#
# the last dependency would be missed without this,
# because the intersection with both foo dependencies is empty
- inverted_marker_dep = _deps[0].with_constraint(EmptyConstraint())
+ inverted_marker_dep = deps[0].with_constraint(EmptyConstraint())
inverted_marker_dep.marker = inverted_marker
- _deps.append(inverted_marker_dep)
+ deps.append(inverted_marker_dep)
overrides = []
- overrides_marker_intersection = AnyMarker()
+ overrides_marker_intersection: BaseMarker = AnyMarker()
for dep_overrides in self._overrides.values():
- for _dep in dep_overrides.values():
+ for dep in dep_overrides.values():
overrides_marker_intersection = (
- overrides_marker_intersection.intersect(_dep.marker)
+ overrides_marker_intersection.intersect(dep.marker)
)
- for _dep in _deps:
- if not overrides_marker_intersection.intersect(_dep.marker).is_empty():
+ for dep in deps:
+ if not overrides_marker_intersection.intersect(dep.marker).is_empty():
current_overrides = self._overrides.copy()
package_overrides = current_overrides.get(package, {}).copy()
- package_overrides.update({_dep.name: _dep})
+ package_overrides.update({dep.name: dep})
current_overrides.update({package: package_overrides})
overrides.append(current_overrides)
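Duplicate detection in `complete_package` now keys on the full source co-ordinates rather than the name alone, and the override bookkeeping above reuses those grouped entries. A standalone sketch of the grouping step, assuming the `poetry-core` `Dependency` attributes used in the hunk (`complete_name`, `source_type`, `source_url`, `source_reference`):

```python
from __future__ import annotations

from collections import defaultdict


# Simplified sketch of the new duplicate bucketing in complete_package():
# two requirements on the same name but with different sources (say PyPI vs.
# a git URL) now land in different buckets and are no longer merged.
def group_duplicate_dependencies(dependencies):
    duplicates = defaultdict(list)
    for dep in dependencies:
        key = (dep.complete_name, dep.source_type, dep.source_url, dep.source_reference)
        duplicates[key].append(dep)
    return duplicates
```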
@@ -802,7 +830,9 @@ def progress(self) -> Iterator[None]:
self._io.write_line("Resolving dependencies...")
yield
else:
- indicator = Indicator(self._io, "{message} ({elapsed:2s})")
+ indicator = Indicator(
+ self._io, "{message}{context}({elapsed:2s})"
+ )
with indicator.auto(
"Resolving dependencies...",
@@ -812,6 +842,31 @@ def progress(self) -> Iterator[None]:
self._in_progress = False
+ def _merge_dependencies_by_constraint(
+ self, dependencies: Iterable[Dependency]
+ ) -> list[Dependency]:
+ by_constraint: dict[VersionConstraint, list[Dependency]] = defaultdict(list)
+ for dep in dependencies:
+ by_constraint[dep.constraint].append(dep)
+ for constraint, _deps in by_constraint.items():
+ new_markers = []
+ for dep in _deps:
+ marker = dep.marker.without_extras()
+ if marker.is_any():
+ # No marker or only extras
+ continue
+
+ new_markers.append(marker)
+
+ if not new_markers:
+ continue
+
+ dep = _deps[0]
+ dep.marker = dep.marker.union(MarkerUnion(*new_markers))
+ by_constraint[constraint] = [dep]
+
+ return [value[0] for value in by_constraint.values()]
+
def _merge_dependencies_by_marker(
self, dependencies: Iterable[Dependency]
) -> list[Dependency]:
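`_merge_dependencies_by_constraint`, added above, extracts the constraint-merging loop that previously lived inline in `complete_package`. A toy model of its effect, with plain strings standing in for poetry-core's constraint and marker objects:

```python
from __future__ import annotations

from collections import defaultdict


# Toy model of _merge_dependencies_by_constraint(): entries sharing a version
# constraint collapse into one entry whose marker becomes the union of the
# individual, non-trivial markers.
def merge_by_constraint(deps: list[tuple[str, str]]) -> list[tuple[str, str]]:
    by_constraint: dict[str, list[str]] = defaultdict(list)
    for constraint, marker in deps:
        by_constraint[constraint].append(marker)

    merged = []
    for constraint, markers in by_constraint.items():
        non_trivial = [m for m in markers if m != "*"]
        marker = " or ".join(non_trivial) if non_trivial else "*"
        merged.append((constraint, marker))
    return merged


print(merge_by_constraint([
    (">=1.0", 'python_version < "3.8"'),
    (">=1.0", 'sys_platform == "win32"'),
]))
# [('>=1.0', 'python_version < "3.8" or sys_platform == "win32"')]
```

The real helper does the same with `MarkerUnion` over `Dependency.marker`, keeping one representative dependency per constraint.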
diff --git a/src/poetry/puzzle/solver.py b/src/poetry/puzzle/solver.py
index 47ceaeb9771..f4a441f20f3 100644
--- a/src/poetry/puzzle/solver.py
+++ b/src/poetry/puzzle/solver.py
@@ -5,10 +5,10 @@
from collections import defaultdict
from contextlib import contextmanager
from typing import TYPE_CHECKING
-from typing import Callable
from typing import FrozenSet
from typing import Iterator
from typing import Tuple
+from typing import TypeVar
try:
@@ -49,7 +49,7 @@ def __init__(
locked: Repository,
io: IO,
provider: Provider | None = None,
- ):
+ ) -> None:
self._package = package
self._pool = pool
self._installed = installed
@@ -60,7 +60,7 @@ def __init__(
provider = Provider(self._package, self._pool, self._io)
self._provider = provider
- self._overrides: list[dict] = []
+ self._overrides: list[dict[DependencyPackage, dict[str, Dependency]]] = []
@property
def provider(self) -> Provider:
@@ -71,7 +71,7 @@ def use_environment(self, env: Env) -> Iterator[None]:
with self.provider.use_environment(env):
yield
- def solve(self, use_latest: list[str] = None) -> Transaction:
+ def solve(self, use_latest: list[str] | None = None) -> Transaction:
from poetry.puzzle.transaction import Transaction
with self._provider.progress():
@@ -97,7 +97,9 @@ def solve(self, use_latest: list[str] = None) -> Transaction:
)
def solve_in_compatibility_mode(
- self, overrides: tuple[dict, ...], use_latest: list[str] = None
+ self,
+ overrides: tuple[dict[DependencyPackage, dict[str, Dependency]], ...],
+ use_latest: list[str] | None = None,
) -> tuple[list[Package], list[int]]:
packages = []
@@ -125,17 +127,22 @@ def solve_in_compatibility_mode(
return packages, depths
- def _solve(self, use_latest: list[str] = None) -> tuple[list[Package], list[int]]:
+ def _solve(
+ self, use_latest: list[str] | None = None
+ ) -> tuple[list[Package], list[int]]:
if self._provider._overrides:
self._overrides.append(self._provider._overrides)
- locked = defaultdict(list)
+ locked: dict[str, list[DependencyPackage]] = defaultdict(list)
for package in self._locked.packages:
locked[package.name].append(
DependencyPackage(package.to_dependency(), package)
)
- for packages in locked.values():
- packages.sort(key=lambda package: package.version, reverse=True)
+ for dependency_packages in locked.values():
+ dependency_packages.sort(
+ key=lambda p: p.package.version, # type: ignore[no-any-return]
+ reverse=True,
+ )
try:
result = resolve_version(
@@ -148,11 +155,8 @@ def _solve(self, use_latest: list[str] = None) -> tuple[list[Package], list[int]
except SolveFailure as e:
raise SolverProblemError(e)
- results = dict(
- depth_first_search(
- PackageNode(self._package, packages), aggregate_package_nodes
- )
- )
+ combined_nodes = depth_first_search(PackageNode(self._package, packages))
+ results = dict(aggregate_package_nodes(nodes) for nodes in combined_nodes)
# Merging feature packages with base packages
final_packages = []
@@ -183,6 +187,8 @@ def _solve(self, use_latest: list[str] = None) -> tuple[list[Package], list[int]
DFSNodeID = Tuple[str, FrozenSet[str], bool]
+T = TypeVar("T", bound="DFSNode")
+
class DFSNode:
def __init__(self, id: DFSNodeID, name: str, base_name: str) -> None:
@@ -190,7 +196,7 @@ def __init__(self, id: DFSNodeID, name: str, base_name: str) -> None:
self.name = name
self.base_name = base_name
- def reachable(self) -> list:
+ def reachable(self: T) -> list[T]:
return []
def visit(self, parents: list[PackageNode]) -> None:
@@ -200,9 +206,7 @@ def __str__(self) -> str:
return str(self.id)
-def depth_first_search(
- source: PackageNode, aggregator: Callable
-) -> list[tuple[Package, int]]:
+def depth_first_search(source: PackageNode) -> list[list[PackageNode]]:
back_edges: dict[DFSNodeID, list[PackageNode]] = defaultdict(list)
visited: set[DFSNodeID] = set()
topo_sorted_nodes: list[PackageNode] = []
@@ -210,18 +214,18 @@ def depth_first_search(
dfs_visit(source, back_edges, visited, topo_sorted_nodes)
# Combine the nodes by name
- combined_nodes = defaultdict(list)
+ combined_nodes: dict[str, list[PackageNode]] = defaultdict(list)
for node in topo_sorted_nodes:
node.visit(back_edges[node.id])
combined_nodes[node.name].append(node)
- combined_topo_sorted_nodes = [
+ combined_topo_sorted_nodes: list[list[PackageNode]] = [
combined_nodes.pop(node.name)
for node in topo_sorted_nodes
if node.name in combined_nodes
]
- return [aggregator(nodes) for nodes in combined_topo_sorted_nodes]
+ return combined_topo_sorted_nodes
def dfs_visit(
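`depth_first_search` no longer takes an aggregator callable; it returns the grouped `PackageNode` lists and `_solve` applies `aggregate_package_nodes` itself, which keeps both functions independently typeable. A generic, self-contained illustration of that decoupling (the names and grouping rule here are illustrative, not Poetry's):

```python
from __future__ import annotations

from collections import defaultdict
from typing import Iterable


# Generic illustration of the decoupling: the traversal only groups nodes,
# and the caller decides how to aggregate each group. Grouping "foo[extra]"
# with "foo" loosely mirrors how feature packages join their base package.
def group_by_base_name(nodes: Iterable[str]) -> list[list[str]]:
    groups: dict[str, list[str]] = defaultdict(list)
    for node in nodes:
        groups[node.split("[")[0]].append(node)
    return list(groups.values())


def aggregate(group: list[str]) -> tuple[str, int]:
    return group[0].split("[")[0], len(group)


combined = group_by_base_name(["foo", "foo[extra]", "bar"])
results = dict(aggregate(group) for group in combined)
print(results)  # {'foo': 2, 'bar': 1}
```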
diff --git a/src/poetry/puzzle/transaction.py b/src/poetry/puzzle/transaction.py
index e54fd964b08..ffe86e3fa8d 100644
--- a/src/poetry/puzzle/transaction.py
+++ b/src/poetry/puzzle/transaction.py
@@ -6,7 +6,7 @@
if TYPE_CHECKING:
from poetry.core.packages.package import Package
- from poetry.installation.operations import OperationTypes
+ from poetry.installation.operations.operation import Operation
class Transaction:
@@ -28,12 +28,12 @@ def __init__(
def calculate_operations(
self, with_uninstalls: bool = True, synchronize: bool = False
- ) -> list[OperationTypes]:
- from poetry.installation.operations.install import Install
- from poetry.installation.operations.uninstall import Uninstall
- from poetry.installation.operations.update import Update
+ ) -> list[Operation]:
+ from poetry.installation.operations import Install
+ from poetry.installation.operations import Uninstall
+ from poetry.installation.operations import Update
- operations: list[OperationTypes] = []
+ operations: list[Operation] = []
for result_package, priority in self._result_packages:
installed = False
diff --git a/src/poetry/py.typed b/src/poetry/py.typed
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/src/poetry/repositories/__init__.py b/src/poetry/repositories/__init__.py
index af0d42a9e4a..ca887e51aa5 100644
--- a/src/poetry/repositories/__init__.py
+++ b/src/poetry/repositories/__init__.py
@@ -2,3 +2,6 @@
from poetry.repositories.pool import Pool
from poetry.repositories.repository import Repository
+
+
+__all__ = ["Pool", "Repository"]
diff --git a/src/poetry/repositories/cached.py b/src/poetry/repositories/cached.py
index 12d57fbacf4..8f9a5ed9b53 100644
--- a/src/poetry/repositories/cached.py
+++ b/src/poetry/repositories/cached.py
@@ -3,8 +3,8 @@
from abc import ABC
from abc import abstractmethod
from typing import TYPE_CHECKING
+from typing import Any
-from cachecontrol.caches import FileCache
from cachy import CacheManager
from poetry.core.semver.helpers import parse_constraint
@@ -21,7 +21,7 @@
class CachedRepository(Repository, ABC):
CACHE_VERSION = parse_constraint("1.0.0")
- def __init__(self, name: str, cache_group: str, disable_cache: bool = False):
+ def __init__(self, name: str, disable_cache: bool = False) -> None:
super().__init__(name)
self._disable_cache = disable_cache
self._cache_dir = REPOSITORY_CACHE_DIR / name
@@ -36,10 +36,9 @@ def __init__(self, name: str, cache_group: str, disable_cache: bool = False):
},
}
)
- self._cache_control_cache = FileCache(str(self._cache_dir / cache_group))
@abstractmethod
- def _get_release_info(self, name: str, version: str) -> dict:
+ def _get_release_info(self, name: str, version: str) -> dict[str, Any]:
raise NotImplementedError()
def get_release_info(self, name: str, version: str) -> PackageInfo:
@@ -75,6 +74,6 @@ def package(
self,
name: str,
version: str,
- extras: (list | None) = None,
+ extras: list[str] | None = None,
) -> Package:
return self.get_release_info(name, version).to_package(name=name, extras=extras)
diff --git a/src/poetry/repositories/http.py b/src/poetry/repositories/http.py
index 4561e6fff02..f6cc6fea204 100644
--- a/src/poetry/repositories/http.py
+++ b/src/poetry/repositories/http.py
@@ -1,36 +1,35 @@
from __future__ import annotations
-import contextlib
import hashlib
import os
import urllib
+import urllib.parse
from abc import ABC
from collections import defaultdict
from pathlib import Path
from typing import TYPE_CHECKING
-from urllib.parse import quote
+from typing import Any
import requests
-import requests.auth
-from cachecontrol import CacheControl
from poetry.core.packages.dependency import Dependency
from poetry.core.packages.utils.link import Link
+from poetry.core.semver.helpers import parse_constraint
+from poetry.core.utils.helpers import temporary_directory
from poetry.core.version.markers import parse_marker
-from poetry.config.config import Config
from poetry.repositories.cached import CachedRepository
from poetry.repositories.exceptions import PackageNotFound
from poetry.repositories.exceptions import RepositoryError
from poetry.repositories.link_sources.html import HTMLPage
from poetry.utils.authenticator import Authenticator
from poetry.utils.helpers import download_file
-from poetry.utils.helpers import temporary_directory
from poetry.utils.patterns import wheel_file_re
if TYPE_CHECKING:
+ from poetry.config.config import Config
from poetry.inspection.info import PackageInfo
@@ -41,41 +40,19 @@ def __init__(
url: str,
config: Config | None = None,
disable_cache: bool = False,
- cert: Path | None = None,
- client_cert: Path | None = None,
) -> None:
- super().__init__(name, "_http", disable_cache)
+ super().__init__(name, disable_cache)
self._url = url
- self._client_cert = client_cert
- self._cert = cert
-
self._authenticator = Authenticator(
- config=config or Config(use_environment=True)
- )
-
- self._session = CacheControl(
- self._authenticator.session, cache=self._cache_control_cache
+ config=config,
+ cache_id=name,
+ disable_cache=disable_cache,
)
-
- username, password = self._authenticator.get_credentials_for_url(self._url)
- if username is not None and password is not None:
- self._authenticator.session.auth = requests.auth.HTTPBasicAuth(
- username, password
- )
-
- if self._cert:
- self._authenticator.session.verify = str(self._cert)
-
- if self._client_cert:
- self._authenticator.session.cert = str(self._client_cert)
+ self._authenticator.add_repository(name, url)
@property
- def session(self) -> CacheControl:
- return self._session
-
- def __del__(self) -> None:
- with contextlib.suppress(AttributeError):
- self._session.close()
+ def session(self) -> Authenticator:
+ return self._authenticator
@property
def url(self) -> str:
@@ -83,22 +60,21 @@ def url(self) -> str:
@property
def cert(self) -> Path | None:
- return self._cert
+ cert = self._authenticator.get_certs_for_url(self.url).get("verify")
+ if cert:
+ return Path(cert)
+ return None
@property
def client_cert(self) -> Path | None:
- return self._client_cert
+ cert = self._authenticator.get_certs_for_url(self.url).get("cert")
+ if cert:
+ return Path(cert)
+ return None
@property
def authenticated_url(self) -> str:
- if not self._session.auth:
- return self.url
-
- parsed = urllib.parse.urlparse(self.url)
- username = quote(self._session.auth.username, safe="")
- password = quote(self._session.auth.password, safe="")
-
- return f"{parsed.scheme}://{username}:{password}@{parsed.netloc}{parsed.path}"
+ return self._authenticator.authenticated_url(url=self.url)
def _download(self, url: str, dest: str) -> None:
return download_file(url, dest, session=self.session)
@@ -173,6 +149,14 @@ def _get_info_from_urls(self, urls: dict[str, list[str]]) -> PackageInfo:
info = self._get_info_from_wheel(universal_python2_wheel)
py3_info = self._get_info_from_wheel(universal_python3_wheel)
+
+ if info.requires_python or py3_info.requires_python:
+ info.requires_python = str(
+ parse_constraint(info.requires_python or "^2.7").union(
+ parse_constraint(py3_info.requires_python or "^3")
+ )
+ )
+
if py3_info.requires_dist:
if not info.requires_dist:
info.requires_dist = py3_info.requires_dist
@@ -225,14 +209,14 @@ def _get_info_from_urls(self, urls: dict[str, list[str]]) -> PackageInfo:
return self._get_info_from_sdist(urls["sdist"][0])
- def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict:
+ def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict[str, Any]:
if not links:
raise PackageNotFound(
f'No valid distribution links found for package: "{data.name}" version:'
f' "{data.version}"'
)
urls = defaultdict(list)
- files = []
+ files: list[dict[str, Any]] = []
for link in links:
if link.is_wheel:
urls["bdist_wheel"].append(link.url)
@@ -244,7 +228,8 @@ def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict:
file_hash = f"{link.hash_name}:{link.hash}" if link.hash else None
if not link.hash or (
- link.hash_name not in ("sha256", "sha384", "sha512")
+ link.hash_name is not None
+ and link.hash_name not in ("sha256", "sha384", "sha512")
and hasattr(hashlib, link.hash_name)
):
with temporary_directory() as temp_dir:
@@ -284,7 +269,7 @@ def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict:
def _get_response(self, endpoint: str) -> requests.Response | None:
url = self._url + endpoint
try:
- response = self.session.get(url)
+ response: requests.Response = self.session.get(url, raise_for_status=False)
if response.status_code in (401, 403):
self._log(
f"Authorization error accessing {url}",
diff --git a/src/poetry/repositories/installed_repository.py b/src/poetry/repositories/installed_repository.py
index e1568df41c4..d7a6d9abb6e 100644
--- a/src/poetry/repositories/installed_repository.py
+++ b/src/poetry/repositories/installed_repository.py
@@ -77,13 +77,10 @@ def get_package_paths(cls, env: Env, name: str) -> set[Path]:
@classmethod
def get_package_vcs_properties_from_path(cls, src: Path) -> tuple[str, str, str]:
- from poetry.core.vcs.git import Git
+ from poetry.vcs.git import Git
- git = Git()
- revision = git.rev_parse("HEAD", src).strip()
- url = git.remote_url(src)
-
- return "git", url, revision
+ info = Git.info(repo=src)
+ return "git", info.origin, info.revision
@classmethod
def is_vcs_package(cls, package: Path | Package, env: Env) -> bool:
@@ -106,7 +103,7 @@ def create_package_from_distribution(
) -> Package:
# We first check for a direct_url.json file to determine
# the type of package.
- path = Path(str(distribution._path))
+ path = Path(str(distribution._path)) # type: ignore[attr-defined]
if (
path.name.endswith(".dist-info")
@@ -166,13 +163,17 @@ def create_package_from_distribution(
source_reference=source_reference,
source_resolved_reference=source_resolved_reference,
)
- package.description = distribution.metadata.get("summary", "")
+
+ package.description = distribution.metadata.get( # type: ignore[attr-defined]
+ "summary",
+ "",
+ )
return package
@classmethod
def create_package_from_pep610(cls, distribution: metadata.Distribution) -> Package:
- path = Path(str(distribution._path))
+ path = Path(str(distribution._path)) # type: ignore[attr-defined]
source_type = None
source_url = None
source_reference = None
@@ -216,7 +217,10 @@ def create_package_from_pep610(cls, distribution: metadata.Distribution) -> Pack
develop=develop,
)
- package.description = distribution.metadata.get("summary", "")
+ package.description = distribution.metadata.get( # type: ignore[attr-defined]
+ "summary",
+ "",
+ )
return package
@@ -232,15 +236,17 @@ def load(cls, env: Env, with_dependencies: bool = False) -> InstalledRepository:
for entry in reversed(env.sys_path):
for distribution in sorted(
- metadata.distributions(path=[entry]),
- key=lambda d: str(d._path),
+ metadata.distributions( # type: ignore[no-untyped-call]
+ path=[entry],
+ ),
+ key=lambda d: str(d._path), # type: ignore[attr-defined]
):
name = canonicalize_name(distribution.metadata["name"])
if name in seen:
continue
- path = Path(str(distribution._path))
+ path = Path(str(distribution._path)) # type: ignore[attr-defined]
try:
path.relative_to(_VENDORS)
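VCS metadata for installed packages is now read through the new `poetry.vcs.git.Git.info` helper instead of shelling out via `poetry-core`'s `Git`. Judging only from this call site, the returned object exposes `origin` and `revision`; a hedged usage sketch with a placeholder path:

```python
# Hedged usage sketch: Git.info is the new helper in poetry.vcs.git and, per
# the call site above, returns an object with `origin` and `revision`
# attributes. The repository path is a placeholder.
from pathlib import Path

from poetry.vcs.git import Git

info = Git.info(repo=Path("/path/to/local/checkout"))
source_type, source_url, source_reference = "git", info.origin, info.revision
print(source_type, source_url, source_reference)
```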
diff --git a/src/poetry/repositories/legacy_repository.py b/src/poetry/repositories/legacy_repository.py
index a065ab5bafc..11d14af1cb3 100644
--- a/src/poetry/repositories/legacy_repository.py
+++ b/src/poetry/repositories/legacy_repository.py
@@ -1,6 +1,7 @@
from __future__ import annotations
from typing import TYPE_CHECKING
+from typing import Any
from poetry.core.packages.package import Package
from poetry.core.semver.version import Version
@@ -13,8 +14,6 @@
if TYPE_CHECKING:
- from pathlib import Path
-
from poetry.core.packages.dependency import Dependency
from poetry.core.packages.utils.link import Link
@@ -28,15 +27,11 @@ def __init__(
url: str,
config: Config | None = None,
disable_cache: bool = False,
- cert: Path | None = None,
- client_cert: Path | None = None,
) -> None:
if name == "pypi":
raise ValueError("The name [pypi] is reserved for repositories")
- super().__init__(
- name, url.rstrip("/"), config, disable_cache, cert, client_cert
- )
+ super().__init__(name, url.rstrip("/"), config, disable_cache)
def find_packages(self, dependency: Dependency) -> list[Package]:
packages = []
@@ -126,7 +121,7 @@ def find_links_for_package(self, package: Package) -> list[Link]:
return list(page.links_for_version(package.name, package.version))
- def _get_release_info(self, name: str, version: str) -> dict:
+ def _get_release_info(self, name: str, version: str) -> dict[str, Any]:
page = self._get_page(f"/{canonicalize_name(name).replace('.', '-')}/")
if page is None:
raise PackageNotFound(f'No package named "{name}"')
diff --git a/src/poetry/repositories/link_sources/base.py b/src/poetry/repositories/link_sources/base.py
index ff3dccacf9a..a20f19f96ca 100644
--- a/src/poetry/repositories/link_sources/base.py
+++ b/src/poetry/repositories/link_sources/base.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-import contextlib
+import logging
import re
from abc import abstractmethod
@@ -19,6 +19,9 @@
from poetry.core.packages.utils.link import Link
+logger = logging.getLogger(__name__)
+
+
class LinkSource:
VERSION_REGEX = re.compile(r"(?i)([a-z0-9_\-.]+?)-(?=\d)([a-z0-9_.!+-]+)")
CLEAN_REGEX = re.compile(r"[^a-z0-9$&+,/:;=?@.#%_\\|-]", re.I)
@@ -46,7 +49,7 @@ def versions(self, name: str) -> Iterator[Version]:
for link in self.links:
pkg = self.link_package_data(link)
- if pkg.name == name and pkg.version and pkg.version not in seen:
+ if pkg and pkg.name == name and pkg.version not in seen:
seen.add(pkg.version)
yield pkg.version
@@ -55,7 +58,7 @@ def packages(self) -> Iterator[Package]:
for link in self.links:
pkg = self.link_package_data(link)
- if pkg.name and pkg.version:
+ if pkg:
yield pkg
@property
@@ -63,23 +66,34 @@ def packages(self) -> Iterator[Package]:
def links(self) -> Iterator[Link]:
raise NotImplementedError()
- def link_package_data(self, link: Link) -> Package:
- name, version = None, None
+ @classmethod
+ def link_package_data(cls, link: Link) -> Package | None:
+ name, version_string, version = None, None, None
m = wheel_file_re.match(link.filename) or sdist_file_re.match(link.filename)
if m:
name = canonicalize_name(m.group("name"))
- version = m.group("ver")
+ version_string = m.group("ver")
else:
info, ext = link.splitext()
- match = self.VERSION_REGEX.match(info)
+ match = cls.VERSION_REGEX.match(info)
if match:
- version = match.group(2)
-
- with contextlib.suppress(ValueError):
- version = Version.parse(version)
-
- return Package(name, version, source_url=link.url)
+ name = match.group(1)
+ version_string = match.group(2)
+
+ if version_string:
+ try:
+ version = Version.parse(version_string)
+ except ValueError:
+ logger.debug(
+ "Skipping url (%s) due to invalid version (%s)", link.url, version
+ )
+ return None
+
+ pkg = None
+ if name and version:
+ pkg = Package(name, version, source_url=link.url)
+ return pkg
def links_for_version(self, name: str, version: Version) -> Iterator[Link]:
name = canonicalize_name(name)
@@ -87,7 +101,7 @@ def links_for_version(self, name: str, version: Version) -> Iterator[Link]:
for link in self.links:
pkg = self.link_package_data(link)
- if pkg.name == name and pkg.version and pkg.version == version:
+ if pkg and pkg.name == name and pkg.version == version:
yield link
def clean_link(self, url: str) -> str:
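`link_package_data` is now a classmethod that returns `None` (with a debug log) whenever a usable name or version cannot be extracted, so the callers above filter on truthiness instead of poking at half-built `Package` objects. A small self-contained sketch of that filter-on-`None` pattern; the parsing rule below is deliberately naive and not the regex-based logic used by `LinkSource`:

```python
from __future__ import annotations

from typing import Iterator


def parse_filename(filename: str) -> tuple[str, str] | None:
    name, _, version = filename.rpartition("-")
    if not name or not version[:1].isdigit():
        return None  # unparseable entry: skip it instead of yielding junk
    return name, version


def versions(filenames: list[str], wanted: str) -> Iterator[str]:
    for filename in filenames:
        pkg = parse_filename(filename)
        if pkg and pkg[0] == wanted:
            yield pkg[1]


print(list(versions(["foo-1.0", "foo-2.0", "not_a_dist"], "foo")))  # ['1.0', '2.0']
```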
diff --git a/src/poetry/repositories/pool.py b/src/poetry/repositories/pool.py
index 40acfc4c026..c396052da1d 100644
--- a/src/poetry/repositories/pool.py
+++ b/src/poetry/repositories/pool.py
@@ -1,6 +1,5 @@
from __future__ import annotations
-from contextlib import suppress
from typing import TYPE_CHECKING
from poetry.repositories.exceptions import PackageNotFound
@@ -123,7 +122,11 @@ def has_package(self, package: Package) -> bool:
raise NotImplementedError()
def package(
- self, name: str, version: str, extras: list[str] = None, repository: str = None
+ self,
+ name: str,
+ version: str,
+ extras: list[str] | None = None,
+ repository: str | None = None,
) -> Package:
if repository is not None:
repository = repository.lower()
@@ -136,19 +139,15 @@ def package(
raise ValueError(f'Repository "{repository}" does not exist.')
if repository is not None and not self._ignore_repository_names:
- with suppress(PackageNotFound):
- return self.repository(repository).package(name, version, extras=extras)
- else:
- for repo in self._repositories:
- try:
- package = repo.package(name, version, extras=extras)
- except PackageNotFound:
- continue
+ return self.repository(repository).package(name, version, extras=extras)
- if package:
- self._packages.append(package)
+ for repo in self._repositories:
+ try:
+ package = repo.package(name, version, extras=extras)
+ except PackageNotFound:
+ continue
- return package
+ return package
raise PackageNotFound(f"Package {name} ({version}) not found.")
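With `suppress(PackageNotFound)` gone, asking a named repository for a package lets `PackageNotFound` propagate (relying on `Repository.package` now raising it, see below), while the unnamed path still walks the pool in order. A condensed sketch of the resulting control flow, ignoring the `_ignore_repository_names` flag for brevity:

```python
# Condensed sketch of Pool.package() after this change; PackageNotFound is
# Poetry's real exception, everything else is simplified for illustration.
from poetry.repositories.exceptions import PackageNotFound


def pool_package(repositories: dict, name: str, version: str, repository=None):
    if repository is not None:
        # A named repository either returns the package or raises
        # PackageNotFound; the previous suppress() silently swallowed it.
        return repositories[repository].package(name, version)

    for repo in repositories.values():
        try:
            return repo.package(name, version)
        except PackageNotFound:
            continue

    raise PackageNotFound(f"Package {name} ({version}) not found.")
```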
diff --git a/src/poetry/repositories/pypi_repository.py b/src/poetry/repositories/pypi_repository.py
index cb35e69c69b..97089ca2750 100644
--- a/src/poetry/repositories/pypi_repository.py
+++ b/src/poetry/repositories/pypi_repository.py
@@ -4,6 +4,7 @@
from collections import defaultdict
from typing import TYPE_CHECKING
+from typing import Any
import requests
@@ -128,7 +129,7 @@ def search(self, query: str) -> list[Package]:
return results
- def get_package_info(self, name: str) -> dict:
+ def get_package_info(self, name: str) -> dict[str, Any]:
"""
Return the package information given its name.
@@ -138,11 +139,12 @@ def get_package_info(self, name: str) -> dict:
if self._disable_cache:
return self._get_package_info(name)
- return self._cache.store("packages").remember_forever(
+ package_info: dict[str, Any] = self._cache.store("packages").remember_forever(
name, lambda: self._get_package_info(name)
)
+ return package_info
- def _get_package_info(self, name: str) -> dict:
+ def _get_package_info(self, name: str) -> dict[str, Any]:
data = self._get(f"pypi/{name}/json")
if data is None:
raise PackageNotFound(f"Package [{name}] not found.")
@@ -226,16 +228,17 @@ def _get_release_info(
return data.asdict()
- def _get(self, endpoint: str) -> dict | None:
+ def _get(self, endpoint: str) -> dict[str, Any] | None:
try:
json_response = self.session.get(self._base_url + endpoint)
except requests.exceptions.TooManyRedirects:
# Cache control redirect loop.
# We try to remove the cache and try again
- self._cache_control_cache.delete(self._base_url + endpoint)
+ self.session.delete_cache(self._base_url + endpoint)
json_response = self.session.get(self._base_url + endpoint)
if json_response.status_code == 404:
return None
- return json_response.json()
+ json: dict[str, Any] = json_response.json()
+ return json
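The redirect-loop workaround now clears the cached entry through the authenticator's `delete_cache` rather than reaching into a `FileCache` directly. A condensed sketch of the retry pattern, where `session` stands for the `Authenticator` exposed as `HTTPRepository.session` in this changeset:

```python
import requests


def get_json(session, url: str):
    try:
        response = session.get(url)
    except requests.exceptions.TooManyRedirects:
        # Cache-control redirect loop: drop the cached entry and retry once.
        session.delete_cache(url)
        response = session.get(url)
    if response.status_code == 404:
        return None
    return response.json()
```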
diff --git a/src/poetry/repositories/repository.py b/src/poetry/repositories/repository.py
index 7a91bd23d5d..63e169b9e3f 100644
--- a/src/poetry/repositories/repository.py
+++ b/src/poetry/repositories/repository.py
@@ -8,16 +8,19 @@
from poetry.core.semver.version_constraint import VersionConstraint
from poetry.core.semver.version_range import VersionRange
+from poetry.repositories.exceptions import PackageNotFound
+
if TYPE_CHECKING:
from poetry.core.packages.dependency import Dependency
from poetry.core.packages.package import Package
from poetry.core.packages.utils.link import Link
- from poetry.core.semver.helpers import VersionTypes
class Repository:
- def __init__(self, name: str = None, packages: list[Package] = None) -> None:
+ def __init__(
+ self, name: str | None = None, packages: list[Package] | None = None
+ ) -> None:
self._name = name
self._packages: list[Package] = []
@@ -94,7 +97,7 @@ def search(self, query: str) -> list[Package]:
@staticmethod
def _get_constraints_from_dependency(
dependency: Dependency,
- ) -> tuple[VersionTypes, bool]:
+ ) -> tuple[VersionConstraint, bool]:
constraint = dependency.constraint
if constraint is None:
constraint = "*"
@@ -115,7 +118,7 @@ def _get_constraints_from_dependency(
def _log(self, msg: str, level: str = "info") -> None:
logger = logging.getLogger(f"{__name__}.{self.__class__.__name__}")
- getattr(logger, level)(f"{self.name}: {msg}")
+ getattr(logger, level)(f"Source ({self.name}): {msg}")
def __len__(self) -> int:
return len(self._packages)
@@ -131,3 +134,5 @@ def package(
for package in self.packages:
if name == package.name and package.version.text == version:
return package.clone()
+
+ raise PackageNotFound(f"Package {name} ({version}) not found.")
diff --git a/src/poetry/repositories/single_page_repository.py b/src/poetry/repositories/single_page_repository.py
new file mode 100644
index 00000000000..e8de0b141f8
--- /dev/null
+++ b/src/poetry/repositories/single_page_repository.py
@@ -0,0 +1,15 @@
+from __future__ import annotations
+
+from poetry.repositories.legacy_repository import LegacyRepository
+from poetry.repositories.link_sources.html import SimpleRepositoryPage
+
+
+class SinglePageRepository(LegacyRepository):
+ def _get_page(self, endpoint: str | None = None) -> SimpleRepositoryPage | None:
+ """
+ Single page repositories only have one page irrespective of endpoint.
+ """
+ response = self._get_response("")
+ if not response:
+ return None
+ return SimpleRepositoryPage(response.url, response.text)
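`SinglePageRepository` reuses the legacy repository machinery but always fetches the index root, which suits sources that publish every link on one HTML page. A hedged usage sketch; the URL is a placeholder and `_get_page` is an internal method shown only for illustration:

```python
# Hypothetical usage of SinglePageRepository; the URL stands in for a
# "find-links"-style index that serves all links from a single page.
from poetry.repositories.single_page_repository import SinglePageRepository

repo = SinglePageRepository("demo-page", url="https://example.com/packages/")
# Whatever endpoint is requested, the same root page is fetched and parsed.
page = repo._get_page("/irrelevant-endpoint/")
```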
diff --git a/src/poetry/utils/_compat.py b/src/poetry/utils/_compat.py
index fe0ef434d19..193c72d2172 100644
--- a/src/poetry/utils/_compat.py
+++ b/src/poetry/utils/_compat.py
@@ -9,12 +9,12 @@
# compatibility for python <3.8
import importlib_metadata as metadata
else:
- from importlib import metadata # noqa: F401, TC002
+ from importlib import metadata
WINDOWS = sys.platform == "win32"
-def decode(string: str, encodings: list[str] | None = None) -> str:
+def decode(string: bytes | str, encodings: list[str] | None = None) -> str:
if not isinstance(string, bytes):
return string
@@ -49,3 +49,6 @@ def list_to_shell_command(cmd: list[str]) -> str:
f'"{token}"' if " " in token and token[0] not in {"'", '"'} else token
for token in cmd
)
+
+
+__all__ = ["WINDOWS", "decode", "encode", "list_to_shell_command", "metadata", "to_str"]
diff --git a/src/poetry/utils/appdirs.py b/src/poetry/utils/appdirs.py
deleted file mode 100644
index 2d2f3f21985..00000000000
--- a/src/poetry/utils/appdirs.py
+++ /dev/null
@@ -1,231 +0,0 @@
-"""
-This code was taken from https://github.com/ActiveState/appdirs and modified
-to suit our purposes.
-"""
-from __future__ import annotations
-
-import os
-import sys
-
-
-WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt")
-
-
-def expanduser(path: str) -> str:
- """
- Expand ~ and ~user constructions.
-
- Includes a workaround for http://bugs.python.org/issue14768
- """
- expanded = os.path.expanduser(path)
- if path.startswith("~/") and expanded.startswith("//"):
- expanded = expanded[1:]
- return expanded
-
-
-def user_cache_dir(appname: str) -> str:
- r"""
- Return full path to the user-specific cache dir for this application.
-
- "appname" is the name of application.
-
- Typical user cache directories are:
- macOS: ~/Library/Caches/<AppName>
- Unix: ~/.cache/<AppName> (XDG default)
- Windows: C:\Users\<username>\AppData\Local\<AppName>\Cache
-
- On Windows the only suggestion in the MSDN docs is that local settings go
- in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
- non-roaming app data dir (the default returned by `user_data_dir`). Apps
- typically put cache data somewhere *under* the given dir here. Some
- examples:
- ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
- ...\Acme\SuperApp\Cache\1.0
-
- OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
- """
- if WINDOWS:
- # Get the base path
- path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
-
- # Add our app name and Cache directory to it
- path = os.path.join(path, appname, "Cache")
- elif sys.platform == "darwin":
- # Get the base path
- path = expanduser("~/Library/Caches")
-
- # Add our app name to it
- path = os.path.join(path, appname)
- else:
- # Get the base path
- path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))
-
- # Add our app name to it
- path = os.path.join(path, appname)
-
- return path
-
-
-def user_data_dir(appname: str, roaming: bool = False) -> str:
- r"""
- Return full path to the user-specific data dir for this application.
-
- "appname" is the name of application.
- If None, just the system directory is returned.
- "roaming" (boolean, default False) can be set True to use the Windows
- roaming appdata directory. That means that for users on a Windows
- network setup for roaming profiles, this user data will be
- sync'd on login. See
-
- for a discussion of issues.
-
- Typical user data directories are:
- macOS: ~/Library/Application Support/<AppName>
- Unix: ~/.local/share/<AppName> # or in
- $XDG_DATA_HOME, if defined
- Win XP (not roaming): C:\Documents and Settings\<username>\ ...
- ...Application Data\<AppName>
- Win XP (roaming): C:\Documents and Settings\<username>\Local ...
- ...Settings\Application Data\<AppName>
- Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppName>
- Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppName>
-
- For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
- That means, by default "~/.local/share/<AppName>".
- """
- if WINDOWS:
- const = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
- return os.path.join(os.path.normpath(_get_win_folder(const)), appname)
- elif sys.platform == "darwin":
- return os.path.join(expanduser("~/Library/Application Support/"), appname)
- else:
- return os.path.join(
- os.getenv("XDG_DATA_HOME", expanduser("~/.local/share")), appname
- )
-
-
-def user_config_dir(appname: str, roaming: bool = True) -> str:
- """Return full path to the user-specific config dir for this application.
-
- "appname" is the name of application.
- If None, just the system directory is returned.
- "roaming" (boolean, default True) can be set False to not use the
- Windows roaming appdata directory. That means that for users on a
- Windows network setup for roaming profiles, this user data will be
- sync'd on login. See
-
- for a discussion of issues.
-
- Typical user data directories are:
- macOS: same as user_data_dir
- Unix: ~/.config/<AppName>
- Win *: same as user_data_dir
-
- For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
- That means, by default "~/.config/<AppName>".
- """
- if WINDOWS:
- path = user_data_dir(appname, roaming=roaming)
- elif sys.platform == "darwin":
- path = user_data_dir(appname)
- else:
- path = os.getenv("XDG_CONFIG_HOME", expanduser("~/.config"))
- path = os.path.join(path, appname)
-
- return path
-
-
-# for the discussion regarding site_config_dirs locations
-# see
-def site_config_dirs(appname: str) -> list[str]:
- r"""Return a list of potential user-shared config dirs for this application.
-
- "appname" is the name of application.
-
- Typical user config directories are:
- macOS: /Library/Application Support/<AppName>/
- Unix: /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
- $XDG_CONFIG_DIRS
- Win XP: C:\Documents and Settings\All Users\Application ...
- ...Data\<AppName>\
- Vista: (Fail! "C:\ProgramData" is a hidden *system* directory
- on Vista.)
- Win 7: Hidden, but writeable on Win 7:
- C:\ProgramData\<AppName>\
- """
- if WINDOWS:
- path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
- pathlist = [os.path.join(path, appname)]
- elif sys.platform == "darwin":
- pathlist = [os.path.join("/Library/Application Support", appname)]
- else:
- # try looking in $XDG_CONFIG_DIRS
- xdg_config_dirs = os.getenv("XDG_CONFIG_DIRS", "/etc/xdg")
- if xdg_config_dirs:
- pathlist = [
- os.path.join(expanduser(x), appname)
- for x in xdg_config_dirs.split(os.pathsep)
- ]
- else:
- pathlist = []
-
- # always look in /etc directly as well
- pathlist.append("/etc")
-
- return pathlist
-
-
-# -- Windows support functions --
-
-
-def _get_win_folder_from_registry(csidl_name: str) -> str:
- """
- This is a fallback technique at best. I'm not sure if using the
- registry for this guarantees us the correct answer for all CSIDL_*
- names.
- """
- import _winreg
-
- shell_folder_name = {
- "CSIDL_APPDATA": "AppData",
- "CSIDL_COMMON_APPDATA": "Common AppData",
- "CSIDL_LOCAL_APPDATA": "Local AppData",
- }[csidl_name]
-
- key = _winreg.OpenKey(
- _winreg.HKEY_CURRENT_USER,
- r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders",
- )
- directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
- return directory
-
-
-def _get_win_folder_with_ctypes(csidl_name: str) -> str:
- csidl_const = {
- "CSIDL_APPDATA": 26,
- "CSIDL_COMMON_APPDATA": 35,
- "CSIDL_LOCAL_APPDATA": 28,
- }[csidl_name]
-
- buf = ctypes.create_unicode_buffer(1024)
- windll = ctypes.windll # type: ignore[attr-defined]
- windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
-
- # Downgrade to short path name if have highbit chars. See
- # .
- has_high_char = any(ord(c) > 255 for c in buf)
- if has_high_char:
- buf2 = ctypes.create_unicode_buffer(1024)
- if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
- buf = buf2
-
- return buf.value
-
-
-if WINDOWS:
- try:
- import ctypes
-
- _get_win_folder = _get_win_folder_with_ctypes
- except ImportError:
- _get_win_folder = _get_win_folder_from_registry
diff --git a/src/poetry/utils/authenticator.py b/src/poetry/utils/authenticator.py
index eb25805e855..d460aee555b 100644
--- a/src/poetry/utils/authenticator.py
+++ b/src/poetry/utils/authenticator.py
@@ -1,20 +1,29 @@
from __future__ import annotations
+import contextlib
+import dataclasses
+import functools
import logging
import time
import urllib.parse
+from os.path import commonprefix
from typing import TYPE_CHECKING
from typing import Any
-from typing import Iterator
import requests
import requests.auth
import requests.exceptions
+from cachecontrol import CacheControl
+from cachecontrol.caches import FileCache
+
+from poetry.config.config import Config
from poetry.exceptions import PoetryException
+from poetry.locations import REPOSITORY_CACHE_DIR
from poetry.utils.helpers import get_cert
from poetry.utils.helpers import get_client_cert
+from poetry.utils.password_manager import HTTPAuthCredential
from poetry.utils.password_manager import PasswordManager
@@ -23,46 +32,148 @@
from cleo.io.io import IO
- from poetry.config.config import Config
+
+logger = logging.getLogger(__name__)
-logger = logging.getLogger()
+@dataclasses.dataclass
+class AuthenticatorRepositoryConfig:
+ name: str
+ url: str
+ netloc: str = dataclasses.field(init=False)
+ path: str = dataclasses.field(init=False)
+
+ def __post_init__(self) -> None:
+ parsed_url = urllib.parse.urlsplit(self.url)
+ self.netloc = parsed_url.netloc
+ self.path = parsed_url.path
+
+ def certs(self, config: Config) -> dict[str, Path | None]:
+ return {
+ "cert": get_client_cert(config, self.name),
+ "verify": get_cert(config, self.name),
+ }
+
+ @property
+ def http_credential_keys(self) -> list[str]:
+ return [self.url, self.netloc, self.name]
+
+ def get_http_credentials(
+ self, password_manager: PasswordManager, username: str | None = None
+ ) -> HTTPAuthCredential:
+ # try with the repository name via the password manager
+ credential = HTTPAuthCredential(
+ **(password_manager.get_http_auth(self.name) or {})
+ )
+
+ if credential.password is None:
+ # fallback to url and netloc based keyring entries
+ credential = password_manager.keyring.get_credential(
+ self.url, self.netloc, username=credential.username
+ )
+
+ if credential.password is not None:
+ return HTTPAuthCredential(
+ username=credential.username, password=credential.password
+ )
+
+ return credential
class Authenticator:
- def __init__(self, config: Config, io: IO | None = None) -> None:
- self._config = config
+ def __init__(
+ self,
+ config: Config | None = None,
+ io: IO | None = None,
+ cache_id: str | None = None,
+ disable_cache: bool = False,
+ ) -> None:
+ self._config = config or Config.create()
self._io = io
- self._session: requests.Session | None = None
- self._credentials: dict[str, tuple[str, str]] = {}
+ self._sessions_for_netloc: dict[str, requests.Session] = {}
+ self._credentials: dict[str, HTTPAuthCredential] = {}
self._certs: dict[str, dict[str, Path | None]] = {}
+ self._configured_repositories: dict[
+ str, AuthenticatorRepositoryConfig
+ ] | None = None
self._password_manager = PasswordManager(self._config)
+ self._cache_control = (
+ FileCache(
+ str(REPOSITORY_CACHE_DIR / (cache_id or "_default_cache") / "_http")
+ )
+ if not disable_cache
+ else None
+ )
- def _log(self, message: str, level: str = "debug") -> None:
- if self._io is not None:
- self._io.write_line(f"<{level}>{message}</{level}>")
- else:
- getattr(logger, level, logger.debug)(message)
+ @property
+ def cache(self) -> FileCache | None:
+ return self._cache_control
@property
- def session(self) -> requests.Session:
- if self._session is None:
- self._session = requests.Session()
+ def is_cached(self) -> bool:
+ return self._cache_control is not None
+
+ def create_session(self) -> requests.Session:
+ session = requests.Session()
+
+ if not self.is_cached:
+ return session
- return self._session
+ session = CacheControl(sess=session, cache=self._cache_control)
+ return session
+
+ def get_session(self, url: str | None = None) -> requests.Session:
+ if not url:
+ return self.create_session()
+
+ parsed_url = urllib.parse.urlsplit(url)
+ netloc = parsed_url.netloc
+
+ if netloc not in self._sessions_for_netloc:
+ logger.debug("Creating new session for %s", netloc)
+ self._sessions_for_netloc[netloc] = self.create_session()
+
+ return self._sessions_for_netloc[netloc]
+
+ def close(self) -> None:
+ for session in self._sessions_for_netloc.values():
+ if session is not None:
+ with contextlib.suppress(AttributeError):
+ session.close()
def __del__(self) -> None:
- if self._session is not None:
- self._session.close()
+ self.close()
+
+ def delete_cache(self, url: str) -> None:
+ if self.is_cached:
+ self._cache_control.delete(key=url)
- def request(self, method: str, url: str, **kwargs: Any) -> requests.Response:
+ def authenticated_url(self, url: str) -> str:
+ parsed = urllib.parse.urlparse(url)
+ credential = self.get_credentials_for_url(url)
+
+ if credential.username is not None and credential.password is not None:
+ username = urllib.parse.quote(credential.username, safe="")
+ password = urllib.parse.quote(credential.password, safe="")
+
+ return (
+ f"{parsed.scheme}://{username}:{password}@{parsed.netloc}{parsed.path}"
+ )
+
+ return url
+
+ def request(
+ self, method: str, url: str, raise_for_status: bool = True, **kwargs: Any
+ ) -> requests.Response:
request = requests.Request(method, url)
- username, password = self.get_credentials_for_url(url)
+ credential = self.get_credentials_for_url(url)
- if username is not None and password is not None:
- request = requests.auth.HTTPBasicAuth(username, password)(request)
+ if credential.username is not None or credential.password is not None:
+ request = requests.auth.HTTPBasicAuth(
+ credential.username or "", credential.password or ""
+ )(request)
- session = self.session
+ session = self.get_session(url=url)
prepared_request = session.prepare_request(request)
proxies = kwargs.get("proxies", {})
@@ -78,7 +189,7 @@ def request(self, method: str, url: str, **kwargs: Any) -> requests.Response:
if verify is not None:
verify = str(verify)
- settings = session.merge_environment_settings(
+ settings = session.merge_environment_settings( # type: ignore[no-untyped-call]
prepared_request.url, proxies, stream, verify, cert
)
@@ -100,31 +211,86 @@ def request(self, method: str, url: str, **kwargs: Any) -> requests.Response:
raise e
else:
if resp.status_code not in [502, 503, 504] or is_last_attempt:
- resp.raise_for_status()
+ if resp.status_code is not None and raise_for_status:
+ resp.raise_for_status()
return resp
if not is_last_attempt:
attempt += 1
delay = 0.5 * attempt
- self._log(f"Retrying HTTP request in {delay} seconds.", level="debug")
+ logger.debug(f"Retrying HTTP request in {delay} seconds.")
time.sleep(delay)
continue
# this should never really be hit under any sane circumstance
raise PoetryException("Failed HTTP {} request", method.upper())
- def get_credentials_for_url(self, url: str) -> tuple[str | None, str | None]:
- parsed_url = urllib.parse.urlsplit(url)
+ def get(self, url: str, **kwargs: Any) -> requests.Response:
+ return self.request("get", url, **kwargs)
- netloc = parsed_url.netloc
+ def post(self, url: str, **kwargs: Any) -> requests.Response:
+ return self.request("post", url, **kwargs)
+
+ def _get_credentials_for_repository(
+ self, repository: AuthenticatorRepositoryConfig, username: str | None = None
+ ) -> HTTPAuthCredential:
+ # cache repository credentials by repository url to avoid multiple keyring
+ # backend queries when packages are being downloaded from the same source
+ key = f"{repository.url}#username={username or ''}"
+
+ if key not in self._credentials:
+ self._credentials[key] = repository.get_http_credentials(
+ password_manager=self._password_manager, username=username
+ )
- credentials: tuple[str | None, str | None] = self._credentials.get(
- netloc, (None, None)
+ return self._credentials[key]
+
+ def _get_credentials_for_url(
+ self, url: str, exact_match: bool = False
+ ) -> HTTPAuthCredential:
+ repository = self.get_repository_config_for_url(url, exact_match)
+
+ credential = (
+ self._get_credentials_for_repository(repository=repository)
+ if repository is not None
+ else HTTPAuthCredential()
)
- if credentials == (None, None):
+ if credential.password is None:
+ parsed_url = urllib.parse.urlsplit(url)
+ netloc = parsed_url.netloc
+ credential = self._password_manager.keyring.get_credential(
+ url, netloc, username=credential.username
+ )
+
+ return HTTPAuthCredential(
+ username=credential.username, password=credential.password
+ )
+
+ return credential
+
+ def get_credentials_for_git_url(self, url: str) -> HTTPAuthCredential:
+ parsed_url = urllib.parse.urlsplit(url)
+
+ if parsed_url.scheme not in {"http", "https"}:
+ return HTTPAuthCredential()
+
+ key = f"git+{url}"
+
+ if key not in self._credentials:
+ self._credentials[key] = self._get_credentials_for_url(url, True)
+
+ return self._credentials[key]
+
+ def get_credentials_for_url(self, url: str) -> HTTPAuthCredential:
+ parsed_url = urllib.parse.urlsplit(url)
+ netloc = parsed_url.netloc
+
+ if url not in self._credentials:
if "@" not in netloc:
- credentials = self._get_credentials_for_netloc(netloc)
+ # no credentials were provided in the url, try finding the
+ # best repository configuration
+ self._credentials[url] = self._get_credentials_for_url(url)
else:
# Split from the right because that's how urllib.parse.urlsplit()
# behaves if more than one @ is present (which can be checked using
@@ -134,110 +300,113 @@ def get_credentials_for_url(self, url: str) -> tuple[str | None, str | None]:
# behaves if more than one : is present (which again can be checked
# using the password attribute of the return value)
user, password = auth.split(":", 1) if ":" in auth else (auth, "")
- credentials = (
+ self._credentials[url] = HTTPAuthCredential(
urllib.parse.unquote(user),
urllib.parse.unquote(password),
)
- if any(credential is not None for credential in credentials):
- credentials = (credentials[0] or "", credentials[1] or "")
- self._credentials[netloc] = credentials
-
- return credentials
+ return self._credentials[url]
def get_pypi_token(self, name: str) -> str | None:
return self._password_manager.get_pypi_token(name)
- def get_http_auth(self, name: str) -> dict[str, str | None] | None:
- return self._get_http_auth(name, None)
-
- def _get_http_auth(
- self, name: str, netloc: str | None
- ) -> dict[str, str | None] | None:
+ def get_http_auth(
+ self, name: str, username: str | None = None
+ ) -> HTTPAuthCredential | None:
if name == "pypi":
- url = "https://upload.pypi.org/legacy/"
+ repository = AuthenticatorRepositoryConfig(
+ name, "https://upload.pypi.org/legacy/"
+ )
else:
- url = self._config.get(f"repositories.{name}.url")
- if not url:
+ if name not in self.configured_repositories:
return None
+ repository = self.configured_repositories[name]
- parsed_url = urllib.parse.urlsplit(url)
+ return self._get_credentials_for_repository(
+ repository=repository, username=username
+ )
- if netloc is None or netloc == parsed_url.netloc:
- auth = self._password_manager.get_http_auth(name)
- auth = auth or {}
+ @property
+ def configured_repositories(self) -> dict[str, AuthenticatorRepositoryConfig]:
+ if self._configured_repositories is None:
+ self._configured_repositories = {}
+ for repository_name in self._config.get("repositories", []):
+ url = self._config.get(f"repositories.{repository_name}.url")
+ self._configured_repositories[
+ repository_name
+ ] = AuthenticatorRepositoryConfig(repository_name, url)
- if auth.get("password") is None:
- username = auth.get("username")
- auth = self._get_credentials_for_netloc_from_keyring(
- url, parsed_url.netloc, username
- )
+ return self._configured_repositories
- return auth
+ def reset_credentials_cache(self) -> None:
+ self.get_repository_config_for_url.cache_clear()
+ self._credentials = {}
- return None
+ def add_repository(self, name: str, url: str) -> None:
+ self.configured_repositories[name] = AuthenticatorRepositoryConfig(name, url)
+ self.reset_credentials_cache()
- def _get_credentials_for_netloc(self, netloc: str) -> tuple[str | None, str | None]:
- for repository_name, _ in self._get_repository_netlocs():
- auth = self._get_http_auth(repository_name, netloc)
+ def get_certs_for_url(self, url: str) -> dict[str, Path | None]:
+ if url not in self._certs:
+ self._certs[url] = self._get_certs_for_url(url)
+ return self._certs[url]
+
+ @functools.lru_cache(maxsize=None)
+ def get_repository_config_for_url(
+ self, url: str, exact_match: bool = False
+ ) -> AuthenticatorRepositoryConfig | None:
+ parsed_url = urllib.parse.urlsplit(url)
+ candidates_netloc_only = []
+ candidates_path_match = []
- if auth is None:
+ for repository in self.configured_repositories.values():
+ if exact_match:
+ if parsed_url.path == repository.path:
+ return repository
continue
- return auth.get("username"), auth.get("password")
+ if repository.netloc == parsed_url.netloc:
+ if parsed_url.path.startswith(repository.path) or commonprefix(
+ (parsed_url.path, repository.path)
+ ):
+ candidates_path_match.append(repository)
+ continue
+ candidates_netloc_only.append(repository)
+
+ if candidates_path_match:
+ candidates = candidates_path_match
+ elif candidates_netloc_only:
+ candidates = candidates_netloc_only
+ else:
+ return None
- return None, None
+ if len(candidates) > 1:
+ logger.debug(
+ "Multiple source configurations found for %s - %s",
+ parsed_url.netloc,
+ ", ".join(map(lambda c: c.name, candidates)),
+ )
+ # prefer the more specific path
+ candidates.sort(
+ key=lambda c: len(commonprefix([parsed_url.path, c.path])), reverse=True
+ )
- def get_certs_for_url(self, url: str) -> dict[str, Path | None]:
- parsed_url = urllib.parse.urlsplit(url)
+ return candidates[0]
- netloc = parsed_url.netloc
+ def _get_certs_for_url(self, url: str) -> dict[str, Path | None]:
+ selected = self.get_repository_config_for_url(url)
+ if selected:
+ return selected.certs(config=self._config)
+ return {"cert": None, "verify": None}
- return self._certs.setdefault(
- netloc,
- self._get_certs_for_netloc_from_config(netloc),
- )
- def _get_repository_netlocs(self) -> Iterator[tuple[str, str]]:
- for repository_name in self._config.get("repositories", []):
- url = self._config.get(f"repositories.{repository_name}.url")
- parsed_url = urllib.parse.urlsplit(url)
- yield repository_name, parsed_url.netloc
-
- def _get_credentials_for_netloc_from_keyring(
- self, url: str, netloc: str, username: str | None
- ) -> dict[str, str | None] | None:
- import keyring
-
- cred = keyring.get_credential(url, username)
- if cred is not None:
- return {
- "username": cred.username,
- "password": cred.password,
- }
-
- cred = keyring.get_credential(netloc, username)
- if cred is not None:
- return {
- "username": cred.username,
- "password": cred.password,
- }
-
- if username:
- return {
- "username": username,
- "password": None,
- }
-
- return None
-
- def _get_certs_for_netloc_from_config(self, netloc: str) -> dict[str, Path | None]:
- certs: dict[str, Path | None] = {"cert": None, "verify": None}
-
- for repository_name, repository_netloc in self._get_repository_netlocs():
- if netloc == repository_netloc:
- certs["cert"] = get_client_cert(self._config, repository_name)
- certs["verify"] = get_cert(self._config, repository_name)
- break
-
- return certs
+_authenticator: Authenticator | None = None
+
+
+def get_default_authenticator() -> Authenticator:
+ global _authenticator
+
+ if _authenticator is None:
+ _authenticator = Authenticator()
+
+ return _authenticator
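Credential and certificate lookups are now resolved against `AuthenticatorRepositoryConfig` entries, and `get_repository_config_for_url` prefers the configured repository on the same netloc whose path shares the longest common prefix with the requested URL. A standalone sketch of that selection rule, with a plain dict standing in for the configured repositories:

```python
from __future__ import annotations

import urllib.parse

from os.path import commonprefix


# Standalone sketch of the selection rule in get_repository_config_for_url():
# same netloc first, then the longest common path prefix wins.
REPOSITORIES = {
    "internal": "https://pypi.example.com/internal/simple/",
    "mirror": "https://pypi.example.com/mirror/simple/",
}


def pick_repository(url: str) -> str | None:
    parsed = urllib.parse.urlsplit(url)
    candidates: list[tuple[int, str]] = []
    for name, repo_url in REPOSITORIES.items():
        repo = urllib.parse.urlsplit(repo_url)
        if repo.netloc == parsed.netloc and commonprefix((parsed.path, repo.path)):
            candidates.append((len(commonprefix((parsed.path, repo.path))), name))
    if not candidates:
        return None
    # prefer the most specific (longest) path match
    return max(candidates)[1]


print(pick_repository("https://pypi.example.com/internal/simple/foo/"))  # internal
```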
diff --git a/src/poetry/utils/dependency_specification.py b/src/poetry/utils/dependency_specification.py
new file mode 100644
index 00000000000..57d029599d1
--- /dev/null
+++ b/src/poetry/utils/dependency_specification.py
@@ -0,0 +1,213 @@
+from __future__ import annotations
+
+import contextlib
+import os
+import re
+import urllib.parse
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Dict
+from typing import List
+from typing import TypeVar
+from typing import Union
+from typing import cast
+
+from poetry.core.packages.dependency import Dependency
+from poetry.core.packages.vcs_dependency import VCSDependency
+from tomlkit.items import InlineTable
+
+from poetry.puzzle.provider import Provider
+
+
+if TYPE_CHECKING:
+ from poetry.utils.env import Env
+
+
+DependencySpec = Dict[str, Union[str, bool, Dict[str, Union[str, bool]], List[str]]]
+
+
+def _parse_dependency_specification_git_url(
+ requirement: str, env: Env | None = None
+) -> DependencySpec | None:
+ from poetry.core.vcs.git import Git
+ from poetry.core.vcs.git import ParsedUrl
+
+ parsed = ParsedUrl.parse(requirement)
+ url = Git.normalize_url(requirement)
+
+ pair = {"name": parsed.name, "git": url.url}
+ if parsed.rev:
+ pair["rev"] = url.revision
+
+ source_root = env.path.joinpath("src") if env else None
+ package = Provider.get_package_from_vcs(
+ "git", url=url.url, rev=pair.get("rev"), source_root=source_root
+ )
+ pair["name"] = package.name
+ return pair
+
+
+def _parse_dependency_specification_url(
+ requirement: str, env: Env | None = None
+) -> DependencySpec | None:
+ url_parsed = urllib.parse.urlparse(requirement)
+ if not (url_parsed.scheme and url_parsed.netloc):
+ return None
+
+ if url_parsed.scheme in ["git+https", "git+ssh"]:
+ return _parse_dependency_specification_git_url(requirement, env)
+
+ if url_parsed.scheme in ["http", "https"]:
+ package = Provider.get_package_from_url(requirement)
+ return {"name": package.name, "url": cast(str, package.source_url)}
+
+ return None
+
+
+def _parse_dependency_specification_path(
+ requirement: str, cwd: Path
+) -> DependencySpec | None:
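+ # only treat the requirement as a path when it contains a path separator and
+ # resolves to an existing file or directory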
+ if (os.path.sep in requirement or "/" in requirement) and (
+ cwd.joinpath(requirement).exists()
+ or Path(requirement).expanduser().exists()
+ and Path(requirement).expanduser().is_absolute()
+ ):
+ path = Path(requirement).expanduser()
+ is_absolute = path.is_absolute()
+
+ if not path.is_absolute():
+ path = cwd.joinpath(requirement)
+
+ if path.is_file():
+ package = Provider.get_package_from_file(path.resolve())
+ else:
+ package = Provider.get_package_from_directory(path.resolve())
+
+ return {
+ "name": package.name,
+ "path": path.relative_to(cwd).as_posix()
+ if not is_absolute
+ else path.as_posix(),
+ }
+
+ return None
+
+
+def _parse_dependency_specification_simple(
+ requirement: str,
+) -> DependencySpec | None:
+ extras: list[str] = []
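+ # normalise "name@version", "name==version", "name=version", "name:version" and
+ # "name version" into a single "name version" pair; constraint operators such as
+ # ">=" are left untouched and handled by the branch below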
+ pair = re.sub("^([^@=: ]+)(?:@|==|(?<![<>~!])=|:| )(.*)$", "\\1 \\2", requirement)
+ pair = pair.strip()
+
+ require: DependencySpec = {}
+
+ if " " in pair:
+ name, version = pair.split(" ", 2)
+ extras_m = re.search(r"\[([\w\d,-_]+)\]$", name)
+ if extras_m:
+ extras = [e.strip() for e in extras_m.group(1).split(",")]
+ name, _ = name.split("[")
+
+ require["name"] = name
+ if version != "latest":
+ require["version"] = version
+ else:
+ m = re.match(r"^([^><=!: ]+)((?:>=|<=|>|<|!=|~=|~|\^).*)$", requirement.strip())
+ if m:
+ name, constraint = m.group(1), m.group(2)
+ extras_m = re.search(r"\[([\w\d,-_]+)\]$", name)
+ if extras_m:
+ extras = [e.strip() for e in extras_m.group(1).split(",")]
+ name, _ = name.split("[")
+
+ require["name"] = name
+ require["version"] = constraint
+ else:
+ extras_m = re.search(r"\[([\w\d,-_]+)\]$", pair)
+ if extras_m:
+ extras = [e.strip() for e in extras_m.group(1).split(",")]
+ pair, _ = pair.split("[")
+
+ require["name"] = pair
+
+ if extras:
+ require["extras"] = extras
+
+ return require
+
+
+BaseSpec = TypeVar("BaseSpec", DependencySpec, InlineTable)
+
+
+def dependency_to_specification(
+ dependency: Dependency, specification: BaseSpec
+) -> BaseSpec:
+ if dependency.is_vcs():
+ dependency = cast(VCSDependency, dependency)
+ specification[dependency.vcs] = cast(str, dependency.source_url)
+ if dependency.reference:
+ specification["rev"] = dependency.reference
+ elif dependency.is_file() or dependency.is_directory():
+ specification["path"] = cast(str, dependency.source_url)
+ elif dependency.is_url():
+ specification["url"] = cast(str, dependency.source_url)
+ elif dependency.pretty_constraint != "*" and not dependency.constraint.is_empty():
+ specification["version"] = dependency.pretty_constraint
+
+ if not dependency.marker.is_any():
+ specification["markers"] = str(dependency.marker)
+
+ if dependency.extras:
+ specification["extras"] = sorted(dependency.extras)
+
+ return specification
+
+
+def pep508_to_dependency_specification(requirement: str) -> DependencySpec | None:
+ if " ; " not in requirement and re.search(r"@[\^~!=<>\d]", requirement):
+ # this is of the form package@<version>, do not attempt to parse it
+ return None
+
+ with contextlib.suppress(ValueError):
+ dependency = Dependency.create_from_pep_508(requirement)
+ specification: DependencySpec = {}
+ specification = dependency_to_specification(dependency, specification)
+
+ if specification:
+ specification["name"] = dependency.name
+ return specification
+
+ return None
+
+
+def parse_dependency_specification(
+ requirement: str, env: Env | None = None, cwd: Path | None = None
+) -> DependencySpec:
+ requirement = requirement.strip()
+ cwd = cwd or Path.cwd()
+
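+ # prefer a strict PEP 508 parse; fall back to url, path and plain
+ # name/version heuristics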
+ specification = pep508_to_dependency_specification(requirement)
+
+ if specification is not None:
+ return specification
+
+ extras = []
+ extras_m = re.search(r"\[([\w\d,-_ ]+)\]$", requirement)
+ if extras_m:
+ extras = [e.strip() for e in extras_m.group(1).split(",")]
+ requirement, _ = requirement.split("[")
+
+ specification = (
+ _parse_dependency_specification_url(requirement, env=env)
+ or _parse_dependency_specification_path(requirement, cwd=cwd)
+ or _parse_dependency_specification_simple(requirement)
+ )
+
+ if specification:
+ if extras and "extras" not in specification:
+ specification["extras"] = extras
+ return specification
+
+ raise ValueError(f"Invalid dependency specification: {requirement}")
diff --git a/src/poetry/utils/env.py b/src/poetry/utils/env.py
index ab191ef97f7..8c5867893fb 100644
--- a/src/poetry/utils/env.py
+++ b/src/poetry/utils/env.py
@@ -7,10 +7,10 @@
import os
import platform
import re
-import shutil
import subprocess
import sys
import sysconfig
+import warnings
from contextlib import contextmanager
from copy import deepcopy
@@ -18,9 +18,9 @@
from subprocess import CalledProcessError
from typing import TYPE_CHECKING
from typing import Any
-from typing import ContextManager
from typing import Iterable
from typing import Iterator
+from typing import TypeVar
import packaging.tags
import tomlkit
@@ -31,9 +31,11 @@
from packaging.tags import interpreter_name
from packaging.tags import interpreter_version
from packaging.tags import sys_tags
+from poetry.core.poetry import Poetry
from poetry.core.semver.helpers import parse_constraint
from poetry.core.semver.version import Version
from poetry.core.toml.file import TOMLFile
+from poetry.core.utils.helpers import temporary_directory
from virtualenv.seed.wheels.embed import get_embed_wheel
from poetry.locations import CACHE_DIR
@@ -43,14 +45,15 @@
from poetry.utils._compat import metadata
from poetry.utils.helpers import is_dir_writable
from poetry.utils.helpers import paths_csv
-from poetry.utils.helpers import temporary_directory
+from poetry.utils.helpers import remove_directory
if TYPE_CHECKING:
from cleo.io.io import IO
from poetry.core.version.markers import BaseMarker
- from poetry.poetry import Poetry
+
+P = TypeVar("P", bound=Poetry)
GET_SYS_TAGS = f"""
@@ -365,7 +368,7 @@ def remove_distribution_files(self, distribution_name: str) -> list[Path]:
file.unlink()
if distribution._path.exists():
- shutil.rmtree(str(distribution._path))
+ remove_directory(str(distribution._path), force=True)
paths.append(distribution._path)
@@ -494,7 +497,7 @@ class EnvManager:
ENVS_FILE = "envs.toml"
- def __init__(self, poetry: Poetry) -> None:
+ def __init__(self, poetry: P) -> None:
self._poetry = poetry
def _full_python_path(self, python: str) -> str:
@@ -534,7 +537,7 @@ def _detect_active_python(self, io: IO) -> str:
def activate(self, python: str, io: IO) -> Env:
venv_path = self._poetry.config.get("virtualenvs.path")
if venv_path is None:
- venv_path = Path(CACHE_DIR) / "virtualenvs"
+ venv_path = CACHE_DIR / "virtualenvs"
else:
venv_path = Path(venv_path)
@@ -627,7 +630,7 @@ def activate(self, python: str, io: IO) -> Env:
def deactivate(self, io: IO) -> None:
venv_path = self._poetry.config.get("virtualenvs.path")
if venv_path is None:
- venv_path = Path(CACHE_DIR) / "virtualenvs"
+ venv_path = CACHE_DIR / "virtualenvs"
else:
venv_path = Path(venv_path)
@@ -653,7 +656,7 @@ def get(self, reload: bool = False) -> VirtualEnv | SystemEnv:
venv_path = self._poetry.config.get("virtualenvs.path")
if venv_path is None:
- venv_path = Path(CACHE_DIR) / "virtualenvs"
+ venv_path = CACHE_DIR / "virtualenvs"
else:
venv_path = Path(venv_path)
@@ -694,7 +697,7 @@ def get(self, reload: bool = False) -> VirtualEnv | SystemEnv:
venv_path = self._poetry.config.get("virtualenvs.path")
if venv_path is None:
- venv_path = Path(CACHE_DIR) / "virtualenvs"
+ venv_path = CACHE_DIR / "virtualenvs"
else:
venv_path = Path(venv_path)
@@ -724,7 +727,7 @@ def list(self, name: str | None = None) -> list[VirtualEnv]:
venv_path = self._poetry.config.get("virtualenvs.path")
if venv_path is None:
- venv_path = Path(CACHE_DIR) / "virtualenvs"
+ venv_path = CACHE_DIR / "virtualenvs"
else:
venv_path = Path(venv_path)
@@ -744,7 +747,7 @@ def list(self, name: str | None = None) -> list[VirtualEnv]:
def remove(self, python: str) -> Env:
venv_path = self._poetry.config.get("virtualenvs.path")
if venv_path is None:
- venv_path = Path(CACHE_DIR) / "virtualenvs"
+ venv_path = CACHE_DIR / "virtualenvs"
else:
venv_path = Path(venv_path)
@@ -866,7 +869,7 @@ def create_venv(
if root_venv:
venv_path = cwd / ".venv"
elif venv_path is None:
- venv_path = Path(CACHE_DIR) / "virtualenvs"
+ venv_path = CACHE_DIR / "virtualenvs"
else:
venv_path = Path(venv_path)
@@ -990,12 +993,6 @@ def create_venv(
venv,
executable=executable,
flags=self._poetry.config.get("virtualenvs.options"),
- # TODO: in a future version switch remove pip/setuptools/wheel
- # poetry does not need them these exists today to not break developer
- # environment assumptions
- with_pip=True,
- with_setuptools=True,
- with_wheel=True,
)
# venv detection:
@@ -1070,7 +1067,7 @@ def remove_venv(cls, path: Path | str) -> None:
path = Path(path)
assert path.is_dir()
try:
- shutil.rmtree(str(path))
+ remove_directory(path)
return
except OSError as e:
# Continue only if e.errno == 16
@@ -1085,7 +1082,7 @@ def remove_venv(cls, path: Path | str) -> None:
if file_path.is_file() or file_path.is_symlink():
file_path.unlink()
elif file_path.is_dir():
- shutil.rmtree(str(file_path))
+ remove_directory(file_path, force=True)
@classmethod
def get_system_env(cls, naive: bool = False) -> SystemEnv | GenericEnv:
@@ -1542,7 +1539,9 @@ def get_paths(self) -> dict[str, str]:
d = Distribution()
d.parse_config_files()
- obj = d.get_command_obj("install", create=True)
+ with warnings.catch_warnings():
+ warnings.filterwarnings("ignore", "setup.py install is deprecated")
+ obj = d.get_command_obj("install", create=True)
obj.finalize_options()
paths = sysconfig.get_paths().copy()
@@ -1833,10 +1832,7 @@ def _bin(self, bin: str) -> str:
def ephemeral_environment(
executable: str | Path | None = None,
flags: dict[str, bool] = None,
- with_pip: bool = False,
- with_wheel: bool | None = None,
- with_setuptools: bool | None = None,
-) -> ContextManager[VirtualEnv]:
+) -> Iterator[VirtualEnv]:
with temporary_directory() as tmp_dir:
# TODO: cache PEP 517 build environment corresponding to each project venv
venv_dir = Path(tmp_dir) / ".venv"
@@ -1844,13 +1840,52 @@ def ephemeral_environment(
path=venv_dir.as_posix(),
executable=executable,
flags=flags,
- with_pip=with_pip,
- with_wheel=with_wheel,
- with_setuptools=with_setuptools,
)
yield VirtualEnv(venv_dir, venv_dir)
+@contextmanager
+def build_environment(
+ poetry: P, env: Env | None = None, io: IO | None = None
+) -> Iterator[Env]:
+ """
+ If a build script is specified for the project, there could be additional build
+ time dependencies, e.g. cython, setuptools etc. In these cases, we create an
+ ephemeral build environment with all requirements specified under
+ `build-system.requires` and return this. Otherwise, the given default project
+ environment is returned.
+ """
+ if not env or poetry.package.build_script:
+ with ephemeral_environment(executable=env.python if env else None) as venv:
+ overwrite = io and io.output.is_decorated() and not io.is_debug()
+ if io:
+ if not overwrite:
+ io.write_line("")
+
+ requires = [
+ f"{requirement}"
+ for requirement in poetry.pyproject.build_system.requires
+ ]
+
+ io.overwrite(
+ "Preparing build environment with build-system requirements"
+ f" {', '.join(requires)}"
+ )
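+ # install the declared build-system requirements into the ephemeral venv so
+ # the build script's dependencies are available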
+ venv.run_pip(
+ "install",
+ "--disable-pip-version-check",
+ "--ignore-installed",
+ *poetry.pyproject.build_system.requires,
+ )
+
+ if overwrite:
+ io.write_line("")
+
+ yield venv
+ else:
+ yield env
+
+
class MockEnv(NullEnv):
def __init__(
self,
@@ -1864,7 +1899,7 @@ def __init__(
marker_env: dict[str, Any] = None,
supported_tags: list[Tag] = None,
**kwargs: Any,
- ):
+ ) -> None:
super().__init__(**kwargs)
self._version_info = version_info
diff --git a/src/poetry/utils/helpers.py b/src/poetry/utils/helpers.py
index 0a34cf6e6a6..a0d670c4201 100644
--- a/src/poetry/utils/helpers.py
+++ b/src/poetry/utils/helpers.py
@@ -7,19 +7,19 @@
import tempfile
from collections.abc import Mapping
-from contextlib import contextmanager
from pathlib import Path
from typing import TYPE_CHECKING
from typing import Any
-from typing import Callable
-from typing import Iterator
if TYPE_CHECKING:
+ from collections.abc import Callable
+
from poetry.core.packages.package import Package
from requests import Session
from poetry.config.config import Config
+ from poetry.utils.authenticator import Authenticator
_canonicalize_regex = re.compile("[-_]+")
@@ -33,20 +33,6 @@ def module_name(name: str) -> str:
return canonicalize_name(name).replace(".", "_").replace("-", "_")
-def _del_ro(action: Callable, name: str, exc: Exception) -> None:
- os.chmod(name, stat.S_IWRITE)
- os.remove(name)
-
-
-@contextmanager
-def temporary_directory(*args: Any, **kwargs: Any) -> Iterator[str]:
- name = tempfile.mkdtemp(*args, **kwargs)
-
- yield name
-
- shutil.rmtree(name, onerror=_del_ro)
-
-
def get_cert(config: Config, repository_name: str) -> Path | None:
cert = config.get(f"certificates.{repository_name}.cert")
if cert:
@@ -63,7 +49,7 @@ def get_client_cert(config: Config, repository_name: str) -> Path | None:
return None
-def _on_rm_error(func: Callable, path: str, exc_info: Exception) -> None:
+def _on_rm_error(func: Callable[[str], None], path: str, exc_info: Exception) -> None:
if not os.path.exists(path):
return
@@ -71,14 +57,24 @@ def _on_rm_error(func: Callable, path: str, exc_info: Exception) -> None:
func(path)
-def safe_rmtree(path: str) -> None:
+def remove_directory(
+ path: Path | str, *args: Any, force: bool = False, **kwargs: Any
+) -> None:
+ """
+ Helper function to handle safe removal, optionally forcing removal of stubborn files.
+ This is particularly useful when dist files are read-only or git writes read-only
+ files on Windows.
+
+ Internally, all arguments are passed to `shutil.rmtree`.
+ """
if Path(path).is_symlink():
return os.unlink(str(path))
- shutil.rmtree(path, onerror=_on_rm_error)
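+ # respect a caller-supplied onerror handler; otherwise force removal of
+ # read-only files via _on_rm_error only when force=True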
+ kwargs["onerror"] = kwargs.pop("onerror", _on_rm_error if force else None)
+ shutil.rmtree(path, *args, **kwargs)
-def merge_dicts(d1: dict, d2: dict) -> None:
+def merge_dicts(d1: dict[str, Any], d2: dict[str, Any]) -> None:
for k in d2.keys():
if k in d1 and isinstance(d1[k], dict) and isinstance(d2[k], Mapping):
merge_dicts(d1[k], d2[k])
@@ -89,30 +85,58 @@ def merge_dicts(d1: dict, d2: dict) -> None:
def download_file(
url: str,
dest: str,
- session: Session | None = None,
+ session: Authenticator | Session | None = None,
chunk_size: int = 1024,
) -> None:
import requests
+ from poetry.puzzle.provider import Indicator
+
get = requests.get if not session else session.get
response = get(url, stream=True)
response.raise_for_status()
- with open(dest, "wb") as f:
- for chunk in response.iter_content(chunk_size=chunk_size):
- if chunk:
- f.write(chunk)
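+ # stream the response to disk in chunks, updating the progress indicator at
+ # most once per whole percent to keep output readable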
+ set_indicator = False
+ with Indicator.context() as update_context:
+ update_context(f"Downloading {url}")
+
+ if "Content-Length" in response.headers:
+ try:
+ total_size = int(response.headers["Content-Length"])
+ except ValueError:
+ total_size = 0
+
+ fetched_size = 0
+ last_percent = 0
+
+ # if less than 1MB, we simply show that we're downloading
+ # but skip the updating
+ set_indicator = total_size > 1024 * 1024
+
+ with open(dest, "wb") as f:
+ for chunk in response.iter_content(chunk_size=chunk_size):
+ if chunk:
+ f.write(chunk)
+
+ if set_indicator:
+ fetched_size += len(chunk)
+ percent = (fetched_size * 100) // total_size
+ if percent > last_percent:
+ last_percent = percent
+ update_context(f"Downloading {url} {percent:3}%")
def get_package_version_display_string(
package: Package, root: Path | None = None
) -> str:
if package.source_type in ["file", "directory"] and root:
+ assert package.source_url is not None
path = Path(os.path.relpath(package.source_url, root.as_posix())).as_posix()
return f"{package.version} {path}"
- return package.full_pretty_version
+ pretty_version: str = package.full_pretty_version
+ return pretty_version
def paths_csv(paths: list[Path]) -> str:
diff --git a/src/poetry/utils/password_manager.py b/src/poetry/utils/password_manager.py
index b525bdbae45..69d0c0974d3 100644
--- a/src/poetry/utils/password_manager.py
+++ b/src/poetry/utils/password_manager.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import dataclasses
import logging
from contextlib import suppress
@@ -22,6 +23,12 @@ class KeyRingError(Exception):
pass
+@dataclasses.dataclass
+class HTTPAuthCredential:
+ username: str | None = dataclasses.field(default=None)
+ password: str | None = dataclasses.field(default=None)
+
+
class KeyRing:
def __init__(self, namespace: str) -> None:
self._namespace = namespace
@@ -32,6 +39,25 @@ def __init__(self, namespace: str) -> None:
def is_available(self) -> bool:
return self._is_available
+ def get_credential(
+ self, *names: str, username: str | None = None
+ ) -> HTTPAuthCredential:
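+ # try each candidate service name in turn (callers typically pass the full
+ # URL first, then the netloc) and fall back to a username-only credential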
+ default = HTTPAuthCredential(username=username, password=None)
+
+ if not self.is_available():
+ return default
+
+ import keyring
+
+ for name in names:
+ credential = keyring.get_credential(name, username)
+ if credential:
+ return HTTPAuthCredential(
+ username=credential.username, password=credential.password
+ )
+
+ return default
+
def get_password(self, name: str, username: str) -> str | None:
if not self.is_available():
return None
@@ -142,7 +168,8 @@ def set_pypi_token(self, name: str, token: str) -> None:
def get_pypi_token(self, name: str) -> str | None:
if not self.keyring.is_available():
- return self._config.get(f"pypi-token.{name}")
+ token: str | None = self._config.get(f"pypi-token.{name}")
+ return token
return self.keyring.get_password(name, "__token__")
diff --git a/src/poetry/utils/pip.py b/src/poetry/utils/pip.py
index 5e6d5835bc6..6367f647507 100644
--- a/src/poetry/utils/pip.py
+++ b/src/poetry/utils/pip.py
@@ -31,7 +31,7 @@ def pip_install(
# lot of packages.
args = ["install", "--disable-pip-version-check", "--prefix", str(environment.path)]
- if not is_wheel:
+ if not is_wheel and not editable:
args.insert(1, "--use-pep517")
if upgrade:
diff --git a/src/poetry/utils/setup_reader.py b/src/poetry/utils/setup_reader.py
index 1bc84450c63..8676b1eeea1 100644
--- a/src/poetry/utils/setup_reader.py
+++ b/src/poetry/utils/setup_reader.py
@@ -25,7 +25,7 @@ class SetupReader:
FILES = ["setup.py", "setup.cfg"]
@classmethod
- def read_from_directory(cls, directory: str | Path) -> dict[str, list | dict]:
+ def read_from_directory(cls, directory: str | Path) -> dict[str, Any]:
if isinstance(directory, str):
directory = Path(directory)
@@ -44,7 +44,7 @@ def read_from_directory(cls, directory: str | Path) -> dict[str, list | dict]:
return result
- def read_setup_py(self, filepath: str | Path) -> dict[str, list | dict]:
+ def read_setup_py(self, filepath: str | Path) -> dict[str, Any]:
if isinstance(filepath, str):
filepath = Path(filepath)
diff --git a/src/poetry/vcs/__init__.py b/src/poetry/vcs/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/src/poetry/vcs/git/__init__.py b/src/poetry/vcs/git/__init__.py
new file mode 100644
index 00000000000..9882df85833
--- /dev/null
+++ b/src/poetry/vcs/git/__init__.py
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+from poetry.vcs.git.backend import Git
+
+
+__all__ = ["Git"]
diff --git a/src/poetry/vcs/git/backend.py b/src/poetry/vcs/git/backend.py
new file mode 100644
index 00000000000..70f8dd8ae46
--- /dev/null
+++ b/src/poetry/vcs/git/backend.py
@@ -0,0 +1,435 @@
+from __future__ import annotations
+
+import dataclasses
+import logging
+import re
+
+from pathlib import Path
+from subprocess import CalledProcessError
+from typing import TYPE_CHECKING
+
+from dulwich import porcelain
+from dulwich.client import HTTPUnauthorized
+from dulwich.client import get_transport_and_path
+from dulwich.config import ConfigFile
+from dulwich.config import parse_submodules
+from dulwich.errors import NotGitRepository
+from dulwich.refs import ANNOTATED_TAG_SUFFIX
+from dulwich.repo import Repo
+
+from poetry.console.exceptions import PoetrySimpleConsoleException
+from poetry.utils.authenticator import get_default_authenticator
+from poetry.utils.helpers import remove_directory
+
+
+if TYPE_CHECKING:
+ from dulwich.client import FetchPackResult
+ from dulwich.client import GitClient
+
+
+logger = logging.getLogger(__name__)
+
+
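+# a revision is treated as a sha if it is a full or abbreviated (5-40 hex chars) object id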
+def is_revision_sha(revision: str | None) -> bool:
+ return re.match(r"^\b[0-9a-f]{5,40}\b$", revision or "") is not None
+
+
+def annotated_tag(ref: str | bytes) -> bytes:
+ if isinstance(ref, str):
+ ref = ref.encode("utf-8")
+ return ref + ANNOTATED_TAG_SUFFIX
+
+
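+# lightweight container describing the requested branch/tag/revision; resolve()
+# rewrites it against the refs actually advertised by the remote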
+@dataclasses.dataclass
+class GitRefSpec:
+ branch: str | None = None
+ revision: str | None = None
+ tag: str | None = None
+ ref: bytes = dataclasses.field(default_factory=lambda: b"HEAD")
+
+ def resolve(self, remote_refs: FetchPackResult) -> None:
+ """
+ Resolve the ref using the provided remote refs.
+ """
+ self._normalise(remote_refs=remote_refs)
+ self._set_head(remote_refs=remote_refs)
+
+ def _normalise(self, remote_refs: FetchPackResult) -> None:
+ """
+ Internal helper method to determine whether the given revision is
+ 1. a branch or tag; if so, set the corresponding properties.
+ 2. a short sha; if so, resolve the full sha and set it as the revision.
+ """
+ if self.revision:
+ ref = f"refs/tags/{self.revision}".encode()
+ if ref in remote_refs.refs or annotated_tag(ref) in remote_refs.refs:
+ # this is a tag, incorrectly specified as a revision, tags take priority
+ self.tag = self.revision
+ self.revision = None
+ elif (
+ self.revision.encode("utf-8") in remote_refs.refs
+ or f"refs/heads/{self.revision}".encode() in remote_refs.refs
+ ):
+ # this is most likely a ref spec or a branch incorrectly specified
+ self.branch = self.revision
+ self.revision = None
+ elif (
+ self.branch
+ and f"refs/heads/{self.branch}".encode() not in remote_refs.refs
+ and (
+ f"refs/tags/{self.branch}".encode() in remote_refs.refs
+ or annotated_tag(f"refs/tags/{self.branch}") in remote_refs.refs
+ )
+ ):
+ # this is a tag incorrectly specified as a branch
+ self.tag = self.branch
+ self.branch = None
+
+ if self.revision and self.is_sha_short:
+ # revision is a short sha, resolve to full sha
+ short_sha = self.revision.encode("utf-8")
+ for sha in remote_refs.refs.values():
+ if sha.startswith(short_sha):
+ self.revision = sha.decode("utf-8")
+ break
+
+ def _set_head(self, remote_refs: FetchPackResult) -> None:
+ """
+ Internal helper method to populate ref and set its sha as the remote's head
+ and default ref.
+ """
+ self.ref = remote_refs.symrefs[b"HEAD"]
+
+ if self.revision:
+ head = self.revision.encode("utf-8")
+ else:
+ if self.tag:
+ ref = f"refs/tags/{self.tag}".encode()
+ annotated = annotated_tag(ref)
+ self.ref = annotated if annotated in remote_refs.refs else ref
+ elif self.branch:
+ self.ref = (
+ self.branch.encode("utf-8")
+ if self.is_ref
+ else f"refs/heads/{self.branch}".encode()
+ )
+ head = remote_refs.refs[self.ref]
+
+ remote_refs.refs[self.ref] = remote_refs.refs[b"HEAD"] = head
+
+ @property
+ def key(self) -> str:
+ return self.revision or self.branch or self.tag or self.ref.decode("utf-8")
+
+ @property
+ def is_sha(self) -> bool:
+ return is_revision_sha(revision=self.revision)
+
+ @property
+ def is_ref(self) -> bool:
+ return self.branch is not None and self.branch.startswith("refs/")
+
+ @property
+ def is_sha_short(self) -> bool:
+ return self.revision is not None and self.is_sha and len(self.revision) < 40
+
+
+@dataclasses.dataclass
+class GitRepoLocalInfo:
+ repo: dataclasses.InitVar[Repo | Path | str]
+ origin: str = dataclasses.field(init=False)
+ revision: str = dataclasses.field(init=False)
+
+ def __post_init__(self, repo: Repo | Path | str) -> None:
+ repo = Git.as_repo(repo=repo) if not isinstance(repo, Repo) else repo
+ self.origin = Git.get_remote_url(repo=repo, remote="origin")
+ self.revision = Git.get_revision(repo=repo)
+
+
+class Git:
+ @staticmethod
+ def as_repo(repo: Path | str) -> Repo:
+ return Repo(str(repo))
+
+ @staticmethod
+ def get_remote_url(repo: Repo, remote: str = "origin") -> str:
+ with repo:
+ config = repo.get_config()
+ section = (b"remote", remote.encode("utf-8"))
+
+ url = ""
+ if config.has_section(section):
+ value = config.get(section, b"url")
+ assert value is not None
+ url = value.decode("utf-8")
+
+ return url
+
+ @staticmethod
+ def get_revision(repo: Repo) -> str:
+ with repo:
+ return repo.head().decode("utf-8")
+
+ @classmethod
+ def info(cls, repo: Repo | Path | str) -> GitRepoLocalInfo:
+ return GitRepoLocalInfo(repo=repo)
+
+ @staticmethod
+ def get_name_from_source_url(url: str) -> str:
+ return re.sub(r"(.git)?$", "", url.rsplit("/", 1)[-1])
+
+ @classmethod
+ def _fetch_remote_refs(cls, url: str, local: Repo) -> FetchPackResult:
+ """
+ Helper method to fetch remote refs.
+ """
+ client: GitClient
+ path: str
+
+ kwargs: dict[str, str] = {}
+ credentials = get_default_authenticator().get_credentials_for_git_url(url=url)
+
+ if credentials.password and credentials.username:
+ # we do this conditionally as otherwise, dulwich might complain if these
+ # parameters are passed in for an ssh url
+ kwargs["username"] = credentials.username
+ kwargs["password"] = credentials.password
+
+ client, path = get_transport_and_path(url, **kwargs)
+
+ with local:
+ result: FetchPackResult = client.fetch(
+ path,
+ local,
+ determine_wants=local.object_store.determine_wants_all,
+ )
+ return result
+
+ @staticmethod
+ def _clone_legacy(url: str, refspec: GitRefSpec, target: Path) -> Repo:
+ """
+ Helper method to facilitate fallback to using system provided git client via
+ subprocess calls.
+ """
+ from poetry.vcs.git.system import SystemGit
+
+ logger.debug("Cloning '%s' using system git client", url)
+
+ if target.exists():
+ remove_directory(path=target, force=True)
+
+ revision = refspec.tag or refspec.branch or refspec.revision or "HEAD"
+
+ try:
+ SystemGit.clone(url, target)
+ except CalledProcessError:
+ raise PoetrySimpleConsoleException(
+ f"Failed to clone {url}, check your git configuration and permissions"
+ " for this repository."
+ )
+
+ if revision:
+ revision = revision.replace("refs/heads/", "")
+ revision = revision.replace("refs/tags/", "")
+
+ try:
+ SystemGit.checkout(revision, target)
+ except CalledProcessError:
+ raise PoetrySimpleConsoleException(
+ f"Failed to checkout {url} at '{revision}'"
+ )
+
+ repo = Repo(str(target))
+ return repo
+
+ @classmethod
+ def _clone(cls, url: str, refspec: GitRefSpec, target: Path) -> Repo:
+ """
+ Helper method to clone a remote repository at the given `url` at the specified
+ ref spec.
+ """
+ local: Repo
+ if not target.exists():
+ local = Repo.init(str(target), mkdir=True)
+ porcelain.remote_add(local, "origin", url)
+ else:
+ local = Repo(str(target))
+
+ remote_refs = cls._fetch_remote_refs(url=url, local=local)
+
+ logger.debug(
+ "Cloning %s> at '%s>' to %s>", url, refspec.key, target
+ )
+
+ try:
+ refspec.resolve(remote_refs=remote_refs)
+ except KeyError: # branch / ref does not exist
+ raise PoetrySimpleConsoleException(
+ f"Failed to clone {url} at '{refspec.key}', verify ref exists on"
+ " remote."
+ )
+
+ # ensure local HEAD matches remote
+ local.refs[b"HEAD"] = remote_refs.refs[b"HEAD"]
+
+ if refspec.is_ref:
+ # set ref to current HEAD
+ local.refs[refspec.ref] = local.refs[b"HEAD"]
+
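+ # import the remote's branches under refs/remotes/origin and its tags under
+ # refs/tags, skipping peeled annotated-tag entries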
+ for base, prefix in {
+ (b"refs/remotes/origin", b"refs/heads/"),
+ (b"refs/tags", b"refs/tags"),
+ }:
+ local.refs.import_refs(
+ base=base,
+ other={
+ n[len(prefix) :]: v
+ for (n, v) in remote_refs.refs.items()
+ if n.startswith(prefix) and not n.endswith(ANNOTATED_TAG_SUFFIX)
+ },
+ )
+
+ try:
+ with local:
+ local.reset_index()
+ except (AssertionError, KeyError) as e:
+ # this implies the ref we need does not exist or is invalid
+ if isinstance(e, KeyError):
+ # the local copy is at a bad state, lets remove it
+ logger.debug(
+ "Removing local clone (%s>) of repository as it is in a"
+ " broken state.",
+ local.path,
+ )
+ remove_directory(local.path, force=True)
+
+ if isinstance(e, AssertionError) and "Invalid object name" not in str(e):
+ raise
+
+ logger.debug(
+ "\nRequested ref (%s) was not fetched to local copy and cannot"
+ " be used. The following error was raised:\n\n\t%s>",
+ refspec.key,
+ e,
+ )
+
+ raise PoetrySimpleConsoleException(
+ f"Failed to clone {url} at '{refspec.key}', verify ref exists on"
+ " remote."
+ )
+
+ return local
+
+ @classmethod
+ def _clone_submodules(cls, repo: Repo) -> None:
+ """
+ Helper method to identify configured submodules and clone them recursively.
+ """
+ repo_root = Path(repo.path)
+ modules_config = repo_root.joinpath(".gitmodules")
+
+ if modules_config.exists():
+ config = ConfigFile.from_path(modules_config)
+
+ url: bytes
+ path: bytes
+ submodules = parse_submodules(config)
+ for path, url, _ in submodules:
+ path_relative = Path(path.decode("utf-8"))
+ path_absolute = repo_root.joinpath(path_relative)
+
+ source_root = path_absolute.parent
+ source_root.mkdir(parents=True, exist_ok=True)
+
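+ # check out each submodule at the commit recorded in the parent repository's index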
+ with repo:
+ revision = repo.open_index()[path].sha.decode("utf-8")
+
+ cls.clone(
+ url=url.decode("utf-8"),
+ source_root=source_root,
+ name=path_relative.name,
+ revision=revision,
+ clean=path_absolute.exists()
+ and not path_absolute.joinpath(".git").is_dir(),
+ )
+
+ @staticmethod
+ def is_using_legacy_client() -> bool:
+ from poetry.config.config import Config
+
+ legacy_client: bool = (
+ Config.create().get("experimental", {}).get("system-git-client", False)
+ )
+ return legacy_client
+
+ @staticmethod
+ def get_default_source_root() -> Path:
+ from poetry.config.config import Config
+
+ return Path(Config.create().get("cache-dir")) / "src"
+
+ @classmethod
+ def clone(
+ cls,
+ url: str,
+ name: str | None = None,
+ branch: str | None = None,
+ tag: str | None = None,
+ revision: str | None = None,
+ source_root: Path | None = None,
+ clean: bool = False,
+ ) -> Repo:
+ source_root = source_root or cls.get_default_source_root()
+ source_root.mkdir(parents=True, exist_ok=True)
+
+ name = name or cls.get_name_from_source_url(url=url)
+ target = source_root / name
+ refspec = GitRefSpec(branch=branch, revision=revision, tag=tag)
+
+ if target.exists():
+ if clean:
+ # force clean the local copy if it exists, do not reuse
+ remove_directory(target, force=True)
+ else:
+ # check if the current local copy matches the requested ref spec
+ try:
+ current_repo = Repo(str(target))
+
+ with current_repo:
+ current_sha = current_repo.head().decode("utf-8")
+ except (NotGitRepository, AssertionError, KeyError):
+ # something is wrong with the current checkout, clean it
+ remove_directory(target, force=True)
+ else:
+ if not is_revision_sha(revision=current_sha):
+ # head is not a sha, this will cause issues later, lets reset
+ remove_directory(target, force=True)
+ elif (
+ refspec.is_sha
+ and refspec.revision is not None
+ and current_sha.startswith(refspec.revision)
+ ):
+ # if revision is used short-circuit remote fetch head matches
+ return current_repo
+
+ try:
+ if not cls.is_using_legacy_client():
+ local = cls._clone(url=url, refspec=refspec, target=target)
+ cls._clone_submodules(repo=local)
+ return local
+ except HTTPUnauthorized:
+ # we do this here to handle http authenticated repositories as dulwich
+ # does not currently support using credentials from git-credential helpers.
+ # upstream issue: https://github.com/jelmer/dulwich/issues/873
+ #
+ # this is a little inefficient, however preferred as this is transparent
+ # without additional configuration or changes for existing projects that
+ # use http basic auth credentials.
+ logger.debug(
+ "Unable to fetch from private repository '%s', falling back to"
+ " system git",
+ url,
+ )
+
+ # fallback to legacy git client
+ return cls._clone_legacy(url=url, refspec=refspec, target=target)
diff --git a/src/poetry/vcs/git/system.py b/src/poetry/vcs/git/system.py
new file mode 100644
index 00000000000..c1520539462
--- /dev/null
+++ b/src/poetry/vcs/git/system.py
@@ -0,0 +1,64 @@
+from __future__ import annotations
+
+import subprocess
+
+from typing import TYPE_CHECKING
+
+from dulwich.client import find_git_command
+
+
+if TYPE_CHECKING:
+ from pathlib import Path
+ from typing import Any
+
+
+class SystemGit:
+ @classmethod
+ def clone(cls, repository: str, dest: Path) -> str:
+ cls._check_parameter(repository)
+
+ return cls.run("clone", "--recurse-submodules", "--", repository, str(dest))
+
+ @classmethod
+ def checkout(cls, rev: str, target: Path | None = None) -> str:
+ args = []
+
+ if target:
+ args += [
+ "--git-dir",
+ (target / ".git").as_posix(),
+ "--work-tree",
+ target.as_posix(),
+ ]
+
+ cls._check_parameter(rev)
+
+ args += ["checkout", rev]
+
+ return cls.run(*args)
+
+ @staticmethod
+ def run(*args: Any, **kwargs: Any) -> str:
+ folder = kwargs.pop("folder", None)
+ if folder:
+ args = (
+ "--git-dir",
+ (folder / ".git").as_posix(),
+ "--work-tree",
+ folder.as_posix(),
+ ) + args
+
+ git_command = find_git_command()
+ return (
+ subprocess.check_output(git_command + list(args), stderr=subprocess.STDOUT)
+ .decode()
+ .strip()
+ )
+
+ @staticmethod
+ def _check_parameter(parameter: str) -> None:
+ """
+ Checks a git parameter to avoid unwanted code execution.
+ """
+ if parameter.strip().startswith("-"):
+ raise RuntimeError(f"Invalid Git parameter: {parameter}")
diff --git a/src/poetry/version/version_selector.py b/src/poetry/version/version_selector.py
index 342b7649ca3..adac9fcbf9e 100644
--- a/src/poetry/version/version_selector.py
+++ b/src/poetry/version/version_selector.py
@@ -21,7 +21,7 @@ def find_best_candidate(
target_package_version: str | None = None,
allow_prereleases: bool = False,
source: str | None = None,
- ) -> Package | bool:
+ ) -> Package | None:
"""
Given a package name and optional version,
returns the latest Package that matches
@@ -40,7 +40,7 @@ def find_best_candidate(
only_prereleases = all(c.version.is_unstable() for c in candidates)
if not candidates:
- return False
+ return None
package = None
for candidate in candidates:
@@ -55,8 +55,6 @@ def find_best_candidate(
if package is None or package.version < candidate.version:
package = candidate
- if package is None:
- return False
return package
def find_recommended_require_version(self, package: Package) -> str:
diff --git a/tests/compat.py b/tests/compat.py
index aae06035ea7..1e140560366 100644
--- a/tests/compat.py
+++ b/tests/compat.py
@@ -2,11 +2,21 @@
try:
- import zipp
+ import zipp # nopycln: import
except ImportError:
import zipfile as zipp # noqa: F401, TC002
try:
- from typing import Protocol
+ from typing import Protocol # nopycln: import
except ImportError:
from typing_extensions import Protocol # noqa: F401, TC002
+
+from poetry.core.semver.helpers import parse_constraint
+from poetry.core.semver.version import Version
+
+from poetry.utils._compat import metadata
+
+
+is_poetry_core_1_1_0a7_compat = not parse_constraint(">1.1.0a7").allows(
+ Version.parse(metadata.version("poetry-core"))
+)
diff --git a/tests/config/test_config.py b/tests/config/test_config.py
index 985a7666ecb..68794108dd2 100644
--- a/tests/config/test_config.py
+++ b/tests/config/test_config.py
@@ -4,7 +4,6 @@
import re
from typing import TYPE_CHECKING
-from typing import Callable
from typing import Iterator
import pytest
@@ -17,6 +16,7 @@
if TYPE_CHECKING:
+ from collections.abc import Callable
from pathlib import Path
diff --git a/tests/conftest.py b/tests/conftest.py
index 6253f50fbd2..6176b3e9ff2 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -30,14 +30,18 @@
from poetry.utils.env import EnvManager
from poetry.utils.env import SystemEnv
from poetry.utils.env import VirtualEnv
+from poetry.utils.helpers import remove_directory
from tests.helpers import TestLocker
from tests.helpers import TestRepository
from tests.helpers import get_package
+from tests.helpers import isolated_environment
from tests.helpers import mock_clone
from tests.helpers import mock_download
if TYPE_CHECKING:
+ from _pytest.config import Config as PyTestConfig
+ from _pytest.config.argparsing import Parser
from pytest_mock import MockerFixture
from poetry.poetry import Poetry
@@ -45,6 +49,23 @@
from tests.types import ProjectFactory
+def pytest_addoption(parser: Parser) -> None:
+ parser.addoption(
+ "--integration",
+ action="store_true",
+ dest="integration",
+ default=False,
+ help="enable integration tests",
+ )
+
+
+def pytest_configure(config: PyTestConfig) -> None:
+ config.addinivalue_line("markers", "integration: mark integration tests")
+
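+ # deselect tests marked as integration unless the --integration flag was given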
+ if not config.option.integration:
+ config.option.markexpr = "not integration"
+
+
class Config(BaseConfig):
def get(self, setting_name: str, default: Any = None) -> Any:
self.merge(self._config_source.config)
@@ -185,21 +206,21 @@ def config(
c.set_config_source(config_source)
c.set_auth_config_source(auth_config_source)
- mocker.patch("poetry.factory.Factory.create_config", return_value=c)
+ mocker.patch("poetry.config.config.Config.create", return_value=c)
mocker.patch("poetry.config.config.Config.set_config_source")
return c
@pytest.fixture()
-def config_dir(tmp_dir: str) -> str:
- return tempfile.mkdtemp(prefix="poetry_config_", dir=tmp_dir)
+def config_dir(tmp_dir: str) -> Path:
+ return Path(tempfile.mkdtemp(prefix="poetry_config_", dir=tmp_dir))
@pytest.fixture(autouse=True)
-def mock_user_config_dir(mocker: MockerFixture, config_dir: str) -> None:
+def mock_user_config_dir(mocker: MockerFixture, config_dir: Path) -> None:
mocker.patch("poetry.locations.CONFIG_DIR", new=config_dir)
- mocker.patch("poetry.factory.CONFIG_DIR", new=config_dir)
+ mocker.patch("poetry.config.config.CONFIG_DIR", new=config_dir)
@pytest.fixture(autouse=True)
@@ -212,49 +233,39 @@ def download_mock(mocker: MockerFixture) -> None:
@pytest.fixture(autouse=True)
def pep517_metadata_mock(mocker: MockerFixture) -> None:
- @classmethod
- def _pep517_metadata(cls: PackageInfo, path: Path) -> PackageInfo:
+ def get_pep517_metadata(path: Path) -> PackageInfo:
with suppress(PackageInfoError):
return PackageInfo.from_setup_files(path)
return PackageInfo(name="demo", version="0.1.2")
mocker.patch(
- "poetry.inspection.info.PackageInfo._pep517_metadata",
- _pep517_metadata,
+ "poetry.inspection.info.get_pep517_metadata",
+ get_pep517_metadata,
)
@pytest.fixture
def environ() -> Iterator[None]:
- original_environ = dict(os.environ)
-
- yield
-
- os.environ.clear()
- os.environ.update(original_environ)
+ with isolated_environment():
+ yield
@pytest.fixture(autouse=True)
def isolate_environ() -> Iterator[None]:
"""Ensure the environment is isolated from user configuration."""
- original_environ = dict(os.environ)
-
- for var in os.environ:
- if var.startswith("POETRY_"):
- del os.environ[var]
-
- yield
+ with isolated_environment():
+ for var in os.environ:
+ if var.startswith("POETRY_") or var in {"PYTHONPATH", "VIRTUAL_ENV"}:
+ del os.environ[var]
- os.environ.clear()
- os.environ.update(original_environ)
+ yield
@pytest.fixture(autouse=True)
def git_mock(mocker: MockerFixture) -> None:
# Patch git module to not actually clone projects
- mocker.patch("poetry.core.vcs.git.Git.clone", new=mock_clone)
- mocker.patch("poetry.core.vcs.git.Git.checkout", new=lambda *_: None)
- p = mocker.patch("poetry.core.vcs.git.Git.rev_parse")
+ mocker.patch("poetry.vcs.git.Git.clone", new=mock_clone)
+ p = mocker.patch("poetry.vcs.git.Git.get_revision")
p.return_value = "9cf87a285a2d3fbb0b9fa621997b3acc3631ed24"
@@ -288,7 +299,7 @@ def tmp_dir() -> Iterator[str]:
yield dir_
- shutil.rmtree(dir_)
+ remove_directory(dir_, force=True)
@pytest.fixture
diff --git a/tests/console/commands/env/helpers.py b/tests/console/commands/env/helpers.py
index 97058c796cb..c9dc0a7a41a 100644
--- a/tests/console/commands/env/helpers.py
+++ b/tests/console/commands/env/helpers.py
@@ -3,12 +3,13 @@
from pathlib import Path
from typing import TYPE_CHECKING
from typing import Any
-from typing import Callable
from poetry.core.semver.version import Version
if TYPE_CHECKING:
+ from collections.abc import Callable
+
from poetry.core.version.pep440.version import PEP440Version
VERSION_3_7_1 = Version.parse("3.7.1")
diff --git a/tests/console/commands/env/test_use.py b/tests/console/commands/env/test_use.py
index 34bbf9ad0da..ba26064765c 100644
--- a/tests/console/commands/env/test_use.py
+++ b/tests/console/commands/env/test_use.py
@@ -71,10 +71,12 @@ def test_activate_activates_non_existing_virtualenv_no_envs_file(
mock_build_env.assert_called_with(
venv_py37,
executable="/usr/bin/python3.7",
- flags={"always-copy": False, "system-site-packages": False},
- with_pip=True,
- with_setuptools=True,
- with_wheel=True,
+ flags={
+ "always-copy": False,
+ "system-site-packages": False,
+ "no-pip": False,
+ "no-setuptools": False,
+ },
)
envs_file = TOMLFile(venv_cache / "envs.toml")
diff --git a/tests/console/commands/plugin/conftest.py b/tests/console/commands/plugin/conftest.py
index 2c7f19f264c..025772c805f 100644
--- a/tests/console/commands/plugin/conftest.py
+++ b/tests/console/commands/plugin/conftest.py
@@ -37,7 +37,11 @@ def installed() -> InstalledRepository:
def configure_sources_factory(repo: TestRepository) -> SourcesFactory:
def _configure_sources(
- poetry: Poetry, sources: Source, config: Config, io: IO
+ poetry: Poetry,
+ sources: Source,
+ config: Config,
+ io: IO,
+ disable_cache: bool = False,
) -> None:
pool = Pool()
pool.add_repository(repo)
diff --git a/tests/console/commands/test_add.py b/tests/console/commands/test_add.py
index 987739b234e..586cc47b623 100644
--- a/tests/console/commands/test_add.py
+++ b/tests/console/commands/test_add.py
@@ -10,6 +10,7 @@
from poetry.core.semver.version import Version
from poetry.repositories.legacy_repository import LegacyRepository
+from tests.compat import is_poetry_core_1_1_0a7_compat
from tests.helpers import get_dependency
from tests.helpers import get_package
@@ -981,7 +982,7 @@ def test_add_chooses_prerelease_if_only_prereleases_are_available(
tester.execute("foo")
expected = """\
-Using version ^1.2.3-beta.1 for foo
+Using version ^1.2.3b1 for foo
Updating dependencies
Resolving dependencies...
@@ -992,6 +993,8 @@ def test_add_chooses_prerelease_if_only_prereleases_are_available(
• Installing foo (1.2.3b1)
"""
+ if is_poetry_core_1_1_0a7_compat:
+ expected = expected.replace("^1.2.3b1", "^1.2.3-beta.1")
assert expected in tester.io.fetch_output()
@@ -1912,7 +1915,7 @@ def test_add_chooses_prerelease_if_only_prereleases_are_available_old_installer(
old_tester.execute("foo")
expected = """\
-Using version ^1.2.3-beta.1 for foo
+Using version ^1.2.3b1 for foo
Updating dependencies
Resolving dependencies...
@@ -1923,6 +1926,8 @@ def test_add_chooses_prerelease_if_only_prereleases_are_available_old_installer(
- Installing foo (1.2.3b1)
"""
+ if is_poetry_core_1_1_0a7_compat:
+ expected = expected.replace("^1.2.3b1", "^1.2.3-beta.1")
assert expected in old_tester.io.fetch_output()
diff --git a/tests/console/commands/test_config.py b/tests/console/commands/test_config.py
index 4d340ba8b33..bf5fb06ab67 100644
--- a/tests/console/commands/test_config.py
+++ b/tests/console/commands/test_config.py
@@ -7,10 +7,12 @@
import pytest
+from deepdiff import DeepDiff
from poetry.core.pyproject.exceptions import PyProjectException
from poetry.config.config_source import ConfigSource
from poetry.factory import Factory
+from tests.conftest import Config
if TYPE_CHECKING:
@@ -20,7 +22,6 @@
from pytest_mock import MockerFixture
from poetry.config.dict_config_source import DictConfigSource
- from tests.conftest import Config
from tests.types import CommandTesterFactory
from tests.types import FixtureDirGetter
@@ -51,11 +52,15 @@ def test_list_displays_default_value_if_not_set(
venv_path = json.dumps(os.path.join("{cache-dir}", "virtualenvs"))
expected = f"""cache-dir = {cache_dir}
experimental.new-installer = true
+experimental.system-git-client = false
installer.max-workers = null
+installer.no-binary = null
installer.parallel = true
virtualenvs.create = true
virtualenvs.in-project = null
virtualenvs.options.always-copy = false
+virtualenvs.options.no-pip = false
+virtualenvs.options.no-setuptools = false
virtualenvs.options.system-site-packages = false
virtualenvs.path = {venv_path} # {config_cache_dir / 'virtualenvs'}
virtualenvs.prefer-active-python = false
@@ -75,11 +80,15 @@ def test_list_displays_set_get_setting(
venv_path = json.dumps(os.path.join("{cache-dir}", "virtualenvs"))
expected = f"""cache-dir = {cache_dir}
experimental.new-installer = true
+experimental.system-git-client = false
installer.max-workers = null
+installer.no-binary = null
installer.parallel = true
virtualenvs.create = false
virtualenvs.in-project = null
virtualenvs.options.always-copy = false
+virtualenvs.options.no-pip = false
+virtualenvs.options.no-setuptools = false
virtualenvs.options.system-site-packages = false
virtualenvs.path = {venv_path} # {config_cache_dir / 'virtualenvs'}
virtualenvs.prefer-active-python = false
@@ -123,11 +132,15 @@ def test_list_displays_set_get_local_setting(
venv_path = json.dumps(os.path.join("{cache-dir}", "virtualenvs"))
expected = f"""cache-dir = {cache_dir}
experimental.new-installer = true
+experimental.system-git-client = false
installer.max-workers = null
+installer.no-binary = null
installer.parallel = true
virtualenvs.create = false
virtualenvs.in-project = null
virtualenvs.options.always-copy = false
+virtualenvs.options.no-pip = false
+virtualenvs.options.no-setuptools = false
virtualenvs.options.system-site-packages = false
virtualenvs.path = {venv_path} # {config_cache_dir / 'virtualenvs'}
virtualenvs.prefer-active-python = false
@@ -191,3 +204,33 @@ def test_config_installer_parallel(
"install"
)._command._installer._executor._max_workers
assert workers == 1
+
+
+@pytest.mark.parametrize(
+ ("value", "expected"),
+ [
+ ("true", [":all:"]),
+ ("1", [":all:"]),
+ ("false", [":none:"]),
+ ("0", [":none:"]),
+ ("pytest", ["pytest"]),
+ ("PyTest", ["pytest"]),
+ ("pytest,black", ["pytest", "black"]),
+ ("", []),
+ ],
+)
+def test_config_installer_no_binary(
+ tester: CommandTester, value: str, expected: list[str]
+) -> None:
+ setting = "installer.no-binary"
+
+ tester.execute(setting)
+ assert tester.io.fetch_output().strip() == "null"
+
+ config = Config.create()
+ assert not config.get(setting)
+
+ tester.execute(f"{setting} '{value}'")
+
+ config = Config.create(reload=True)
+ assert not DeepDiff(config.get(setting), expected, ignore_order=True)
diff --git a/tests/console/commands/test_install.py b/tests/console/commands/test_install.py
index a3545ef52c2..310a3593467 100644
--- a/tests/console/commands/test_install.py
+++ b/tests/console/commands/test_install.py
@@ -103,7 +103,8 @@ def test_group_options_are_passed_to_the_installer(
"""
mocker.patch.object(tester.command.installer, "run", return_value=0)
editable_builder_mock = mocker.patch(
- "poetry.masonry.builders.EditableBuilder", side_effect=ModuleOrPackageNotFound()
+ "poetry.masonry.builders.editable.EditableBuilder",
+ side_effect=ModuleOrPackageNotFound(),
)
if not with_root:
diff --git a/tests/console/commands/test_search.py b/tests/console/commands/test_search.py
index 47aa4d06316..185717df425 100644
--- a/tests/console/commands/test_search.py
+++ b/tests/console/commands/test_search.py
@@ -52,7 +52,7 @@ def test_search(tester: CommandTester, http: type[httpretty.httpretty]):
sqlalchemy-audit (0.1.0)
sqlalchemy-audit provides an easy way to set up revision tracking for your data.
-transmogrify.sqlalchemy (1.0.2)
+transmogrify-sqlalchemy (1.0.2)
Feed data from SQLAlchemy into a transmogrifier pipeline
sqlalchemy-schemadisplay (1.3)
@@ -96,4 +96,9 @@ def test_search(tester: CommandTester, http: type[httpretty.httpretty]):
SAP Sybase SQL Anywhere dialect for SQLAlchemy
"""
- assert tester.io.fetch_output() == expected
+ # TODO remove this when https://github.com/python-poetry/poetry-core/pull/328
+ # reaches a published version of poetry-core.
+ output = tester.io.fetch_output()
+ output = output.replace("transmogrify.sqlalchemy", "transmogrify-sqlalchemy")
+
+ assert output == expected
diff --git a/tests/console/commands/test_show.py b/tests/console/commands/test_show.py
index 73a63c7f08e..a34bf422209 100644
--- a/tests/console/commands/test_show.py
+++ b/tests/console/commands/test_show.py
@@ -1686,6 +1686,121 @@ def test_show_tree_no_dev(tester: CommandTester, poetry: Poetry, installed: Repo
assert tester.io.fetch_output() == expected
+def test_show_tree_why_package(
+ tester: CommandTester, poetry: Poetry, installed: Repository
+):
+ poetry.package.add_dependency(Factory.create_dependency("a", "=0.0.1"))
+
+ a = get_package("a", "0.0.1")
+ installed.add_package(a)
+ a.add_dependency(Factory.create_dependency("b", "=0.0.1"))
+
+ b = get_package("b", "0.0.1")
+ a.add_dependency(Factory.create_dependency("c", "=0.0.1"))
+ installed.add_package(b)
+
+ c = get_package("c", "0.0.1")
+ installed.add_package(c)
+
+ poetry.locker.mock_lock_data(
+ {
+ "package": [
+ {
+ "name": "a",
+ "version": "0.0.1",
+ "dependencies": {"b": "=0.0.1"},
+ "python-versions": "*",
+ "optional": False,
+ },
+ {
+ "name": "b",
+ "version": "0.0.1",
+ "dependencies": {"c": "=0.0.1"},
+ "python-versions": "*",
+ "optional": False,
+ },
+ {
+ "name": "c",
+ "version": "0.0.1",
+ "python-versions": "*",
+ "optional": False,
+ },
+ ],
+ "metadata": {
+ "python-versions": "*",
+ "platform": "*",
+ "content-hash": "123456789",
+ "hashes": {"a": [], "b": [], "c": []},
+ },
+ }
+ )
+
+ tester.execute("--tree --why b")
+
+ expected = """\
+a 0.0.1
+└── b =0.0.1
+ └── c =0.0.1 \n"""
+
+ assert tester.io.fetch_output() == expected
+
+
+def test_show_tree_why(tester: CommandTester, poetry: Poetry, installed: Repository):
+ poetry.package.add_dependency(Factory.create_dependency("a", "=0.0.1"))
+
+ a = get_package("a", "0.0.1")
+ installed.add_package(a)
+ a.add_dependency(Factory.create_dependency("b", "=0.0.1"))
+
+ b = get_package("b", "0.0.1")
+ a.add_dependency(Factory.create_dependency("c", "=0.0.1"))
+ installed.add_package(b)
+
+ c = get_package("c", "0.0.1")
+ installed.add_package(c)
+
+ poetry.locker.mock_lock_data(
+ {
+ "package": [
+ {
+ "name": "a",
+ "version": "0.0.1",
+ "dependencies": {"b": "=0.0.1"},
+ "python-versions": "*",
+ "optional": False,
+ },
+ {
+ "name": "b",
+ "version": "0.0.1",
+ "dependencies": {"c": "=0.0.1"},
+ "python-versions": "*",
+ "optional": False,
+ },
+ {
+ "name": "c",
+ "version": "0.0.1",
+ "python-versions": "*",
+ "optional": False,
+ },
+ ],
+ "metadata": {
+ "python-versions": "*",
+ "platform": "*",
+ "content-hash": "123456789",
+ "hashes": {"a": [], "b": [], "c": []},
+ },
+ }
+ )
+
+ tester.execute("--why")
+
+ # this has to be on a single line due to the padding whitespace, which gets stripped
+ # by pre-commit.
+ expected = """a 0.0.1 \nb 0.0.1 from a \nc 0.0.1 from b \n"""
+
+ assert tester.io.fetch_output() == expected
+
+
def test_show_required_by_deps(
tester: CommandTester, poetry: Poetry, installed: Repository
):
diff --git a/tests/console/commands/test_version.py b/tests/console/commands/test_version.py
index b73c92a4fc5..36487a3f38a 100644
--- a/tests/console/commands/test_version.py
+++ b/tests/console/commands/test_version.py
@@ -5,6 +5,7 @@
import pytest
from poetry.console.commands.version import VersionCommand
+from tests.compat import is_poetry_core_1_1_0a7_compat
if TYPE_CHECKING:
@@ -35,23 +36,25 @@ def tester(command_tester_factory: CommandTesterFactory) -> CommandTester:
("1.2.3", "patch", "1.2.4"),
("1.2.3", "minor", "1.3.0"),
("1.2.3", "major", "2.0.0"),
- ("1.2.3", "prepatch", "1.2.4-alpha.0"),
- ("1.2.3", "preminor", "1.3.0-alpha.0"),
- ("1.2.3", "premajor", "2.0.0-alpha.0"),
+ ("1.2.3", "prepatch", "1.2.4a0"),
+ ("1.2.3", "preminor", "1.3.0a0"),
+ ("1.2.3", "premajor", "2.0.0a0"),
("1.2.3-beta.1", "patch", "1.2.3"),
("1.2.3-beta.1", "minor", "1.3.0"),
("1.2.3-beta.1", "major", "2.0.0"),
- ("1.2.3-beta.1", "prerelease", "1.2.3-beta.2"),
- ("1.2.3-beta1", "prerelease", "1.2.3-beta.2"),
- ("1.2.3beta1", "prerelease", "1.2.3-beta.2"),
- ("1.2.3b1", "prerelease", "1.2.3-beta.2"),
- ("1.2.3", "prerelease", "1.2.4-alpha.0"),
+ ("1.2.3-beta.1", "prerelease", "1.2.3b2"),
+ ("1.2.3-beta1", "prerelease", "1.2.3b2"),
+ ("1.2.3beta1", "prerelease", "1.2.3b2"),
+ ("1.2.3b1", "prerelease", "1.2.3b2"),
+ ("1.2.3", "prerelease", "1.2.4a0"),
("0.0.0", "1.2.3", "1.2.3"),
],
)
def test_increment_version(
version: str, rule: str, expected: str, command: VersionCommand
):
+ if is_poetry_core_1_1_0a7_compat:
+ expected = expected.replace("a", "-alpha.").replace("b", "-beta.")
assert command.increment_version(version, rule).text == expected
@@ -73,3 +76,12 @@ def test_version_update(tester: CommandTester):
def test_short_version_update(tester: CommandTester):
tester.execute("--short 2.0.0")
assert tester.io.fetch_output() == "2.0.0\n"
+
+
+def test_dry_run(tester: CommandTester):
+ old_pyproject = tester.command.poetry.file.path.read_text()
+ tester.execute("--dry-run major")
+
+ new_pyproject = tester.command.poetry.file.path.read_text()
+ assert tester.io.fetch_output() == "Bumping version from 1.2.3 to 2.0.0\n"
+ assert old_pyproject == new_pyproject
diff --git a/tests/console/conftest.py b/tests/console/conftest.py
index 697f7badf81..022b14a2ebc 100644
--- a/tests/console/conftest.py
+++ b/tests/console/conftest.py
@@ -71,9 +71,8 @@ def setup(
p.return_value = installed
# Patch git module to not actually clone projects
- mocker.patch("poetry.core.vcs.git.Git.clone", new=mock_clone)
- mocker.patch("poetry.core.vcs.git.Git.checkout", new=lambda *_: None)
- p = mocker.patch("poetry.core.vcs.git.Git.rev_parse")
+ mocker.patch("poetry.vcs.git.Git.clone", new=mock_clone)
+ p = mocker.patch("poetry.vcs.git.Git.get_revision")
p.return_value = "9cf87a285a2d3fbb0b9fa621997b3acc3631ed24"
# Patch the virtual environment creation do actually do nothing
@@ -99,6 +98,7 @@ def project_directory() -> str:
@pytest.fixture
def poetry(repo: TestRepository, project_directory: str, config: Config) -> Poetry:
+
p = Factory().create_poetry(
Path(__file__).parent.parent / "fixtures" / project_directory
)
diff --git a/tests/console/test_application.py b/tests/console/test_application.py
index 3833d809228..6e65b742592 100644
--- a/tests/console/test_application.py
+++ b/tests/console/test_application.py
@@ -4,6 +4,8 @@
from typing import TYPE_CHECKING
+import pytest
+
from cleo.testers.application_tester import ApplicationTester
from entrypoints import EntryPoint
@@ -108,3 +110,23 @@ def test_application_execute_plugin_command_with_plugins_disabled(
assert tester.io.fetch_output() == ""
assert tester.io.fetch_error() == '\nThe command "foo" does not exist.\n'
assert tester.status_code == 1
+
+
+@pytest.mark.parametrize("disable_cache", [True, False])
+def test_application_verify_source_cache_flag(disable_cache: bool):
+ app = Application()
+
+ tester = ApplicationTester(app)
+ command = "debug info"
+
+ if disable_cache:
+ command = f"{command} --no-cache"
+
+ assert not app._poetry
+
+ tester.execute(command)
+
+ assert app.poetry.pool.repositories
+
+ for repo in app.poetry.pool.repositories:
+ assert repo._disable_cache == disable_cache
diff --git a/tests/fixtures/extended_project/build.py b/tests/fixtures/extended_project/build.py
index e69de29bb2d..7a12c342719 100644
--- a/tests/fixtures/extended_project/build.py
+++ b/tests/fixtures/extended_project/build.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import Any
+
+
+def build(setup_kwargs: dict[str, Any]):
+ assert setup_kwargs["name"] == "extended-project"
+ assert setup_kwargs["version"] == "1.2.3"
+
+ dynamic_module = Path(__file__).parent / "extended_project" / "built.py"
+ dynamic_module.write_text("# Generated by build.py")
diff --git a/tests/fixtures/extended_project/pyproject.toml b/tests/fixtures/extended_project/pyproject.toml
index ecb7deb9107..15b72917b0f 100644
--- a/tests/fixtures/extended_project/pyproject.toml
+++ b/tests/fixtures/extended_project/pyproject.toml
@@ -20,8 +20,10 @@ classifiers = [
"Topic :: Software Development :: Libraries :: Python Modules"
]
-build = "build.py"
+[tool.poetry.build]
+script = "build.py"
+generate-setup-file = true
# Requirements
[tool.poetry.dependencies]
-python = "~2.7 || ^3.4"
+python = "^3.7"
diff --git a/tests/fixtures/extended_project_without_setup/pyproject.toml b/tests/fixtures/extended_project_without_setup/pyproject.toml
index 5c9dc2774c7..b3f4818f29c 100644
--- a/tests/fixtures/extended_project_without_setup/pyproject.toml
+++ b/tests/fixtures/extended_project_without_setup/pyproject.toml
@@ -27,3 +27,7 @@ generate-setup-file = false
# Requirements
[tool.poetry.dependencies]
python = "~2.7 || ^3.4"
+
+[build-system]
+requires = ["poetry-core", "cython"]
+build-backend = "poetry.core.masonry.api"
diff --git a/tests/fixtures/simple_project/pyproject.toml b/tests/fixtures/simple_project/pyproject.toml
index 41a062fc09a..45a61d43cad 100644
--- a/tests/fixtures/simple_project/pyproject.toml
+++ b/tests/fixtures/simple_project/pyproject.toml
@@ -7,7 +7,7 @@ authors = [
]
license = "MIT"
-readme = "README.rst"
+readme = ["README.rst"]
homepage = "https://python-poetry.org"
repository = "https://github.com/python-poetry/poetry"
@@ -31,5 +31,5 @@ fox = "fuz.foo:bar.baz"
[build-system]
-requires = ["poetry-core>=1.0.2"]
+requires = ["poetry-core>=1.1.0a7"]
build-backend = "poetry.core.masonry.api"
diff --git a/tests/helpers.py b/tests/helpers.py
index 08eb045baba..1969873263b 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -1,12 +1,16 @@
from __future__ import annotations
+import contextlib
import os
+import re
import shutil
+import sys
import urllib.parse
from pathlib import Path
from typing import TYPE_CHECKING
from typing import Any
+from typing import Iterator
from poetry.core.masonry.utils.helpers import escape_name
from poetry.core.masonry.utils.helpers import escape_version
@@ -15,22 +19,21 @@
from poetry.core.toml.file import TOMLFile
from poetry.core.vcs.git import ParsedUrl
+from poetry.config.config import Config
from poetry.console.application import Application
from poetry.factory import Factory
from poetry.installation.executor import Executor
from poetry.packages import Locker
from poetry.repositories import Repository
from poetry.repositories.exceptions import PackageNotFound
-from poetry.utils._compat import WINDOWS
if TYPE_CHECKING:
from poetry.core.packages.dependency import Dependency
- from poetry.core.packages.types import DependencyTypes
from poetry.core.semver.version import Version
from tomlkit.toml_document import TOMLDocument
- from poetry.installation.operations import OperationTypes
+ from poetry.installation.operations.operation import Operation
from poetry.poetry import Poetry
FIXTURE_PATH = Path(__file__).parent / "fixtures"
@@ -46,7 +49,7 @@ def get_dependency(
groups: list[str] | None = None,
optional: bool = False,
allows_prereleases: bool = False,
-) -> DependencyTypes:
+) -> Dependency:
if constraint is None:
constraint = "*"
@@ -67,18 +70,14 @@ def fixture(path: str | None = None) -> Path:
def copy_or_symlink(source: Path, dest: Path) -> None:
- if dest.exists():
- if dest.is_symlink():
- os.unlink(str(dest))
- elif dest.is_dir():
- shutil.rmtree(str(dest))
- else:
- os.unlink(str(dest))
-
- # Python2 does not support os.symlink on Windows whereas Python3 does.
+ if dest.is_symlink() or dest.is_file():
+ dest.unlink() # missing_ok is only available in Python >= 3.8
+ elif dest.is_dir():
+ shutil.rmtree(dest)
+
# os.symlink requires either administrative privileges or developer mode on Win10,
# throwing an OSError if neither is active.
- if WINDOWS:
+ if sys.platform == "win32":
try:
os.symlink(str(source), str(dest), target_is_directory=source.is_dir())
except OSError:
@@ -90,19 +89,34 @@ def copy_or_symlink(source: Path, dest: Path) -> None:
os.symlink(str(source), str(dest))
-def mock_clone(_: Any, source: str, dest: Path) -> None:
+class MockDulwichRepo:
+ def __init__(self, root: Path | str, **__: Any) -> None:
+ self.path = str(root)
+
+ def head(self) -> bytes:
+ return b"9cf87a285a2d3fbb0b9fa621997b3acc3631ed24"
+
+
+def mock_clone(
+ url: str,
+ *_: Any,
+ source_root: Path | None = None,
+ **__: Any,
+) -> MockDulwichRepo:
# Checking source to determine which folder we need to copy
- parsed = ParsedUrl.parse(source)
+ parsed = ParsedUrl.parse(url)
+ path = re.sub(r"(\.git)?$", "", parsed.pathname.lstrip("/"))
+
+ folder = Path(__file__).parent / "fixtures" / "git" / parsed.resource / path
+
+ if not source_root:
+ source_root = Path(Config.create().get("cache-dir")) / "src"
- folder = (
- Path(__file__).parent
- / "fixtures"
- / "git"
- / parsed.resource
- / parsed.pathname.lstrip("/").rstrip(".git")
- )
+ dest = source_root / path
+ dest.parent.mkdir(parents=True, exist_ok=True)
copy_or_symlink(folder, dest)
+ return MockDulwichRepo(dest)
def mock_download(url: str, dest: str, **__: Any) -> None:
@@ -115,7 +129,7 @@ def mock_download(url: str, dest: str, **__: Any) -> None:
class TestExecutor(Executor):
- def __init__(self, *args: Any, **kwargs: Any):
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self._installs = []
@@ -134,24 +148,24 @@ def updates(self) -> list[Package]:
def removals(self) -> list[Package]:
return self._uninstalls
- def _do_execute_operation(self, operation: OperationTypes) -> None:
+ def _do_execute_operation(self, operation: Operation) -> None:
super()._do_execute_operation(operation)
if not operation.skipped:
getattr(self, f"_{operation.job_type}s").append(operation.package)
- def _execute_install(self, operation: OperationTypes) -> int:
+ def _execute_install(self, operation: Operation) -> int:
return 0
- def _execute_update(self, operation: OperationTypes) -> int:
+ def _execute_update(self, operation: Operation) -> int:
return 0
- def _execute_remove(self, operation: OperationTypes) -> int:
+ def _execute_remove(self, operation: Operation) -> int:
return 0
class PoetryTestApplication(Application):
- def __init__(self, poetry: Poetry):
+ def __init__(self, poetry: Poetry) -> None:
super().__init__()
self._poetry = poetry
@@ -166,7 +180,7 @@ def reset_poetry(self) -> None:
class TestLocker(Locker):
- def __init__(self, lock: str | Path, local_config: dict):
+ def __init__(self, lock: str | Path, local_config: dict) -> None:
self._lock = TOMLFile(lock)
self._local_config = local_config
self._lock_data = None
@@ -218,3 +232,21 @@ def find_links_for_package(self, package: Package) -> list[Link]:
f"-{escape_version(package.version.text)}-py2.py3-none-any.whl"
)
]
+
+
+@contextlib.contextmanager
+def isolated_environment(
+ environ: dict[str, Any] | None = None, clear: bool = False
+) -> Iterator[None]:
+ original_environ = dict(os.environ)
+
+ if clear:
+ os.environ.clear()
+
+ if environ:
+ os.environ.update(environ)
+
+ yield
+
+ os.environ.clear()
+ os.environ.update(original_environ)
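
A quick aside on the new `isolated_environment` helper above: it snapshots `os.environ`, optionally clears it, applies the given overrides, and restores the snapshot when the block exits. A minimal usage sketch (the test name and the environment variable are illustrative only, assuming the variable is not already set in the outer environment):

import os

from tests.helpers import isolated_environment


def test_reads_token_from_env() -> None:
    # run the body with a clean environment containing only the override
    with isolated_environment({"POETRY_PYPI_TOKEN_FOO": "secret"}, clear=True):
        assert os.environ["POETRY_PYPI_TOKEN_FOO"] == "secret"

    # the snapshot taken on entry is restored on exit
    assert "POETRY_PYPI_TOKEN_FOO" not in os.environ

Note that the helper restores the environment after the `yield` without a `try`/`finally`, so an exception raised inside the block will skip the restore.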
diff --git a/tests/installation/fixtures/with-pypi-repository.test b/tests/installation/fixtures/with-pypi-repository.test
index d1ed1ae55ed..27fee8ce755 100644
--- a/tests/installation/fixtures/with-pypi-repository.test
+++ b/tests/installation/fixtures/with-pypi-repository.test
@@ -7,9 +7,9 @@ optional = false
python-versions = "*"
[package.extras]
-dev = ["coverage", "hypothesis", "pympler", "pytest", "six", "zope.interface", "sphinx", "zope.interface"]
-docs = ["sphinx", "zope.interface"]
-tests = ["coverage", "hypothesis", "pympler", "pytest", "six", "zope.interface"]
+dev = ["coverage", "hypothesis", "pympler", "pytest", "six", "zope-interface", "sphinx", "zope-interface"]
+docs = ["sphinx", "zope-interface"]
+tests = ["coverage", "hypothesis", "pympler", "pytest", "six", "zope-interface"]
[[package]]
name = "colorama"
diff --git a/tests/installation/test_chooser.py b/tests/installation/test_chooser.py
index 973e4842ff6..51c8b66c2ab 100644
--- a/tests/installation/test_chooser.py
+++ b/tests/installation/test_chooser.py
@@ -23,6 +23,8 @@
from httpretty.core import HTTPrettyRequest
+ from tests.conftest import Config
+
JSON_FIXTURES = (
Path(__file__).parent.parent / "repositories" / "fixtures" / "pypi.org" / "json"
@@ -121,6 +123,46 @@ def test_chooser_chooses_universal_wheel_link_if_available(
assert link.filename == "pytest-3.5.0-py2.py3-none-any.whl"
+@pytest.mark.parametrize(
+ ("policy", "filename"),
+ [
+ (":all:", "pytest-3.5.0.tar.gz"),
+ (":none:", "pytest-3.5.0-py2.py3-none-any.whl"),
+ ("black", "pytest-3.5.0-py2.py3-none-any.whl"),
+ ("pytest", "pytest-3.5.0.tar.gz"),
+ ("pytest,black", "pytest-3.5.0.tar.gz"),
+ ],
+)
+@pytest.mark.parametrize("source_type", ["", "legacy"])
+def test_chooser_no_binary_policy(
+ env: MockEnv,
+ mock_pypi: None,
+ mock_legacy: None,
+ source_type: str,
+ pool: Pool,
+ policy: str,
+ filename: str,
+ config: Config,
+):
+ config.merge({"installer": {"no-binary": policy.split(",")}})
+
+ chooser = Chooser(pool, env, config)
+
+ package = Package("pytest", "3.5.0")
+ if source_type == "legacy":
+ package = Package(
+ package.name,
+ package.version.text,
+ source_type="legacy",
+ source_reference="foo",
+ source_url="https://foo.bar/simple/",
+ )
+
+ link = chooser.choose_for(package)
+
+ assert link.filename == filename
+
+
@pytest.mark.parametrize("source_type", ["", "legacy"])
def test_chooser_chooses_specific_python_universal_wheel_link_if_available(
env: MockEnv, mock_pypi: None, mock_legacy: None, source_type: str, pool: Pool
diff --git a/tests/installation/test_installer.py b/tests/installation/test_installer.py
index cd065937447..8d49a4faaf0 100644
--- a/tests/installation/test_installer.py
+++ b/tests/installation/test_installer.py
@@ -47,7 +47,7 @@
if TYPE_CHECKING:
from pytest_mock import MockerFixture
- from poetry.installation.operations import OperationTypes
+ from poetry.installation.operations.operation import Operation
from poetry.packages import DependencyPackage
from poetry.utils.env import Env
from tests.conftest import Config
@@ -62,7 +62,7 @@ def _get_installer(self) -> NoopInstaller:
class Executor(BaseExecutor):
- def __init__(self, *args: Any, **kwargs: Any):
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self._installs: list[DependencyPackage] = []
@@ -81,19 +81,19 @@ def updates(self) -> list[DependencyPackage]:
def removals(self) -> list[DependencyPackage]:
return self._uninstalls
- def _do_execute_operation(self, operation: OperationTypes) -> None:
+ def _do_execute_operation(self, operation: Operation) -> None:
super()._do_execute_operation(operation)
if not operation.skipped:
getattr(self, f"_{operation.job_type}s").append(operation.package)
- def _execute_install(self, operation: OperationTypes) -> int:
+ def _execute_install(self, operation: Operation) -> int:
return 0
- def _execute_update(self, operation: OperationTypes) -> int:
+ def _execute_update(self, operation: Operation) -> int:
return 0
- def _execute_uninstall(self, operation: OperationTypes) -> int:
+ def _execute_uninstall(self, operation: Operation) -> int:
return 0
@@ -106,7 +106,7 @@ def load(
class Locker(BaseLocker):
- def __init__(self, lock_path: str | Path):
+ def __init__(self, lock_path: str | Path) -> None:
self._lock = TOMLFile(Path(lock_path).joinpath("poetry.lock"))
self._written_data = None
self._locked = False
@@ -1168,6 +1168,14 @@ def test_installer_with_pypi_repository(
installer.run()
expected = fixture("with-pypi-repository")
+
+ # TODO remove this when https://github.com/python-poetry/poetry-core/pull/328
+ # reaches a published version of poetry-core.
+ extras = locker.written_data["package"][0]["extras"]
+ for key, values in list(extras.items()):
+ extras[key] = [
+ value.replace("zope.interface", "zope-interface") for value in values
+ ]
assert not DeepDiff(expected, locker.written_data, ignore_order=True)
@@ -1178,8 +1186,15 @@ def test_run_installs_with_local_file(
package: ProjectPackage,
fixture_dir: FixtureDirGetter,
):
+ root_dir = Path(__file__).parent.parent.parent
+ package.root_dir = root_dir
+ locker.set_lock_path(root_dir)
file_path = fixture_dir("distributions/demo-0.1.0-py2.py3-none-any.whl")
- package.add_dependency(Factory.create_dependency("demo", {"file": str(file_path)}))
+ package.add_dependency(
+ Factory.create_dependency(
+ "demo", {"file": str(file_path.relative_to(root_dir))}, root_dir=root_dir
+ )
+ )
repo.add_package(get_package("pendulum", "1.4.4"))
@@ -1198,10 +1213,17 @@ def test_run_installs_wheel_with_no_requires_dist(
package: ProjectPackage,
fixture_dir: FixtureDirGetter,
):
+ root_dir = Path(__file__).parent.parent.parent
+ package.root_dir = root_dir
+ locker.set_lock_path(root_dir)
file_path = fixture_dir(
"wheel_with_no_requires_dist/demo-0.1.0-py2.py3-none-any.whl"
)
- package.add_dependency(Factory.create_dependency("demo", {"file": str(file_path)}))
+ package.add_dependency(
+ Factory.create_dependency(
+ "demo", {"file": str(file_path.relative_to(root_dir))}, root_dir=root_dir
+ )
+ )
installer.run()
@@ -1220,10 +1242,15 @@ def test_run_installs_with_local_poetry_directory_and_extras(
tmpdir: Path,
fixture_dir: FixtureDirGetter,
):
+ root_dir = Path(__file__).parent.parent.parent
+ package.root_dir = root_dir
+ locker.set_lock_path(root_dir)
file_path = fixture_dir("project_with_extras")
package.add_dependency(
Factory.create_dependency(
- "project-with-extras", {"path": str(file_path), "extras": ["extras_a"]}
+ "project-with-extras",
+ {"path": str(file_path.relative_to(root_dir)), "extras": ["extras_a"]},
+ root_dir=root_dir,
)
)
@@ -1311,9 +1338,16 @@ def test_run_installs_with_local_setuptools_directory(
tmpdir: Path,
fixture_dir: FixtureDirGetter,
):
+ root_dir = Path(__file__).parent.parent.parent
+ package.root_dir = root_dir
+ locker.set_lock_path(root_dir)
file_path = fixture_dir("project_with_setup/")
package.add_dependency(
- Factory.create_dependency("project-with-setup", {"path": str(file_path)})
+ Factory.create_dependency(
+ "project-with-setup",
+ {"path": str(file_path.relative_to(root_dir))},
+ root_dir=root_dir,
+ )
)
repo.add_package(get_package("pendulum", "1.4.4"))
diff --git a/tests/installation/test_installer_old.py b/tests/installation/test_installer_old.py
index 86b15093b26..abf32b9b40a 100644
--- a/tests/installation/test_installer_old.py
+++ b/tests/installation/test_installer_old.py
@@ -54,7 +54,7 @@ def load(
class Locker(BaseLocker):
- def __init__(self, lock_path: str | Path):
+ def __init__(self, lock_path: str | Path) -> None:
self._lock = TOMLFile(Path(lock_path).joinpath("poetry.lock"))
self._written_data = None
self._locked = False
@@ -832,6 +832,14 @@ def test_installer_with_pypi_repository(
installer.run()
expected = fixture("with-pypi-repository")
+
+ # TODO remove this when https://github.com/python-poetry/poetry-core/pull/328
+ # reaches a published version of poetry-core.
+ extras = locker.written_data["package"][0]["extras"]
+ for key, values in list(extras.items()):
+ extras[key] = [
+ value.replace("zope.interface", "zope-interface") for value in values
+ ]
assert not DeepDiff(expected, locker.written_data, ignore_order=True)
diff --git a/tests/installation/test_pip_installer.py b/tests/installation/test_pip_installer.py
index 719df21cd5b..797e7874eba 100644
--- a/tests/installation/test_pip_installer.py
+++ b/tests/installation/test_pip_installer.py
@@ -117,44 +117,22 @@ def test_install_with_non_pypi_default_repository(pool: Pool, installer: PipInst
installer.install(bar)
-def test_install_with_cert():
- ca_path = "path/to/cert.pem"
- pool = Pool()
-
- default = LegacyRepository("default", "https://foo.bar", cert=Path(ca_path))
-
- pool.add_repository(default, default=True)
-
- null_env = NullEnv()
-
- installer = PipInstaller(null_env, NullIO(), pool)
-
- foo = Package(
- "foo",
- "0.0.0",
- source_type="legacy",
- source_reference=default.name,
- source_url=default.url,
- )
-
- installer.install(foo)
-
- assert len(null_env.executed) == 1
- cmd = null_env.executed[0]
- assert "--cert" in cmd
- cert_index = cmd.index("--cert")
- # Need to do the str(Path()) bit because Windows paths get modified by Path
- assert cmd[cert_index + 1] == str(Path(ca_path))
-
-
-def test_install_with_client_cert():
+@pytest.mark.parametrize(
+ ("key", "option"),
+ [
+ ("cert", "client-cert"),
+ ("verify", "cert"),
+ ],
+)
+def test_install_with_certs(mocker: MockerFixture, key: str, option: str):
client_path = "path/to/client.pem"
- pool = Pool()
-
- default = LegacyRepository(
- "default", "https://foo.bar", client_cert=Path(client_path)
+ mocker.patch(
+ "poetry.utils.authenticator.Authenticator.get_certs_for_url",
+ return_value={key: client_path},
)
+ default = LegacyRepository("default", "https://foo.bar")
+ pool = Pool()
pool.add_repository(default, default=True)
null_env = NullEnv()
@@ -173,8 +151,8 @@ def test_install_with_client_cert():
assert len(null_env.executed) == 1
cmd = null_env.executed[0]
- assert "--client-cert" in cmd
- cert_index = cmd.index("--client-cert")
+ assert f"--{option}" in cmd
+ cert_index = cmd.index(f"--{option}")
# Need to do the str(Path()) bit because Windows paths get modified by Path
assert cmd[cert_index + 1] == str(Path(client_path))
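
The (key, option) pairs in the parametrized certificate test above encode the expected mapping from the authenticator's certificate settings to pip's command-line flags: a "verify" entry (a CA bundle) should surface as --cert, and a "cert" entry (a client certificate) as --client-cert. A rough standalone illustration of that mapping, not Poetry's actual installer code:

from __future__ import annotations

from pathlib import Path


def cert_args(certs: dict[str, str]) -> list[str]:
    # translate an authenticator-style cert mapping into pip arguments
    args: list[str] = []
    if certs.get("verify"):  # CA bundle used to verify the index server
        args += ["--cert", str(Path(certs["verify"]))]
    if certs.get("cert"):  # client certificate presented to the index server
        args += ["--client-cert", str(Path(certs["cert"]))]
    return args


assert cert_args({"verify": "path/to/cert.pem"}) == ["--cert", str(Path("path/to/cert.pem"))]
assert cert_args({"cert": "path/to/client.pem"}) == ["--client-cert", str(Path("path/to/client.pem"))]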
diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/integration/test_utils_vcs_git.py b/tests/integration/test_utils_vcs_git.py
new file mode 100644
index 00000000000..ff844c97da4
--- /dev/null
+++ b/tests/integration/test_utils_vcs_git.py
@@ -0,0 +1,337 @@
+from __future__ import annotations
+
+import os
+import uuid
+
+from copy import deepcopy
+from hashlib import sha1
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+import pytest
+
+from dulwich.client import HTTPUnauthorized
+from dulwich.client import get_transport_and_path
+from dulwich.repo import Repo
+from poetry.core.pyproject.toml import PyProjectTOML
+
+from poetry.console.exceptions import PoetrySimpleConsoleException
+from poetry.utils.authenticator import Authenticator
+from poetry.vcs.git import Git
+from poetry.vcs.git.backend import GitRefSpec
+
+
+if TYPE_CHECKING:
+ from _pytest.tmpdir import TempdirFactory
+ from dulwich.client import FetchPackResult
+ from dulwich.client import GitClient
+ from pytest_mock import MockerFixture
+
+ from tests.conftest import Config
+
+
+# these tests are integration tests: they rely on an external repository,
+# see the `source_url` fixture
+pytestmark = pytest.mark.integration
+
+
+@pytest.fixture(autouse=True)
+def git_mock() -> None:
+ pass
+
+
+@pytest.fixture(autouse=True)
+def setup(config: Config) -> None:
+ pass
+
+
+REVISION_TO_VERSION_MAP = {
+ "b6204750a763268e941cec1f05f8986b6c66913e": "0.1.0", # Annotated Tag
+ "18d3ff247d288da701fc7f9ce2ec718388fca266": "0.1.1-alpha.0",
+ "dd07e8d4efb82690e7975b289917a7782fbef29b": "0.2.0-alpha.0",
+ "7263819922b4cd008afbb447f425a562432dad7d": "0.2.0-alpha.1",
+}
+
+BRANCH_TO_REVISION_MAP = {"0.1": "18d3ff247d288da701fc7f9ce2ec718388fca266"}
+
+TAG_TO_REVISION_MAP = {"v0.1.0": "b6204750a763268e941cec1f05f8986b6c66913e"}
+
+REF_TO_REVISION_MAP = {
+ "branch": BRANCH_TO_REVISION_MAP,
+ "tag": TAG_TO_REVISION_MAP,
+}
+
+
+@pytest.fixture
+def use_system_git_client(config: Config) -> None:
+ config.merge({"experimental": {"system-git-client": True}})
+
+
+@pytest.fixture(scope="module")
+def source_url() -> str:
+ return "https://github.com/python-poetry/test-fixture-vcs-repository.git"
+
+
+@pytest.fixture(scope="module")
+def source_directory_name(source_url: str) -> str:
+ return Git.get_name_from_source_url(url=source_url)
+
+
+@pytest.fixture(scope="module")
+def local_repo(tmpdir_factory: TempdirFactory, source_directory_name: str) -> Repo:
+ with Repo.init(
+ tmpdir_factory.mktemp("src") / source_directory_name, mkdir=True
+ ) as repo:
+ yield repo
+
+
+@pytest.fixture(scope="module")
+def _remote_refs(source_url: str, local_repo: Repo) -> FetchPackResult:
+ client: GitClient
+ path: str
+ client, path = get_transport_and_path(source_url)
+ return client.fetch(
+ path, local_repo, determine_wants=local_repo.object_store.determine_wants_all
+ )
+
+
+@pytest.fixture
+def remote_refs(_remote_refs: FetchPackResult) -> FetchPackResult:
+ return deepcopy(_remote_refs)
+
+
+@pytest.fixture(scope="module")
+def remote_default_ref(_remote_refs: FetchPackResult) -> bytes:
+ return _remote_refs.symrefs[b"HEAD"]
+
+
+@pytest.fixture(scope="module")
+def remote_default_branch(remote_default_ref: bytes) -> str:
+ return remote_default_ref.decode("utf-8").replace("refs/heads/", "")
+
+
+def test_git_local_info(
+ source_url: str, remote_refs: FetchPackResult, remote_default_ref: bytes
+) -> None:
+ with Git.clone(url=source_url) as repo:
+ info = Git.info(repo=repo)
+ assert info.origin == source_url
+ assert info.revision == remote_refs.refs[remote_default_ref].decode("utf-8")
+
+
+def test_git_clone_default_branch_head(
+ source_url: str,
+ remote_refs: FetchPackResult,
+ remote_default_ref: bytes,
+ mocker: MockerFixture,
+):
+ spy = mocker.spy(Git, "_clone")
+ spy_legacy = mocker.spy(Git, "_clone_legacy")
+
+ with Git.clone(url=source_url) as repo:
+ assert remote_refs.refs[remote_default_ref] == repo.head()
+
+ spy_legacy.assert_not_called()
+ spy.assert_called()
+
+
+def test_git_clone_fails_for_non_existent_branch(source_url: str):
+ branch = uuid.uuid4().hex
+
+ with pytest.raises(PoetrySimpleConsoleException) as e:
+ Git.clone(url=source_url, branch=branch)
+
+ assert f"Failed to clone {source_url} at '{branch}'" in str(e.value)
+
+
+def test_git_clone_fails_for_non_existent_revision(source_url: str):
+ revision = sha1(uuid.uuid4().bytes).hexdigest()
+
+ with pytest.raises(PoetrySimpleConsoleException) as e:
+ Git.clone(url=source_url, revision=revision)
+
+ assert f"Failed to clone {source_url} at '{revision}'" in str(e.value)
+
+
+def assert_version(repo: Repo, expected_revision: str) -> None:
+ version = PyProjectTOML(
+ path=Path(repo.path).joinpath("pyproject.toml")
+ ).poetry_config["version"]
+
+ revision = Git.get_revision(repo=repo)
+
+ assert revision == expected_revision
+ assert revision in REVISION_TO_VERSION_MAP
+ assert version == REVISION_TO_VERSION_MAP[revision]
+
+
+def test_git_clone_when_branch_is_ref(source_url: str) -> None:
+ with Git.clone(url=source_url, branch="refs/heads/0.1") as repo:
+ assert_version(repo, BRANCH_TO_REVISION_MAP["0.1"])
+
+
+@pytest.mark.parametrize("branch", [*BRANCH_TO_REVISION_MAP.keys()])
+def test_git_clone_branch(
+ source_url: str, remote_refs: FetchPackResult, branch: str
+) -> None:
+ with Git.clone(url=source_url, branch=branch) as repo:
+ assert_version(repo, BRANCH_TO_REVISION_MAP[branch])
+
+
+@pytest.mark.parametrize("tag", [*TAG_TO_REVISION_MAP.keys()])
+def test_git_clone_tag(source_url: str, remote_refs: FetchPackResult, tag: str) -> None:
+ with Git.clone(url=source_url, tag=tag) as repo:
+ assert_version(repo, TAG_TO_REVISION_MAP[tag])
+
+
+def test_git_clone_multiple_times(
+ source_url: str, remote_refs: FetchPackResult
+) -> None:
+ for revision in REVISION_TO_VERSION_MAP:
+ with Git.clone(url=source_url, revision=revision) as repo:
+ assert_version(repo, revision)
+
+
+def test_git_clone_revision_is_branch(
+ source_url: str, remote_refs: FetchPackResult
+) -> None:
+ with Git.clone(url=source_url, revision="0.1") as repo:
+ assert_version(repo, BRANCH_TO_REVISION_MAP["0.1"])
+
+
+def test_git_clone_revision_is_ref(
+ source_url: str, remote_refs: FetchPackResult
+) -> None:
+ with Git.clone(url=source_url, revision="refs/heads/0.1") as repo:
+ assert_version(repo, BRANCH_TO_REVISION_MAP["0.1"])
+
+
+@pytest.mark.parametrize(
+ ("revision", "expected_revision"),
+ [
+ ("0.1", BRANCH_TO_REVISION_MAP["0.1"]),
+ ("v0.1.0", TAG_TO_REVISION_MAP["v0.1.0"]),
+ *zip(REVISION_TO_VERSION_MAP, REVISION_TO_VERSION_MAP),
+ ],
+)
+def test_git_clone_revision_is_tag(
+ source_url: str, remote_refs: FetchPackResult, revision: str, expected_revision: str
+) -> None:
+ with Git.clone(url=source_url, revision=revision) as repo:
+ assert_version(repo, expected_revision)
+
+
+def test_git_clone_clones_submodules(source_url: str) -> None:
+ with Git.clone(url=source_url) as repo:
+ submodule_package_directory = (
+ Path(repo.path) / "submodules" / "sample-namespace-packages"
+ )
+
+ assert submodule_package_directory.exists()
+ assert submodule_package_directory.joinpath("README.md").exists()
+ assert len(list(submodule_package_directory.glob("*"))) > 1
+
+
+def test_system_git_fallback_on_http_401(
+ mocker: MockerFixture,
+ source_url: str,
+) -> None:
+ spy = mocker.spy(Git, "_clone_legacy")
+ mocker.patch.object(Git, "_clone", side_effect=HTTPUnauthorized(None, None))
+
+ with Git.clone(url=source_url, branch="0.1") as repo:
+ path = Path(repo.path)
+ assert_version(repo, BRANCH_TO_REVISION_MAP["0.1"])
+
+ spy.assert_called_with(
+ url="https://github.com/python-poetry/test-fixture-vcs-repository.git",
+ target=path,
+ refspec=GitRefSpec(branch="0.1", revision=None, tag=None, ref=b"HEAD"),
+ )
+ spy.assert_called_once()
+
+
+GIT_USERNAME = os.environ.get("POETRY_TEST_INTEGRATION_GIT_USERNAME")
+GIT_PASSWORD = os.environ.get("POETRY_TEST_INTEGRATION_GIT_PASSWORD")
+HTTP_AUTH_CREDENTIALS_AVAILABLE = bool(GIT_USERNAME and GIT_PASSWORD)
+
+
+@pytest.mark.skipif(
+ not HTTP_AUTH_CREDENTIALS_AVAILABLE,
+ reason="HTTP authentication credentials not available",
+)
+def test_configured_repository_http_auth(
+ mocker: MockerFixture, source_url: str, config: Config
+) -> None:
+ from poetry.vcs.git import backend
+
+ spy_clone_legacy = mocker.spy(Git, "_clone_legacy")
+ spy_get_transport_and_path = mocker.spy(backend, "get_transport_and_path")
+
+ config.merge(
+ {
+ "repositories": {"git-repo": {"url": source_url}},
+ "http-basic": {
+ "git-repo": {
+ "username": GIT_USERNAME,
+ "password": GIT_PASSWORD,
+ }
+ },
+ }
+ )
+
+ mocker.patch(
+ "poetry.vcs.git.backend.get_default_authenticator",
+ return_value=Authenticator(config=config),
+ )
+
+ with Git.clone(url=source_url, branch="0.1") as repo:
+ assert_version(repo, BRANCH_TO_REVISION_MAP["0.1"])
+
+ spy_clone_legacy.assert_not_called()
+
+ spy_get_transport_and_path.assert_called_with(
+ location=source_url,
+ username=GIT_USERNAME,
+ password=GIT_PASSWORD,
+ )
+ spy_get_transport_and_path.assert_called_once()
+
+
+def test_username_password_parameter_is_not_passed_to_dulwich(
+ mocker: MockerFixture, source_url: str, config: Config
+) -> None:
+ from poetry.vcs.git import backend
+
+ spy_clone = mocker.spy(Git, "_clone")
+ spy_get_transport_and_path = mocker.spy(backend, "get_transport_and_path")
+
+ with Git.clone(url=source_url, branch="0.1") as repo:
+ assert_version(repo, BRANCH_TO_REVISION_MAP["0.1"])
+
+ spy_clone.assert_called_once()
+
+ spy_get_transport_and_path.assert_called_with(
+ location=source_url,
+ )
+ spy_get_transport_and_path.assert_called_once()
+
+
+def test_system_git_called_when_configured(
+ mocker: MockerFixture, source_url: str, use_system_git_client: None
+) -> None:
+ spy_legacy = mocker.spy(Git, "_clone_legacy")
+ spy = mocker.spy(Git, "_clone")
+
+ with Git.clone(url=source_url, branch="0.1") as repo:
+ path = Path(repo.path)
+ assert_version(repo, BRANCH_TO_REVISION_MAP["0.1"])
+
+ spy.assert_not_called()
+
+ spy_legacy.assert_called_once()
+ spy_legacy.assert_called_with(
+ url=source_url,
+ target=path,
+ refspec=GitRefSpec(branch="0.1", revision=None, tag=None, ref=b"HEAD"),
+ )
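
These tests talk to a real GitHub repository, so they are gated behind the `integration` marker set at module level. A small illustrative way to run just this suite locally (assuming the marker is registered in the project's pytest configuration):

import sys

import pytest

# Select only tests marked "integration" in this module; export
# POETRY_TEST_INTEGRATION_GIT_USERNAME / POETRY_TEST_INTEGRATION_GIT_PASSWORD
# beforehand if the HTTP-auth test should not be skipped.
sys.exit(pytest.main(["-m", "integration", "tests/integration/test_utils_vcs_git.py"]))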
diff --git a/tests/masonry/builders/test_editable_builder.py b/tests/masonry/builders/test_editable_builder.py
index cbd91f4dcdd..64b52bddc9a 100644
--- a/tests/masonry/builders/test_editable_builder.py
+++ b/tests/masonry/builders/test_editable_builder.py
@@ -12,9 +12,12 @@
from poetry.factory import Factory
from poetry.masonry.builders.editable import EditableBuilder
+from poetry.repositories.installed_repository import InstalledRepository
+from poetry.utils.env import EnvCommandError
from poetry.utils.env import EnvManager
from poetry.utils.env import MockEnv
from poetry.utils.env import VirtualEnv
+from poetry.utils.env import ephemeral_environment
if TYPE_CHECKING:
@@ -203,6 +206,41 @@ def test_builder_falls_back_on_setup_and_pip_for_packages_with_build_scripts(
assert [] == env.executed
+def test_builder_setup_generation_runs_with_pip_editable(tmp_dir: str):
+ # create an isolated copy of the project
+ fixture = Path(__file__).parent.parent.parent / "fixtures" / "extended_project"
+ extended_project = Path(tmp_dir) / "extended_project"
+
+ shutil.copytree(fixture, extended_project)
+ assert extended_project.exists()
+
+ poetry = Factory().create_poetry(extended_project)
+
+ # we need a venv with setuptools since we are verifying setup.py builds
+ with ephemeral_environment(flags={"no-setuptools": False}) as venv:
+ builder = EditableBuilder(poetry, venv, NullIO())
+ builder.build()
+
+ # is the package installed?
+ repository = InstalledRepository.load(venv)
+ assert repository.package("extended-project", "1.2.3")
+
+ # check for the module built by build.py
+ try:
+ output = venv.run_python_script(
+ "from extended_project import built; print(built.__file__)"
+ ).strip()
+ except EnvCommandError:
+ pytest.fail("Unable to import built module")
+ else:
+ built_py = Path(output).resolve()
+
+ expected = extended_project / "extended_project" / "built.py"
+
+ # ensure the package was installed as editable
+ assert built_py == expected.resolve()
+
+
def test_builder_installs_proper_files_when_packages_configured(
project_with_include: Poetry, tmp_venv: VirtualEnv
):
@@ -229,9 +267,13 @@ def test_builder_installs_proper_files_when_packages_configured(
def test_builder_should_execute_build_scripts(
- extended_without_setup_poetry: Poetry, tmp_dir: str
+ mocker: MockerFixture, extended_without_setup_poetry: Poetry, tmp_dir: str
):
env = MockEnv(path=Path(tmp_dir) / "foo")
+ mocker.patch(
+ "poetry.masonry.builders.editable.build_environment"
+ ).return_value.__enter__.return_value = env
+
builder = EditableBuilder(extended_without_setup_poetry, env, NullIO())
builder.build()
diff --git a/tests/mixology/helpers.py b/tests/mixology/helpers.py
index 544def12bd0..0120bdb72ff 100644
--- a/tests/mixology/helpers.py
+++ b/tests/mixology/helpers.py
@@ -11,7 +11,8 @@
if TYPE_CHECKING:
- from poetry.packages.project_package import ProjectPackage
+ from poetry.core.packages.project_package import ProjectPackage
+
from poetry.repositories import Repository
from tests.mixology.version_solver.conftest import Provider
diff --git a/tests/mixology/version_solver/test_backtracking.py b/tests/mixology/version_solver/test_backtracking.py
index bdbbfba11cc..6354d4c6a48 100644
--- a/tests/mixology/version_solver/test_backtracking.py
+++ b/tests/mixology/version_solver/test_backtracking.py
@@ -8,7 +8,8 @@
if TYPE_CHECKING:
- from poetry.packages.project_package import ProjectPackage
+ from poetry.core.packages.project_package import ProjectPackage
+
from poetry.repositories import Repository
from tests.mixology.version_solver.conftest import Provider
diff --git a/tests/mixology/version_solver/test_basic_graph.py b/tests/mixology/version_solver/test_basic_graph.py
index 9f392b2f26f..f8758f00c3e 100644
--- a/tests/mixology/version_solver/test_basic_graph.py
+++ b/tests/mixology/version_solver/test_basic_graph.py
@@ -8,7 +8,8 @@
if TYPE_CHECKING:
- from poetry.packages.project_package import ProjectPackage
+ from poetry.core.packages.project_package import ProjectPackage
+
from poetry.repositories import Repository
from tests.mixology.version_solver.conftest import Provider
diff --git a/tests/mixology/version_solver/test_dependency_cache.py b/tests/mixology/version_solver/test_dependency_cache.py
new file mode 100644
index 00000000000..469a1e569db
--- /dev/null
+++ b/tests/mixology/version_solver/test_dependency_cache.py
@@ -0,0 +1,59 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from poetry.factory import Factory
+from poetry.mixology.version_solver import DependencyCache
+from tests.mixology.helpers import add_to_repo
+
+
+if TYPE_CHECKING:
+ from poetry.core.packages.project_package import ProjectPackage
+
+ from poetry.repositories import Repository
+ from tests.mixology.version_solver.conftest import Provider
+
+
+def test_solver_dependency_cache_respects_source_type(
+ root: ProjectPackage, provider: Provider, repo: Repository
+):
+ dependency_pypi = Factory.create_dependency("demo", ">=0.1.0")
+ dependency_git = Factory.create_dependency(
+ "demo", {"git": "https://github.com/demo/demo.git"}, groups=["dev"]
+ )
+ root.add_dependency(dependency_pypi)
+ root.add_dependency(dependency_git)
+
+ add_to_repo(repo, "demo", "1.0.0")
+
+ cache = DependencyCache(provider)
+ cache.search_for.cache_clear()
+
+ # ensure cache was never hit for both calls
+ cache.search_for(dependency_pypi)
+ cache.search_for(dependency_git)
+ assert not cache.search_for.cache_info().hits
+
+ packages_pypi = cache.search_for(dependency_pypi)
+ packages_git = cache.search_for(dependency_git)
+
+ assert cache.search_for.cache_info().hits == 2
+ assert cache.search_for.cache_info().currsize == 2
+
+ assert len(packages_pypi) == len(packages_git) == 1
+ assert packages_pypi != packages_git
+
+ package_pypi = packages_pypi[0]
+ package_git = packages_git[0]
+
+ assert package_pypi.package.name == dependency_pypi.name
+ assert package_pypi.package.version.text == "1.0.0"
+
+ assert package_git.package.name == dependency_git.name
+ assert package_git.package.version.text == "0.1.2"
+ assert package_git.package.source_type == dependency_git.source_type
+ assert package_git.package.source_url == dependency_git.source_url
+ assert (
+ package_git.package.source_resolved_reference
+ == "9cf87a285a2d3fbb0b9fa621997b3acc3631ed24"
+ )
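
The test above pins down the contract that matters here: the dependency cache must key on more than the package name, otherwise the PyPI and git requirements for "demo" would collide and one of the lookups would return the wrong packages. A stripped-down, purely conceptual illustration of such keying (not the actual DependencyCache implementation):

from __future__ import annotations


class TinyDependencyCache:
    def __init__(self, provider) -> None:
        self._provider = provider
        self._cache: dict[tuple, list] = {}

    @staticmethod
    def _key(dependency) -> tuple:
        # the name alone is not enough: "demo" from PyPI and "demo" from git
        # must get separate entries, so the source information is part of the key
        return (
            dependency.name,
            str(dependency.constraint),
            dependency.source_type,
            dependency.source_url,
            dependency.source_reference,
        )

    def search_for(self, dependency) -> list:
        key = self._key(dependency)
        if key not in self._cache:
            self._cache[key] = self._provider.search_for(dependency)
        return self._cache[key]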
diff --git a/tests/mixology/version_solver/test_python_constraint.py b/tests/mixology/version_solver/test_python_constraint.py
index f60e3a49293..52bbdd7bdab 100644
--- a/tests/mixology/version_solver/test_python_constraint.py
+++ b/tests/mixology/version_solver/test_python_constraint.py
@@ -8,7 +8,8 @@
if TYPE_CHECKING:
- from poetry.packages.project_package import ProjectPackage
+ from poetry.core.packages.project_package import ProjectPackage
+
from poetry.repositories import Repository
from tests.mixology.version_solver.conftest import Provider
diff --git a/tests/mixology/version_solver/test_unsolvable.py b/tests/mixology/version_solver/test_unsolvable.py
index 072925e5507..578fcfe688f 100644
--- a/tests/mixology/version_solver/test_unsolvable.py
+++ b/tests/mixology/version_solver/test_unsolvable.py
@@ -8,7 +8,8 @@
if TYPE_CHECKING:
- from poetry.packages.project_package import ProjectPackage
+ from poetry.core.packages.project_package import ProjectPackage
+
from poetry.repositories import Repository
from tests.mixology.version_solver.conftest import Provider
diff --git a/tests/mixology/version_solver/test_with_lock.py b/tests/mixology/version_solver/test_with_lock.py
index cc9e28f12d6..e9d07d21d7b 100644
--- a/tests/mixology/version_solver/test_with_lock.py
+++ b/tests/mixology/version_solver/test_with_lock.py
@@ -9,7 +9,8 @@
if TYPE_CHECKING:
- from poetry.packages.project_package import ProjectPackage
+ from poetry.core.packages.project_package import ProjectPackage
+
from poetry.repositories import Repository
from tests.mixology.version_solver.conftest import Provider
diff --git a/tests/plugins/test_plugin_manager.py b/tests/plugins/test_plugin_manager.py
index 965d04499b5..7332566f14b 100644
--- a/tests/plugins/test_plugin_manager.py
+++ b/tests/plugins/test_plugin_manager.py
@@ -7,9 +7,9 @@
from cleo.io.buffered_io import BufferedIO
from entrypoints import EntryPoint
+from poetry.core.packages.project_package import ProjectPackage
from poetry.packages.locker import Locker
-from poetry.packages.project_package import ProjectPackage
from poetry.plugins import ApplicationPlugin
from poetry.plugins import Plugin
from poetry.plugins.plugin_manager import PluginManager
@@ -32,8 +32,8 @@ def __call__(self, group: str = Plugin.group) -> PluginManager:
class MyPlugin(Plugin):
def activate(self, poetry: Poetry, io: BufferedIO) -> None:
- io.write_line("Updating version")
- poetry.package.set_version("9.9.9")
+ io.write_line("Setting readmes")
+ poetry.package.readmes = ("README.md",)
class MyCommandPlugin(ApplicationPlugin):
@@ -95,8 +95,8 @@ def test_load_plugins_and_activate(
manager.load_plugins()
manager.activate(poetry, io)
- assert poetry.package.version.text == "9.9.9"
- assert io.fetch_output() == "Updating version\n"
+ assert poetry.package.readmes == ("README.md",)
+ assert io.fetch_output() == "Setting readmes\n"
def test_load_plugins_with_invalid_plugin(
diff --git a/tests/publishing/test_publisher.py b/tests/publishing/test_publisher.py
index 0f2e672b27a..2b3cf01c627 100644
--- a/tests/publishing/test_publisher.py
+++ b/tests/publishing/test_publisher.py
@@ -59,7 +59,7 @@ def test_publish_can_publish_to_given_repository(
}
)
- mocker.patch("poetry.factory.Factory.create_config", return_value=config)
+ mocker.patch("poetry.config.config.Config.create", return_value=config)
poetry = Factory().create_poetry(fixture_dir(fixture_name))
io = BufferedIO()
diff --git a/tests/puzzle/conftest.py b/tests/puzzle/conftest.py
index 23bb4635ca0..a4a93c2b4b6 100644
--- a/tests/puzzle/conftest.py
+++ b/tests/puzzle/conftest.py
@@ -1,43 +1,19 @@
from __future__ import annotations
-import shutil
-
-from pathlib import Path
from typing import TYPE_CHECKING
import pytest
+from tests.helpers import mock_clone
-try:
- import urllib.parse as urlparse
-except ImportError:
- import urlparse
if TYPE_CHECKING:
- from poetry.core.vcs import Git
from pytest_mock import MockerFixture
-def mock_clone(self: Git, source: str, dest: Path) -> None:
- # Checking source to determine which folder we need to copy
- parts = urlparse.urlparse(source)
-
- folder = (
- Path(__file__).parent.parent
- / "fixtures"
- / "git"
- / parts.netloc
- / parts.path.lstrip("/").rstrip(".git")
- )
-
- shutil.rmtree(str(dest))
- shutil.copytree(str(folder), str(dest))
-
-
@pytest.fixture(autouse=True)
def setup(mocker: MockerFixture) -> None:
# Patch git module to not actually clone projects
- mocker.patch("poetry.core.vcs.git.Git.clone", new=mock_clone)
- mocker.patch("poetry.core.vcs.git.Git.checkout", new=lambda *_: None)
- p = mocker.patch("poetry.core.vcs.git.Git.rev_parse")
+ mocker.patch("poetry.vcs.git.Git.clone", new=mock_clone)
+ p = mocker.patch("poetry.vcs.git.Git.get_revision")
p.return_value = "9cf87a285a2d3fbb0b9fa621997b3acc3631ed24"
diff --git a/tests/puzzle/test_provider.py b/tests/puzzle/test_provider.py
index 3cb0df75074..ea1b552c907 100644
--- a/tests/puzzle/test_provider.py
+++ b/tests/puzzle/test_provider.py
@@ -144,7 +144,7 @@ def test_search_for_vcs_read_setup_raises_error_if_no_version(
provider: Provider, mocker: MockerFixture
):
mocker.patch(
- "poetry.inspection.info.PackageInfo._pep517_metadata",
+ "poetry.inspection.info.get_pep517_metadata",
return_value=PackageInfo(name="demo", version=None),
)
diff --git a/tests/puzzle/test_solver.py b/tests/puzzle/test_solver.py
index 87b4e2154bf..fe47e8bd220 100644
--- a/tests/puzzle/test_solver.py
+++ b/tests/puzzle/test_solver.py
@@ -14,6 +14,7 @@
from poetry.core.version.markers import parse_marker
from poetry.factory import Factory
+from poetry.packages import DependencyPackage
from poetry.puzzle import Solver
from poetry.puzzle.exceptions import SolverProblemError
from poetry.puzzle.provider import Provider as BaseProvider
@@ -32,7 +33,7 @@
if TYPE_CHECKING:
import httpretty
- from poetry.installation.operations import OperationTypes
+ from poetry.installation.operations.operation import Operation
from poetry.puzzle.transaction import Transaction
DEFAULT_SOURCE_REF = (
@@ -94,7 +95,7 @@ def check_solver_result(
transaction: Transaction,
expected: list[dict[str, Any]],
synchronize: bool = False,
-) -> list[OperationTypes]:
+) -> list[Operation]:
for e in expected:
if "skipped" not in e:
e["skipped"] = False
@@ -1337,6 +1338,67 @@ def test_solver_duplicate_dependencies_different_constraints_merge_by_marker(
)
+def test_solver_duplicate_dependencies_different_sources_types_are_preserved(
+ solver: Solver, repo: Repository, package: Package
+):
+ pendulum = get_package("pendulum", "2.0.3")
+ repo.add_package(pendulum)
+ repo.add_package(get_package("cleo", "1.0.0"))
+ repo.add_package(get_package("demo", "0.1.0"))
+
+ dependency_pypi = Factory.create_dependency("demo", ">=0.1.0")
+ dependency_git = Factory.create_dependency(
+ "demo", {"git": "https://github.com/demo/demo.git"}, groups=["dev"]
+ )
+ package.add_dependency(dependency_git)
+ package.add_dependency(dependency_pypi)
+
+ demo = Package(
+ "demo",
+ "0.1.2",
+ source_type="git",
+ source_url="https://github.com/demo/demo.git",
+ source_reference=DEFAULT_SOURCE_REF,
+ source_resolved_reference="9cf87a285a2d3fbb0b9fa621997b3acc3631ed24",
+ )
+
+ transaction = solver.solve()
+
+ ops = check_solver_result(
+ transaction,
+ [{"job": "install", "package": pendulum}, {"job": "install", "package": demo}],
+ )
+
+ op = ops[1]
+
+ assert op.package.source_type == demo.source_type
+ assert op.package.source_reference == DEFAULT_SOURCE_REF
+ assert op.package.source_resolved_reference.startswith(
+ demo.source_resolved_reference
+ )
+
+ complete_package = solver.provider.complete_package(
+ DependencyPackage(package.to_dependency(), package)
+ )
+
+ assert len(complete_package.all_requires) == 2
+
+ pypi, git = complete_package.all_requires
+
+ assert isinstance(pypi, Dependency)
+ assert pypi == dependency_pypi
+
+ assert isinstance(git, VCSDependency)
+ assert git.constraint
+ assert git.constraint != dependency_git.constraint
+ assert (git.name, git.source_type, git.source_url, git.source_reference) == (
+ dependency_git.name,
+ dependency_git.source_type,
+ dependency_git.source_url,
+ DEFAULT_SOURCE_REF,
+ )
+
+
def test_solver_duplicate_dependencies_different_constraints_merge_no_markers(
solver: Solver, repo: Repository, package: Package
):
diff --git a/tests/puzzle/test_transaction.py b/tests/puzzle/test_transaction.py
index 6db7f2c13d2..ae4093f5b12 100644
--- a/tests/puzzle/test_transaction.py
+++ b/tests/puzzle/test_transaction.py
@@ -9,10 +9,10 @@
if TYPE_CHECKING:
- from poetry.installation.operations import OperationTypes
+ from poetry.installation.operations.operation import Operation
-def check_operations(ops: list[OperationTypes], expected: list[dict[str, Any]]) -> None:
+def check_operations(ops: list[Operation], expected: list[dict[str, Any]]) -> None:
for e in expected:
if "skipped" not in e:
e["skipped"] = False
diff --git a/tests/repositories/fixtures/legacy/invalid-version.html b/tests/repositories/fixtures/legacy/invalid-version.html
new file mode 100644
index 00000000000..039a6302ad5
--- /dev/null
+++ b/tests/repositories/fixtures/legacy/invalid-version.html
@@ -0,0 +1,12 @@
+
+
+
+ Links for poetry
+
+
+ Links for poetry
+ poetry-21.07.28.5ffb65e2ff8067c732e2b178d03b707c7fb27855-py3-none-any.whl
+ poetry-0.1.0-py3-none-any.whl
+
+
+
diff --git a/tests/repositories/fixtures/legacy/poetry-test-py2-py3-metadata-merge.html b/tests/repositories/fixtures/legacy/poetry-test-py2-py3-metadata-merge.html
new file mode 100644
index 00000000000..7b43db0f21e
--- /dev/null
+++ b/tests/repositories/fixtures/legacy/poetry-test-py2-py3-metadata-merge.html
@@ -0,0 +1,11 @@
+
+
+
+ Links for poetry-test-py2-py3-metadata-merge
+
+
+ Links for ipython
+ poetry_test_py2_py3_metadata_merge-0.1.0-py2-none-any.whl
+ poetry_test_py2_py3_metadata_merge-0.1.0-py3-none-any.whl
+
+
diff --git a/tests/repositories/fixtures/pypi.org/dists/poetry_test_py2_py3_metadata_merge-0.1.0-py2-none-any.whl b/tests/repositories/fixtures/pypi.org/dists/poetry_test_py2_py3_metadata_merge-0.1.0-py2-none-any.whl
new file mode 100644
index 00000000000..255fcf7dab7
Binary files /dev/null and b/tests/repositories/fixtures/pypi.org/dists/poetry_test_py2_py3_metadata_merge-0.1.0-py2-none-any.whl differ
diff --git a/tests/repositories/fixtures/pypi.org/dists/poetry_test_py2_py3_metadata_merge-0.1.0-py3-none-any.whl b/tests/repositories/fixtures/pypi.org/dists/poetry_test_py2_py3_metadata_merge-0.1.0-py3-none-any.whl
new file mode 100644
index 00000000000..c4f890baafb
Binary files /dev/null and b/tests/repositories/fixtures/pypi.org/dists/poetry_test_py2_py3_metadata_merge-0.1.0-py3-none-any.whl differ
diff --git a/tests/repositories/fixtures/single-page/jax_releases.html b/tests/repositories/fixtures/single-page/jax_releases.html
new file mode 100644
index 00000000000..ce3232a9548
--- /dev/null
+++ b/tests/repositories/fixtures/single-page/jax_releases.html
@@ -0,0 +1,27 @@
+
+
+
+
+nocuda/jaxlib-0.3.0-cp310-none-manylinux2010_x86_64.whl
+nocuda/jaxlib-0.3.0-cp37-none-manylinux2010_x86_64.whl
+nocuda/jaxlib-0.3.0-cp38-none-manylinux2010_x86_64.whl
+nocuda/jaxlib-0.3.0-cp39-none-manylinux2010_x86_64.whl
+nocuda/jaxlib-0.3.2-cp310-none-manylinux2010_x86_64.whl
+nocuda/jaxlib-0.3.2-cp37-none-manylinux2010_x86_64.whl
+nocuda/jaxlib-0.3.2-cp38-none-manylinux2010_x86_64.whl
+nocuda/jaxlib-0.3.2-cp39-none-manylinux2010_x86_64.whl
+nocuda/jaxlib-0.3.5-cp310-none-manylinux2010_x86_64.whl
+nocuda/jaxlib-0.3.5-cp37-none-manylinux2010_x86_64.whl
+nocuda/jaxlib-0.3.5-cp38-none-manylinux2010_x86_64.whl
+nocuda/jaxlib-0.3.5-cp39-none-manylinux2010_x86_64.whl
+nocuda/jaxlib-0.3.7-cp310-none-manylinux2014_x86_64.whl
+nocuda/jaxlib-0.3.7-cp37-none-manylinux2014_x86_64.whl
+nocuda/jaxlib-0.3.7-cp38-none-manylinux2014_x86_64.whl
+nocuda/jaxlib-0.3.7-cp39-none-manylinux2014_x86_64.whl
+jax/jax-0.3.0.tar.gz
+jax/jax-0.3.2.tar.gz
+jax/jax-0.3.5.tar.gz
+jax/jax-0.3.6.tar.gz
+jax/jax-0.3.7.tar.gz
+
+
diff --git a/tests/repositories/link_sources/__init__.py b/tests/repositories/link_sources/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/repositories/link_sources/test_base.py b/tests/repositories/link_sources/test_base.py
new file mode 100644
index 00000000000..6267e75c689
--- /dev/null
+++ b/tests/repositories/link_sources/test_base.py
@@ -0,0 +1,89 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from typing import Iterable
+from unittest.mock import PropertyMock
+
+import pytest
+
+from poetry.core.packages.package import Package
+from poetry.core.packages.utils.link import Link
+from poetry.core.semver.version import Version
+
+from poetry.repositories.link_sources.base import LinkSource
+
+
+if TYPE_CHECKING:
+ from pytest_mock import MockerFixture
+
+
+@pytest.fixture
+def link_source(mocker: MockerFixture) -> LinkSource:
+ url = "https://example.org"
+ link_source = LinkSource(url)
+ mocker.patch(
+ f"{LinkSource.__module__}.{LinkSource.__qualname__}.links",
+ new_callable=PropertyMock,
+ return_value=iter(
+ [
+ Link(f"{url}/demo-0.1.0.tar.gz"),
+ Link(f"{url}/demo-0.1.0_invalid.tar.gz"),
+ Link(f"{url}/invalid.tar.gz"),
+ Link(f"{url}/demo-0.1.0-py2.py3-none-any.whl"),
+ Link(f"{url}/demo-0.1.1.tar.gz"),
+ ]
+ ),
+ )
+ return link_source
+
+
+@pytest.mark.parametrize(
+ "filename, expected",
+ [
+ ("demo-0.1.0-py2.py3-none-any.whl", Package("demo", "0.1.0")),
+ ("demo-0.1.0.tar.gz", Package("demo", "0.1.0")),
+ ("demo-0.1.0.egg", Package("demo", "0.1.0")),
+ ("demo-0.1.0_invalid-py2.py3-none-any.whl", None), # invalid version
+ ("demo-0.1.0_invalid.egg", None), # invalid version
+ ("no-package-at-all.txt", None),
+ ],
+)
+def test_link_package_data(filename: str, expected: Package | None) -> None:
+ link = Link(f"https://example.org/{filename}")
+ assert LinkSource.link_package_data(link) == expected
+
+
+@pytest.mark.parametrize(
+ "name, expected",
+ [
+ ("demo", {Version.parse("0.1.0"), Version.parse("0.1.1")}),
+ ("invalid", set()),
+ ],
+)
+def test_versions(name: str, expected: set[Version], link_source: LinkSource) -> None:
+ assert set(link_source.versions(name)) == expected
+
+
+def test_packages(link_source: LinkSource) -> None:
+ expected = {
+ Package("demo", "0.1.0"),
+ Package("demo", "0.1.0"),
+ Package("demo", "0.1.1"),
+ }
+ assert set(link_source.packages) == expected
+
+
+@pytest.mark.parametrize(
+ "version_string, filenames",
+ [
+ ("0.1.0", ["demo-0.1.0.tar.gz", "demo-0.1.0-py2.py3-none-any.whl"]),
+ ("0.1.1", ["demo-0.1.1.tar.gz"]),
+ ("0.1.2", []),
+ ],
+)
+def test_links_for_version(
+ version_string: str, filenames: Iterable[str], link_source: LinkSource
+) -> None:
+ version = Version.parse(version_string)
+ expected = {Link(f"{link_source.url}/{name}") for name in filenames}
+ assert set(link_source.links_for_version("demo", version)) == expected
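
For orientation, `link_package_data` boils down to recovering a name and a version from a distribution filename and rejecting anything that does not parse. A deliberately simplified sketch of the idea (the real parsing lives in poetry-core and handles more formats, e.g. eggs, than this):

from __future__ import annotations

import re

_SDIST = re.compile(r"^(?P<name>[A-Za-z0-9_.-]+)-(?P<version>\d[^-]*)\.(tar\.gz|zip)$")
_WHEEL = re.compile(r"^(?P<name>[A-Za-z0-9_.]+)-(?P<version>[^-]+)-.+-.+-.+\.whl$")
_VERSION = re.compile(r"^\d+(\.\d+)*$")  # toy version check, PEP 440 is richer


def package_data(filename: str) -> tuple[str, str] | None:
    for pattern in (_WHEEL, _SDIST):
        match = pattern.match(filename)
        if match and _VERSION.match(match.group("version")):
            return match.group("name"), match.group("version")
    return None  # unrecognized filename or unparsable version


assert package_data("demo-0.1.0-py2.py3-none-any.whl") == ("demo", "0.1.0")
assert package_data("demo-0.1.0.tar.gz") == ("demo", "0.1.0")
assert package_data("demo-0.1.0_invalid-py2.py3-none-any.whl") is None  # bad version
assert package_data("no-package-at-all.txt") is None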
diff --git a/tests/repositories/test_installed_repository.py b/tests/repositories/test_installed_repository.py
index c13be7d8e3e..f6cb2ceb8a6 100644
--- a/tests/repositories/test_installed_repository.py
+++ b/tests/repositories/test_installed_repository.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+from collections import namedtuple
from pathlib import Path
from typing import TYPE_CHECKING
@@ -71,15 +72,11 @@ def repository(mocker: MockerFixture, env: MockEnv) -> InstalledRepository:
return_value=INSTALLED_RESULTS,
)
mocker.patch(
- "poetry.core.vcs.git.Git.rev_parse",
- return_value="bb058f6b78b2d28ef5d9a5e759cfa179a1a713d6",
- )
- mocker.patch(
- "poetry.core.vcs.git.Git.remote_urls",
- side_effect=[
- {"remote.origin.url": "https://github.com/sdispater/pendulum.git"},
- {"remote.origin.url": "git@github.com:sdispater/pendulum.git"},
- ],
+ "poetry.vcs.git.Git.info",
+ return_value=namedtuple("GitRepoLocalInfo", "origin revision")(
+ origin="https://github.com/sdispater/pendulum.git",
+ revision="bb058f6b78b2d28ef5d9a5e759cfa179a1a713d6",
+ ),
)
mocker.patch("poetry.repositories.installed_repository._VENDORS", str(VENDOR_DIR))
return InstalledRepository.load(env)
diff --git a/tests/repositories/test_legacy_repository.py b/tests/repositories/test_legacy_repository.py
index d6fecec4d93..bb0c33f6a76 100644
--- a/tests/repositories/test_legacy_repository.py
+++ b/tests/repositories/test_legacy_repository.py
@@ -1,5 +1,7 @@
from __future__ import annotations
+import base64
+import re
import shutil
from pathlib import Path
@@ -27,6 +29,13 @@
from _pytest.monkeypatch import MonkeyPatch
+ from poetry.config.config import Config
+
+
+@pytest.fixture(autouse=True)
+def _use_simple_keyring(with_simple_keyring: None) -> None:
+ pass
+
class MockRepository(LegacyRepository):
@@ -82,6 +91,37 @@ def test_page_clean_link():
assert cleaned == "https://legacy.foo.bar/test%20/the%22/cleaning%00"
+def test_page_invalid_version_link():
+ repo = MockRepository()
+
+ page = repo._get_page("/invalid-version")
+
+ links = list(page.links)
+ assert len(links) == 2
+
+ versions = list(page.versions("poetry"))
+ assert len(versions) == 1
+ assert versions[0].to_string() == "0.1.0"
+
+ invalid_link = None
+
+ for link in links:
+ if link.filename.startswith("poetry-21"):
+ invalid_link = link
+ break
+
+ links_010 = list(page.links_for_version("poetry", versions[0]))
+ assert invalid_link not in links_010
+
+ assert invalid_link
+ assert not page.link_package_data(invalid_link)
+
+ packages = list(page.packages)
+ assert len(packages) == 1
+ assert packages[0].name == "poetry"
+ assert packages[0].version.to_string() == "0.1.0"
+
+
def test_sdist_format_support():
repo = MockRepository()
page = repo._get_page("/relative")
@@ -255,6 +295,16 @@ def test_get_package_from_both_py2_and_py3_specific_wheels():
assert str(required[5].marker) == 'sys_platform != "win32"'
+def test_get_package_from_both_py2_and_py3_specific_wheels_python_constraint():
+ repo = MockRepository()
+
+ package = repo.package("poetry-test-py2-py3-metadata-merge", "0.1.0")
+
+ assert package.name == "poetry-test-py2-py3-metadata-merge"
+ assert package.version.text == "0.1.0"
+ assert package.python_versions == ">=2.7,<2.8 || >=3.7,<4.0"
+
+
def test_get_package_with_dist_and_universal_py3_wheel():
repo = MockRepository()
@@ -339,7 +389,9 @@ def test_get_package_retrieves_packages_with_no_hashes():
class MockHttpRepository(LegacyRepository):
- def __init__(self, endpoint_responses: dict, http: type[httpretty.httpretty]):
+ def __init__(
+ self, endpoint_responses: dict, http: type[httpretty.httpretty]
+ ) -> None:
base_url = "http://legacy.foo.bar"
super().__init__("legacy", url=base_url, disable_cache=True)
@@ -374,7 +426,7 @@ def test_get_redirected_response_url(
repo = MockHttpRepository({"/foo": 200}, http)
redirect_url = "http://legacy.redirect.bar"
- def get_mock(url: str) -> requests.Response:
+ def get_mock(url: str, raise_for_status: bool = True) -> requests.Response:
response = requests.Response()
response.status_code = 200
response.url = redirect_url + "/foo"
@@ -382,3 +434,42 @@ def get_mock(url: str) -> requests.Response:
monkeypatch.setattr(repo.session, "get", get_mock)
assert repo._get_page("/foo")._url == "http://legacy.redirect.bar/foo/"
+
+
+@pytest.mark.parametrize(
+ ("repositories",),
+ [
+ ({},),
+ # ensure path is respected
+ ({"publish": {"url": "https://foo.bar/legacy"}},),
+ # ensure path length does not give incorrect results
+ ({"publish": {"url": "https://foo.bar/upload/legacy"}},),
+ ],
+)
+def test_authenticator_with_implicit_repository_configuration(
+ http: type[httpretty.httpretty],
+ config: Config,
+ repositories: dict[str, dict[str, str]],
+) -> None:
+ http.register_uri(
+ http.GET,
+ re.compile("^https?://foo.bar/(.+?)$"),
+ )
+
+ config.merge(
+ {
+ "repositories": repositories,
+ "http-basic": {
+ "source": {"username": "foo", "password": "bar"},
+ "publish": {"username": "baz", "password": "qux"},
+ },
+ }
+ )
+
+ repo = LegacyRepository(name="source", url="https://foo.bar/simple", config=config)
+ repo._get_page("/foo")
+
+ request = http.last_request()
+
+ basic_auth = base64.b64encode(b"foo:bar").decode()
+ assert request.headers["Authorization"] == f"Basic {basic_auth}"
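
The final assertion is just the standard HTTP Basic scheme: the username and password configured for the matching repository are joined with a colon, base64-encoded, and sent in the Authorization header. A tiny standalone illustration of the value the test expects:

import base64


def basic_auth_header(username: str, password: str) -> str:
    token = base64.b64encode(f"{username}:{password}".encode()).decode()
    return f"Basic {token}"


# the same value asserted above for the "source" repository credentials
assert basic_auth_header("foo", "bar") == "Basic Zm9vOmJhcg=="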
diff --git a/tests/repositories/test_pypi_repository.py b/tests/repositories/test_pypi_repository.py
index 6b30dc46ad3..74fd7e7cbfb 100644
--- a/tests/repositories/test_pypi_repository.py
+++ b/tests/repositories/test_pypi_repository.py
@@ -22,12 +22,17 @@
from pytest_mock import MockerFixture
+@pytest.fixture(autouse=True)
+def _use_simple_keyring(with_simple_keyring: None) -> None:
+ pass
+
+
class MockRepository(PyPiRepository):
JSON_FIXTURES = Path(__file__).parent / "fixtures" / "pypi.org" / "json"
DIST_FIXTURES = Path(__file__).parent / "fixtures" / "pypi.org" / "dists"
- def __init__(self, fallback: bool = False):
+ def __init__(self, fallback: bool = False) -> None:
super().__init__(url="http://foo.bar", disable_cache=True, fallback=fallback)
def _get(self, url: str) -> dict | None:
@@ -218,10 +223,11 @@ def test_get_should_invalid_cache_on_too_many_redirects_error(mocker: MockerFixt
delete_cache = mocker.patch("cachecontrol.caches.file_cache.FileCache.delete")
response = Response()
+ response.status_code = 200
response.encoding = "utf-8"
response.raw = BytesIO(encode('{"foo": "bar"}'))
mocker.patch(
- "cachecontrol.adapter.CacheControlAdapter.send",
+ "poetry.utils.authenticator.Authenticator.get",
side_effect=[TooManyRedirects(), response],
)
repository = PyPiRepository()
diff --git a/tests/repositories/test_single_page_repository.py b/tests/repositories/test_single_page_repository.py
new file mode 100644
index 00000000000..9ea409426a2
--- /dev/null
+++ b/tests/repositories/test_single_page_repository.py
@@ -0,0 +1,61 @@
+from __future__ import annotations
+
+import re
+
+from pathlib import Path
+
+from poetry.core.packages.dependency import Dependency
+
+from poetry.repositories.link_sources.html import SimpleRepositoryPage
+from poetry.repositories.single_page_repository import SinglePageRepository
+
+
+class MockSinglePageRepository(SinglePageRepository):
+
+ FIXTURES = Path(__file__).parent / "fixtures" / "single-page"
+
+ def __init__(self, page: str) -> None:
+ super().__init__(
+ "single-page",
+ url=f"http://single-page.foo.bar/{page}.html",
+ disable_cache=True,
+ )
+
+ def _get_page(self, endpoint: str | None = None) -> SimpleRepositoryPage | None:
+ fixture = self.FIXTURES / self.url.rsplit("/", 1)[-1]
+ if not fixture.exists():
+ return
+
+ with fixture.open(encoding="utf-8") as f:
+ return SimpleRepositoryPage(self._url, f.read())
+
+ def _download(self, url: str, dest: Path) -> None:
+ raise RuntimeError("Tests are not configured for downloads")
+
+
+def test_single_page_repository_get_page():
+ repo = MockSinglePageRepository("jax_releases")
+
+ page = repo._get_page("/ignored")
+ links = list(page.links)
+
+ assert len(links) == 21
+
+ for link in links:
+ assert re.match(r"^(jax|jaxlib)-0\.3\.\d.*\.(whl|tar\.gz)$", link.filename)
+ assert link.netloc == "storage.googleapis.com"
+ assert link.path.startswith("/jax-releases/")
+
+
+def test_single_page_repository_find_packages():
+ repo = MockSinglePageRepository("jax_releases")
+
+ dep = Dependency("jaxlib", "0.3.7")
+
+ packages = repo.find_packages(dep)
+
+ assert len(packages) == 1
+
+ package = packages[0]
+ assert package.name == dep.name
+ assert package.to_dependency().to_pep_508() == dep.to_pep_508()
diff --git a/tests/test_factory.py b/tests/test_factory.py
index 8c623cc5932..a281ef78b4d 100644
--- a/tests/test_factory.py
+++ b/tests/test_factory.py
@@ -5,6 +5,7 @@
import pytest
+from deepdiff import DeepDiff
from entrypoints import EntryPoint
from poetry.core.semver.helpers import parse_constraint
from poetry.core.toml.file import TOMLFile
@@ -27,8 +28,8 @@
class MyPlugin(Plugin):
def activate(self, poetry: Poetry, io: IO) -> None:
- io.write_line("Updating version")
- poetry.package.set_version("9.9.9")
+ io.write_line("Setting readmes")
+ poetry.package.readmes = ("README.md",)
def test_create_poetry():
@@ -41,10 +42,12 @@ def test_create_poetry():
assert package.description == "Some description."
assert package.authors == ["Sébastien Eustace <sebastien@eustace.io>"]
assert package.license.id == "MIT"
- assert (
- package.readme.relative_to(fixtures_dir).as_posix()
- == "sample_project/README.rst"
- )
+
+ for readme in package.readmes:
+ assert (
+ readme.relative_to(fixtures_dir).as_posix() == "sample_project/README.rst"
+ )
+
assert package.homepage == "https://python-poetry.org"
assert package.repository_url == "https://github.com/python-poetry/poetry"
assert package.keywords == ["packaging", "dependency", "poetry"]
@@ -133,6 +136,34 @@ def test_create_poetry():
]
+@pytest.mark.parametrize(
+ ("project",),
+ [
+ ("simple_project",),
+ ("project_with_extras",),
+ ],
+)
+def test_create_pyproject_from_package(project: str):
+ poetry = Factory().create_poetry(fixtures_dir / project)
+ package = poetry.package
+
+ pyproject = Factory.create_pyproject_from_package(package)
+
+ result = pyproject["tool"]["poetry"]
+ expected = poetry.pyproject.poetry_config
+
+ # packages do not support this at present
+ expected.pop("scripts", None)
+
+ # remove any empty sections
+ sections = list(expected.keys())
+ for section in sections:
+ if not expected[section]:
+ expected.pop(section)
+
+ assert not DeepDiff(expected, result)
+
+
def test_create_poetry_with_packages_and_includes():
poetry = Factory().create_poetry(fixtures_dir / "with-include")
@@ -299,6 +330,8 @@ def test_create_poetry_with_local_config(fixture_dir: FixtureDirGetter):
assert not poetry.config.get("virtualenvs.in-project")
assert not poetry.config.get("virtualenvs.create")
assert not poetry.config.get("virtualenvs.options.always-copy")
+ assert not poetry.config.get("virtualenvs.options.no-pip")
+ assert not poetry.config.get("virtualenvs.options.no-setuptools")
assert not poetry.config.get("virtualenvs.options.system-site-packages")
@@ -310,4 +343,4 @@ def test_create_poetry_with_plugins(mocker: MockerFixture):
poetry = Factory().create_poetry(fixtures_dir / "sample_project")
- assert poetry.package.version.text == "9.9.9"
+ assert poetry.package.readmes == ("README.md",)
diff --git a/tests/utils/fixtures/setups/pendulum/setup.py b/tests/utils/fixtures/setups/pendulum/setup.py
index d0af694c887..705bd404289 100644
--- a/tests/utils/fixtures/setups/pendulum/setup.py
+++ b/tests/utils/fixtures/setups/pendulum/setup.py
@@ -2,7 +2,7 @@
from distutils.core import setup
-from build import *
+from build import * # nopycln: import
packages = [
diff --git a/tests/utils/test_authenticator.py b/tests/utils/test_authenticator.py
index 4c8f77faedf..2467959b6f5 100644
--- a/tests/utils/test_authenticator.py
+++ b/tests/utils/test_authenticator.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import base64
import re
import uuid
@@ -286,19 +287,16 @@ def callback(
assert sleep.call_count == attempts
-@pytest.fixture
-def environment_repository_credentials(monkeypatch: MonkeyPatch) -> None:
- monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_USERNAME", "bar")
- monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_PASSWORD", "baz")
-
-
def test_authenticator_uses_env_provided_credentials(
config: Config,
environ: None,
mock_remote: type[httpretty.httpretty],
http: type[httpretty.httpretty],
- environment_repository_credentials: None,
+ monkeypatch: MonkeyPatch,
):
+ monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_USERNAME", "bar")
+ monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_PASSWORD", "baz")
+
config.merge({"repositories": {"foo": {"url": "https://foo.bar/simple/"}}})
authenticator = Authenticator(config, NullIO())
@@ -339,10 +337,12 @@ def test_authenticator_uses_certs_from_config_if_not_provided(
)
authenticator = Authenticator(config, NullIO())
- session_send = mocker.patch.object(authenticator.session, "send")
+ url = "https://foo.bar/files/foo-0.1.0.tar.gz"
+ session = authenticator.get_session(url)
+ session_send = mocker.patch.object(session, "send")
authenticator.request(
"get",
- "https://foo.bar/files/foo-0.1.0.tar.gz",
+ url,
verify=cert,
cert=client_cert,
)
@@ -350,3 +350,252 @@ def test_authenticator_uses_certs_from_config_if_not_provided(
assert Path(kwargs["verify"]) == Path(cert or configured_cert)
assert Path(kwargs["cert"]) == Path(client_cert or configured_client_cert)
+
+
+def test_authenticator_uses_credentials_from_config_matched_by_url_path(
+ config: Config, mock_remote: None, http: type[httpretty.httpretty]
+):
+ config.merge(
+ {
+ "repositories": {
+ "foo-alpha": {"url": "https://foo.bar/alpha/files/simple/"},
+ "foo-beta": {"url": "https://foo.bar/beta/files/simple/"},
+ },
+ "http-basic": {
+ "foo-alpha": {"username": "bar", "password": "alpha"},
+ "foo-beta": {"username": "baz", "password": "beta"},
+ },
+ }
+ )
+
+ authenticator = Authenticator(config, NullIO())
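+    # The request URL falls under the "foo-alpha" repository path, so its credentials are expected.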
+ authenticator.request("get", "https://foo.bar/alpha/files/simple/foo-0.1.0.tar.gz")
+
+ request = http.last_request()
+
+ basic_auth = base64.b64encode(b"bar:alpha").decode()
+ assert request.headers["Authorization"] == f"Basic {basic_auth}"
+
+ # Make request on second repository with the same netloc but different credentials
+ authenticator.request("get", "https://foo.bar/beta/files/simple/foo-0.1.0.tar.gz")
+
+ request = http.last_request()
+
+ basic_auth = base64.b64encode(b"baz:beta").decode()
+ assert request.headers["Authorization"] == f"Basic {basic_auth}"
+
+
+def test_authenticator_uses_credentials_from_config_with_at_sign_in_path(
+ config: Config, mock_remote: None, http: type[httpretty.httpretty]
+):
+ config.merge(
+ {
+ "repositories": {
+ "foo": {"url": "https://foo.bar/beta/files/simple/"},
+ },
+ "http-basic": {
+ "foo": {"username": "bar", "password": "baz"},
+ },
+ }
+ )
+ authenticator = Authenticator(config, NullIO())
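+    # The "@" characters in the file name must not be mistaken for userinfo in the URL.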
+ authenticator.request("get", "https://foo.bar/beta/files/simple/f@@-0.1.0.tar.gz")
+
+ request = http.last_request()
+
+ basic_auth = base64.b64encode(b"bar:baz").decode()
+ assert request.headers["Authorization"] == f"Basic {basic_auth}"
+
+
+def test_authenticator_falls_back_to_keyring_url_matched_by_path(
+ config: Config,
+ mock_remote: None,
+ http: type[httpretty.httpretty],
+ with_simple_keyring: None,
+ dummy_keyring: DummyBackend,
+):
+ config.merge(
+ {
+ "repositories": {
+ "foo-alpha": {"url": "https://foo.bar/alpha/files/simple/"},
+ "foo-beta": {"url": "https://foo.bar/beta/files/simple/"},
+ }
+ }
+ )
+
+ dummy_keyring.set_password(
+ "https://foo.bar/alpha/files/simple/", None, SimpleCredential(None, "bar")
+ )
+ dummy_keyring.set_password(
+ "https://foo.bar/beta/files/simple/", None, SimpleCredential(None, "baz")
+ )
+
+ authenticator = Authenticator(config, NullIO())
+
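+    # No http-basic config is present, so credentials should fall back to keyring entries keyed by repository URL.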
+ authenticator.request("get", "https://foo.bar/alpha/files/simple/foo-0.1.0.tar.gz")
+ request = http.last_request()
+
+ basic_auth = base64.b64encode(b":bar").decode()
+ assert request.headers["Authorization"] == f"Basic {basic_auth}"
+
+ authenticator.request("get", "https://foo.bar/beta/files/simple/foo-0.1.0.tar.gz")
+ request = http.last_request()
+
+ basic_auth = base64.b64encode(b":baz").decode()
+ assert request.headers["Authorization"] == f"Basic {basic_auth}"
+
+
+def test_authenticator_uses_env_provided_credentials_matched_by_url_path(
+ config: Config,
+ environ: None,
+ mock_remote: type[httpretty.httpretty],
+ http: type[httpretty.httpretty],
+ monkeypatch: MonkeyPatch,
+):
+ monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_ALPHA_USERNAME", "bar")
+ monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_ALPHA_PASSWORD", "alpha")
+ monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_BETA_USERNAME", "baz")
+ monkeypatch.setenv("POETRY_HTTP_BASIC_FOO_BETA_PASSWORD", "beta")
+
+ config.merge(
+ {
+ "repositories": {
+ "foo-alpha": {"url": "https://foo.bar/alpha/files/simple/"},
+ "foo-beta": {"url": "https://foo.bar/beta/files/simple/"},
+ }
+ }
+ )
+
+ authenticator = Authenticator(config, NullIO())
+
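+    # Env-provided credentials should be matched to the repository whose URL path prefixes the request URL.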
+ authenticator.request("get", "https://foo.bar/alpha/files/simple/foo-0.1.0.tar.gz")
+ request = http.last_request()
+
+ basic_auth = base64.b64encode(b"bar:alpha").decode()
+ assert request.headers["Authorization"] == f"Basic {basic_auth}"
+
+ authenticator.request("get", "https://foo.bar/beta/files/simple/foo-0.1.0.tar.gz")
+ request = http.last_request()
+
+ basic_auth = base64.b64encode(b"baz:beta").decode()
+ assert request.headers["Authorization"] == f"Basic {basic_auth}"
+
+
+def test_authenticator_azure_feed_guid_credentials(
+ config: Config,
+ mock_remote: None,
+ http: type[httpretty.httpretty],
+ with_simple_keyring: None,
+ dummy_keyring: DummyBackend,
+):
+ config.merge(
+ {
+ "repositories": {
+ "alpha": {
+ "url": "https://foo.bar/org-alpha/_packaging/feed/pypi/simple/"
+ },
+ "beta": {
+ "url": "https://foo.bar/org-beta/_packaging/feed/pypi/simple/"
+ },
+ },
+ "http-basic": {
+ "alpha": {"username": "foo", "password": "bar"},
+ "beta": {"username": "baz", "password": "qux"},
+ },
+ }
+ )
+
+ authenticator = Authenticator(config, NullIO())
+
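+    # The request URLs contain a GUID instead of the configured feed name, so matching must rely on the organization path segment.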
+ authenticator.request(
+ "get",
+ "https://foo.bar/org-alpha/_packaging/GUID/pypi/simple/a/1.0.0/a-1.0.0.whl",
+ )
+ request = http.last_request()
+
+ basic_auth = base64.b64encode(b"foo:bar").decode()
+ assert request.headers["Authorization"] == f"Basic {basic_auth}"
+
+ authenticator.request(
+ "get",
+ "https://foo.bar/org-beta/_packaging/GUID/pypi/simple/b/1.0.0/a-1.0.0.whl",
+ )
+ request = http.last_request()
+
+ basic_auth = base64.b64encode(b"baz:qux").decode()
+ assert request.headers["Authorization"] == f"Basic {basic_auth}"
+
+
+def test_authenticator_add_repository(
+ config: Config,
+ mock_remote: None,
+ http: type[httpretty.httpretty],
+ with_simple_keyring: None,
+ dummy_keyring: DummyBackend,
+):
+ config.merge(
+ {
+ "http-basic": {
+ "source": {"username": "foo", "password": "bar"},
+ },
+ }
+ )
+
+ authenticator = Authenticator(config, NullIO())
+
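+    # No repository is configured for this URL yet, so the first request should carry no credentials.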
+ authenticator.request(
+ "get",
+ "https://foo.bar/simple/a/1.0.0/a-1.0.0.whl",
+ )
+ request = http.last_request()
+ assert "Authorization" not in request.headers
+
+ authenticator.add_repository("source", "https://foo.bar/simple/")
+
+ authenticator.request(
+ "get",
+ "https://foo.bar/simple/a/1.0.0/a-1.0.0.whl",
+ )
+ request = http.last_request()
+
+ basic_auth = base64.b64encode(b"foo:bar").decode()
+ assert request.headers["Authorization"] == f"Basic {basic_auth}"
+
+
+def test_authenticator_git_repositories(
+ config: Config,
+ mock_remote: None,
+ http: type[httpretty.httpretty],
+ with_simple_keyring: None,
+ dummy_keyring: DummyBackend,
+):
+ config.merge(
+ {
+ "repositories": {
+ "one": {"url": "https://foo.bar/org/one.git"},
+ "two": {"url": "https://foo.bar/org/two.git"},
+ },
+ "http-basic": {
+ "one": {"username": "foo", "password": "bar"},
+ "two": {"username": "baz", "password": "qux"},
+ },
+ }
+ )
+
+ authenticator = Authenticator(config, NullIO())
+
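+    # Credentials are resolved per configured repository URL; unmatched or non-HTTP URLs should yield none.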
+ one = authenticator.get_credentials_for_git_url("https://foo.bar/org/one.git")
+ assert one.username == "foo"
+ assert one.password == "bar"
+
+ two = authenticator.get_credentials_for_git_url("https://foo.bar/org/two.git")
+ assert two.username == "baz"
+ assert two.password == "qux"
+
+ two_ssh = authenticator.get_credentials_for_git_url("ssh://git@foo.bar/org/two.git")
+ assert not two_ssh.username
+ assert not two_ssh.password
+
+ three = authenticator.get_credentials_for_git_url("https://foo.bar/org/three.git")
+ assert not three.username
+ assert not three.password
diff --git a/tests/utils/test_dependency_specification.py b/tests/utils/test_dependency_specification.py
new file mode 100644
index 00000000000..9cbd96033fa
--- /dev/null
+++ b/tests/utils/test_dependency_specification.py
@@ -0,0 +1,109 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+import pytest
+
+from deepdiff import DeepDiff
+
+from poetry.utils.dependency_specification import parse_dependency_specification
+from tests.compat import is_poetry_core_1_1_0a7_compat
+
+
+if TYPE_CHECKING:
+ from pytest_mock import MockerFixture
+
+ from poetry.utils.dependency_specification import DependencySpec
+
+
+@pytest.mark.parametrize(
+ ("requirement", "specification"),
+ [
+ (
+ "git+https://github.com/demo/demo.git",
+ {"git": "https://github.com/demo/demo.git", "name": "demo"},
+ ),
+ (
+ "git+ssh://github.com/demo/demo.git",
+ {"git": "ssh://github.com/demo/demo.git", "name": "demo"},
+ ),
+ (
+ "git+https://github.com/demo/demo.git#main",
+ {"git": "https://github.com/demo/demo.git", "name": "demo", "rev": "main"},
+ ),
+ (
+ "git+https://github.com/demo/demo.git@main",
+ {"git": "https://github.com/demo/demo.git", "name": "demo", "rev": "main"},
+ ),
+ ("demo", {"name": "demo"}),
+ ("demo@1.0.0", {"name": "demo", "version": "1.0.0"}),
+ ("demo@^1.0.0", {"name": "demo", "version": "^1.0.0"}),
+ ("demo@==1.0.0", {"name": "demo", "version": "==1.0.0"}),
+ ("demo@!=1.0.0", {"name": "demo", "version": "!=1.0.0"}),
+ ("demo@~1.0.0", {"name": "demo", "version": "~1.0.0"}),
+ ("demo[a,b]@1.0.0", {"name": "demo", "version": "1.0.0", "extras": ["a", "b"]}),
+ ("demo[a,b]", {"name": "demo", "extras": ["a", "b"]}),
+ ("../demo", {"name": "demo", "path": "../demo"}),
+ ("../demo/demo.whl", {"name": "demo", "path": "../demo/demo.whl"}),
+ (
+ "https://example.com/packages/demo-0.1.0.tar.gz",
+ {"name": "demo", "url": "https://example.com/packages/demo-0.1.0.tar.gz"},
+ ),
+ # PEP 508 inputs
+ (
+ "poetry-core (>=1.0.7,<1.1.0)",
+ {"name": "poetry-core", "version": ">=1.0.7,<1.1.0"},
+ ),
+ (
+ 'requests [security,tests] >= 2.8.1, == 2.8.* ; python_version < "2.7"',
+ {
+ "name": "requests",
+ "markers": 'python_version < "2.7"',
+ "version": ">=2.8.1,<2.9.0",
+ "extras": ["security", "tests"],
+ },
+ ),
+ ("name (>=3,<4)", {"name": "name", "version": ">=3,<4"}),
+ (
+ "name@http://foo.com",
+ {"name": "name", "url": "http://foo.com"},
+ ),
+ (
+ "name [fred,bar] @ http://foo.com ; python_version=='2.7'",
+ {
+ "name": "name",
+ "markers": 'python_version == "2.7"',
+ "url": "http://foo.com",
+ **(
+ {} if is_poetry_core_1_1_0a7_compat else {"extras": ["fred", "bar"]}
+ ),
+ },
+ ),
+ (
+ 'cachecontrol[filecache] (>=0.12.9,<0.13.0); python_version >= "3.6" and'
+ ' python_version < "4.0"',
+ {
+ "version": ">=0.12.9,<0.13.0",
+ "markers": 'python_version >= "3.6" and python_version < "4.0"',
+ "extras": ["filecache"],
+ "name": "cachecontrol",
+ },
+ ),
+ ],
+)
+def test_parse_dependency_specification(
+ requirement: str, specification: DependencySpec, mocker: MockerFixture
+) -> None:
+ original = Path.exists
+
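+    # Pretend that path-like requirements (those containing "/") exist on disk so they parse as path dependencies.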
+ def _mock(self: Path) -> bool:
+ if "/" in requirement and self == Path.cwd().joinpath(requirement):
+ return True
+ return original(self)
+
+ mocker.patch("pathlib.Path.exists", _mock)
+
+ assert not DeepDiff(
+ parse_dependency_specification(requirement), specification, ignore_order=True
+ )
diff --git a/tests/utils/test_env.py b/tests/utils/test_env.py
index c1ae3181c06..776a18ad9cd 100644
--- a/tests/utils/test_env.py
+++ b/tests/utils/test_env.py
@@ -1,14 +1,12 @@
from __future__ import annotations
import os
-import shutil
import subprocess
import sys
from pathlib import Path
from typing import TYPE_CHECKING
from typing import Any
-from typing import Callable
from typing import Iterator
import pytest
@@ -19,18 +17,24 @@
from poetry.core.toml.file import TOMLFile
from poetry.factory import Factory
+from poetry.repositories.installed_repository import InstalledRepository
from poetry.utils._compat import WINDOWS
from poetry.utils.env import GET_BASE_PREFIX
from poetry.utils.env import EnvCommandError
from poetry.utils.env import EnvManager
from poetry.utils.env import GenericEnv
from poetry.utils.env import InvalidCurrentPythonVersionError
+from poetry.utils.env import MockEnv
from poetry.utils.env import NoCompatiblePythonVersionFound
from poetry.utils.env import SystemEnv
from poetry.utils.env import VirtualEnv
+from poetry.utils.env import build_environment
+from poetry.utils.helpers import remove_directory
if TYPE_CHECKING:
+ from collections.abc import Callable
+
from pytest_mock import MockerFixture
from poetry.poetry import Poetry
@@ -55,7 +59,7 @@ def __init__(
path: Path,
base: Path | None = None,
sys_path: list[str] | None = None,
- ):
+ ) -> None:
super().__init__(path, base=base)
self._sys_path = sys_path
@@ -208,10 +212,12 @@ def test_activate_activates_non_existing_virtualenv_no_envs_file(
m.assert_called_with(
Path(tmp_dir) / f"{venv_name}-py3.7",
executable="/usr/bin/python3.7",
- flags={"always-copy": False, "system-site-packages": False},
- with_pip=True,
- with_setuptools=True,
- with_wheel=True,
+ flags={
+ "always-copy": False,
+ "system-site-packages": False,
+ "no-pip": False,
+ "no-setuptools": False,
+ },
)
envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
@@ -343,10 +349,12 @@ def test_activate_activates_different_virtualenv_with_envs_file(
m.assert_called_with(
Path(tmp_dir) / f"{venv_name}-py3.6",
executable="/usr/bin/python3.6",
- flags={"always-copy": False, "system-site-packages": False},
- with_pip=True,
- with_setuptools=True,
- with_wheel=True,
+ flags={
+ "always-copy": False,
+ "system-site-packages": False,
+ "no-pip": False,
+ "no-setuptools": False,
+ },
)
assert envs_file.exists()
@@ -404,10 +412,12 @@ def test_activate_activates_recreates_for_different_patch(
build_venv_m.assert_called_with(
Path(tmp_dir) / f"{venv_name}-py3.7",
executable="/usr/bin/python3.7",
- flags={"always-copy": False, "system-site-packages": False},
- with_pip=True,
- with_setuptools=True,
- with_wheel=True,
+ flags={
+ "always-copy": False,
+ "system-site-packages": False,
+ "no-pip": False,
+ "no-setuptools": False,
+ },
)
remove_venv_m.assert_called_with(Path(tmp_dir) / f"{venv_name}-py3.7")
@@ -704,19 +714,19 @@ def test_remove_keeps_dir_if_not_deleteable(
side_effect=check_output_wrapper(Version.parse("3.6.6")),
)
- original_rmtree = shutil.rmtree
-
- def err_on_rm_venv_only(path: str, *args: Any, **kwargs: Any) -> None:
- if path == str(venv_path):
+ def err_on_rm_venv_only(path: Path | str, *args: Any, **kwargs: Any) -> None:
+ if str(path) == str(venv_path):
raise OSError(16, "Test error") # ERRNO 16: Device or resource busy
else:
- original_rmtree(path)
+ remove_directory(path)
- m = mocker.patch("shutil.rmtree", side_effect=err_on_rm_venv_only)
+ m = mocker.patch(
+ "poetry.utils.env.remove_directory", side_effect=err_on_rm_venv_only
+ )
venv = manager.remove(f"{venv_name}-py3.6")
- m.assert_any_call(str(venv_path))
+ m.assert_any_call(venv_path)
assert venv_path == venv.path
assert venv_path.exists()
@@ -725,7 +735,7 @@ def err_on_rm_venv_only(path: str, *args: Any, **kwargs: Any) -> None:
assert not file1_path.exists()
assert not file2_path.exists()
- m.side_effect = original_rmtree # Avoid teardown using `err_on_rm_venv_only`
+ m.side_effect = remove_directory # Avoid teardown using `err_on_rm_venv_only`
@pytest.mark.skipif(os.name == "nt", reason="Symlinks are not supported on Windows")
@@ -839,10 +849,12 @@ def test_create_venv_tries_to_find_a_compatible_python_executable_using_generic_
m.assert_called_with(
config_virtualenvs_path / f"{venv_name}-py3.7",
executable="python3",
- flags={"always-copy": False, "system-site-packages": False},
- with_pip=True,
- with_setuptools=True,
- with_wheel=True,
+ flags={
+ "always-copy": False,
+ "system-site-packages": False,
+ "no-pip": False,
+ "no-setuptools": False,
+ },
)
@@ -870,10 +882,12 @@ def test_create_venv_tries_to_find_a_compatible_python_executable_using_specific
m.assert_called_with(
config_virtualenvs_path / f"{venv_name}-py3.9",
executable="python3.9",
- flags={"always-copy": False, "system-site-packages": False},
- with_pip=True,
- with_setuptools=True,
- with_wheel=True,
+ flags={
+ "always-copy": False,
+ "system-site-packages": False,
+ "no-pip": False,
+ "no-setuptools": False,
+ },
)
@@ -960,10 +974,12 @@ def test_create_venv_uses_patch_version_to_detect_compatibility(
m.assert_called_with(
config_virtualenvs_path / f"{venv_name}-py{version.major}.{version.minor}",
executable=None,
- flags={"always-copy": False, "system-site-packages": False},
- with_pip=True,
- with_setuptools=True,
- with_wheel=True,
+ flags={
+ "always-copy": False,
+ "system-site-packages": False,
+ "no-pip": False,
+ "no-setuptools": False,
+ },
)
@@ -999,10 +1015,12 @@ def test_create_venv_uses_patch_version_to_detect_compatibility_with_executable(
m.assert_called_with(
config_virtualenvs_path / f"{venv_name}-py{version.major}.{version.minor - 1}",
executable=f"python{version.major}.{version.minor - 1}",
- flags={"always-copy": False, "system-site-packages": False},
- with_pip=True,
- with_setuptools=True,
- with_wheel=True,
+ flags={
+ "always-copy": False,
+ "system-site-packages": False,
+ "no-pip": False,
+ "no-setuptools": False,
+ },
)
@@ -1067,10 +1085,12 @@ def test_activate_with_in_project_setting_does_not_fail_if_no_venvs_dir(
m.assert_called_with(
poetry.file.parent / ".venv",
executable="/usr/bin/python3.7",
- flags={"always-copy": False, "system-site-packages": False},
- with_pip=True,
- with_setuptools=True,
- with_wheel=True,
+ flags={
+ "always-copy": False,
+ "system-site-packages": False,
+ "no-pip": False,
+ "no-setuptools": False,
+ },
)
envs_file = TOMLFile(Path(tmp_dir) / "virtualenvs" / "envs.toml")
@@ -1118,6 +1138,36 @@ def test_env_system_packages(tmp_path: Path, poetry: Poetry):
assert "include-system-site-packages = true" in pyvenv_cfg.read_text()
+@pytest.mark.parametrize(
+ ("flags", "packages"),
+ [
+ ({"no-pip": False}, {"pip", "wheel"}),
+ ({"no-pip": False, "no-wheel": True}, {"pip"}),
+ ({"no-pip": True}, set()),
+ ({"no-setuptools": False}, {"setuptools"}),
+ ({"no-setuptools": True}, set()),
+ ({"no-pip": True, "no-setuptools": False}, {"setuptools"}),
+ ({"no-wheel": False}, {"wheel"}),
+ ({}, set()),
+ ],
+)
+def test_env_no_pip(
+ tmp_path: Path, poetry: Poetry, flags: dict[str, bool], packages: set[str]
+):
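+    # Build a venv with the given seeder flags and verify exactly which seed packages end up installed.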
+ venv_path = tmp_path / "venv"
+ EnvManager(poetry).build_venv(path=venv_path, flags=flags)
+ env = VirtualEnv(venv_path)
+ installed_repository = InstalledRepository.load(env=env, with_dependencies=True)
+ installed_packages = {
+ package.name
+ for package in installed_repository.packages
+ # workaround for BSD test environments
+ if package.name != "sqlite3"
+ }
+
+ assert installed_packages == packages
+
+
def test_env_finds_the_correct_executables(tmp_dir: str, manager: EnvManager):
venv_path = Path(tmp_dir) / "Virtual Env"
manager.build_venv(str(venv_path), with_pip=True)
@@ -1268,10 +1318,12 @@ def test_create_venv_accepts_fallback_version_w_nonzero_patchlevel(
m.assert_called_with(
config_virtualenvs_path / f"{venv_name}-py3.5",
executable="python3.5",
- flags={"always-copy": False, "system-site-packages": False},
- with_pip=True,
- with_setuptools=True,
- with_wheel=True,
+ flags={
+ "always-copy": False,
+ "system-site-packages": False,
+ "no-pip": False,
+ "no-setuptools": False,
+ },
)
@@ -1282,3 +1334,51 @@ def test_generate_env_name_ignores_case_for_case_insensitive_fs(tmp_dir: str):
assert venv_name1 == venv_name2
else:
assert venv_name1 != venv_name2
+
+
+@pytest.fixture()
+def extended_without_setup_poetry() -> Poetry:
+ poetry = Factory().create_poetry(
+ Path(__file__).parent.parent / "fixtures" / "extended_project_without_setup"
+ )
+
+ return poetry
+
+
+def test_build_environment_called_build_script_specified(
+ mocker: MockerFixture, extended_without_setup_poetry: Poetry, tmp_dir: str
+):
+ project_env = MockEnv(path=Path(tmp_dir) / "project")
+ ephemeral_env = MockEnv(path=Path(tmp_dir) / "ephemeral")
+
+ mocker.patch(
+ "poetry.utils.env.ephemeral_environment"
+ ).return_value.__enter__.return_value = ephemeral_env
+
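+    # With a build script configured, an ephemeral environment should be used and the build requirements installed into it.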
+ with build_environment(extended_without_setup_poetry, project_env) as env:
+ assert env == ephemeral_env
+ assert env.executed == [
+ [
+ "python",
+ env.pip_embedded,
+ "install",
+ "--disable-pip-version-check",
+ "--ignore-installed",
+ *extended_without_setup_poetry.pyproject.build_system.requires,
+ ]
+ ]
+
+
+def test_build_environment_not_called_without_build_script_specified(
+ mocker: MockerFixture, poetry: Poetry, tmp_dir: str
+):
+ project_env = MockEnv(path=Path(tmp_dir) / "project")
+ ephemeral_env = MockEnv(path=Path(tmp_dir) / "ephemeral")
+
+ mocker.patch(
+ "poetry.utils.env.ephemeral_environment"
+ ).return_value.__enter__.return_value = ephemeral_env
+
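+    # Without a build script, the project environment should be used as-is and nothing installed.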
+ with build_environment(poetry, project_env) as env:
+ assert env == project_env
+ assert not env.executed
diff --git a/tests/utils/test_setup_reader.py b/tests/utils/test_setup_reader.py
index 7dbf2ba91ae..d72e5386275 100644
--- a/tests/utils/test_setup_reader.py
+++ b/tests/utils/test_setup_reader.py
@@ -2,7 +2,7 @@
import os
-from typing import Callable
+from typing import TYPE_CHECKING
import pytest
@@ -11,6 +11,10 @@
from poetry.utils.setup_reader import SetupReader
+if TYPE_CHECKING:
+ from collections.abc import Callable
+
+
@pytest.fixture()
def setup() -> Callable[[str], str]:
def _setup(name: str) -> str: