diff --git a/.github/ISSUE_TEMPLATE/Feature_request.md b/.github/ISSUE_TEMPLATE/Feature_request.md index e02f479c4a..e8db8a47d3 100644 --- a/.github/ISSUE_TEMPLATE/Feature_request.md +++ b/.github/ISSUE_TEMPLATE/Feature_request.md @@ -5,7 +5,7 @@ about: Suggest an idea for this project Be sure to check the existing issues (both open and closed!), and make sure you are running the latest version of Pipenv. -Check the [diagnose documentation](https://pipenv.pypa.io/en/latest/diagnose/) for common issues and the [PEEP list](https://github.com/pypa/pipenv/blob/master/peeps/) before posting! We may close your issue if it is very similar to one of them. Please be considerate and follow the PEEP process, or be on your way. +Check the [diagnose documentation](https://pipenv.pypa.io/en/latest/diagnose/) for common issues, as well as the existing GitHub issues. If a documented solution did not work for you, describe the debugging steps you have already tried. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index f7c77d1354..b5a92992e9 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -7,9 +7,7 @@ What is the thing you want to fix? Is it associated with an issue on GitHub? Ple Always consider opening an issue first to describe your problem, so we can discuss what is the best way to amend it. Note that if you do not describe the goal of this change or link to a related issue, the maintainers may close the PR without further review. -If your pull request makes a non-insignificant change to Pipenv, such as the user interface or intended functionality, please file a PEEP. - - https://github.com/pypa/pipenv/blob/master/peeps/PEEP-000.md +If your pull request makes a significant change to Pipenv, such as a change to the user interface or intended functionality, please open a discussion or an issue first.
### The fix diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d502f32612..a38021c9e0 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -22,7 +22,6 @@ on: - "**/*.txt" - "examples/**" - "news/**" - - "peeps/**" branches: - main pull_request: @@ -36,7 +35,6 @@ on: - "**/*.txt" - "examples/**" - "news/**" - - "peeps/**" permissions: contents: read # to fetch code (actions/checkout) jobs: diff --git a/MANIFEST.in b/MANIFEST.in index 24bc6e9a04..c3cd9e40c1 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -23,7 +23,6 @@ recursive-include docs/_templates *.html recursive-include docs/_static *.js *.css *.png recursive-exclude tests/test_artifacts *.pyd *.so *.pyc *.egg-info PKG-INFO -prune peeps prune .azure-pipelines prune .github prune pipenv/vendor/importlib_metadata/tests diff --git a/news/6281.bugfix.rst b/news/6281.bugfix.rst new file mode 100644 index 0000000000..de5467bc93 --- /dev/null +++ b/news/6281.bugfix.rst @@ -0,0 +1,17 @@ +Fix dependency resolution edge cases and version constraint handling: +* Allow JSON output options for the ``--reverse`` dependency graph, matching pipdeptree +* Improve installation and upgrade routines to better handle dependencies +* Add more consistent handling of VCS dependencies and references +* Fix synchronization of development and default dependencies during updates +* Ensure proper propagation of version constraints during updates +* Fix handling of ``~=`` and other version specifiers during updates + +Key Changes: +* Improved reverse dependency analysis to catch conflicts earlier in resolution +* Better handling of VCS package lock data, preserving refs and subdirectories +* Fixed issue where VCS references could be lost in lock file when installed via commit hash +* Better handling of pipfile categories during installation and updates +* Corrected logic for development dependency resolution and constraint propagation +* Improved validation and preservation of version specifiers during updates + +This improves stability when working with complex dependency trees and version constraints. diff --git a/peeps/PEEP-000.md b/peeps/PEEP-000.md deleted file mode 100644 index 5f8a29786b..0000000000 --- a/peeps/PEEP-000.md +++ /dev/null @@ -1,9 +0,0 @@ -# PEEP-000: Pipenv Enhancement Proposals - -**ACCEPTED** - -Pipenv Enhancement Proposals (PEEPs) will be used for proposing any changes to Pipenv's user–interface or intended functionality. - -☤ - -This is the first PEEP. All non–insignificant changes to Pipenv will require a PEEP. The governance model for Pipenv, including the process for future PEEP acceptance will be defined in [PEEP-001](https://github.com/pypa/pipenv/blob/master/peeps/PEEP-001.md). diff --git a/peeps/PEEP-001.md b/peeps/PEEP-001.md deleted file mode 100644 index 39a9719ceb..0000000000 --- a/peeps/PEEP-001.md +++ /dev/null @@ -1,23 +0,0 @@ -# PEEP-001: Pipenv Governance Model - -**ACCEPTED** - -Pipenv will be governed by a BDFL (Kenneth Reitz), and a board of maintainers (trusted collaborators to the project on GitHub). - -☤ - -## BDFL Responsibility & PEEP Acceptance - -The approval of future PEEPs is the responsiblity of the BDFL, unless they defer the decision–making to the trusted board of maintainers, for a given PEEP. The BDFL may write new PEEPs at any time. - -## Trusted Maintainer Responsibility - -- Maintain Pipenv's already–established interface — prevent it from eroding. -- Polish what already exists. Do not craft. 
-- Enforce the PEEP process. - -## Community Responsibility - -- Reduce the burden on the maintainers by not requesting new features or changes in functionality, unless by the PEEP process. -- Avoid writing PEEPs, unless it seems absolutely neccessary, and in the best interest of the entire Python community. -- Remember, all deltas are risks. diff --git a/peeps/PEEP-002.md b/peeps/PEEP-002.md deleted file mode 100644 index d406e04f72..0000000000 --- a/peeps/PEEP-002.md +++ /dev/null @@ -1,33 +0,0 @@ -# PEEP-002: Specify options via environment variables - -**ACCEPTED** (being implemented) - -This PEEP describes an addition that would allow configuring Pipenv options via environment variables suitable especially for automated systems or CI/CD systems. - -☤ - -Systems running not only on containerized solutions (like Kubernetes or OpenShift) are often parametrized via environment variables. The aim of this PEEP is to provide an extension to the current Pipenv implementation that would simplify parametrizing options passed via environment variables. - -The current implementation requires most of the options to be passed via command line. It is possible to adjust some of the command line options via pre-defined names of environment variables (such as ``PIPENV_PYTHON``) but this approach does not allow to define environment variables for all of the options that can be possibly passed to Pipenv. - -The proposed approach is to re-use existing options passing via environment variables avaliable in [click](http://click.pocoo.org/5/options/#values-from-environment-variables>) (bundled with Pipenv). All of the options for available Pipenv's sub-commands can directly pick options passed via environment variables: - -```console -$ export PIPENV_INSTALL_DEPLOY=1 -$ export PIPENV_INSTALL_VERBOSE=1 -$ pipenv install -``` - -The naming schema for environment variables configuring options is following: - -``` -PIPENV__ -``` - -where sub-command is an uppercase name of Pipenv's sub-command (such as `install`, `run` or others) and option name is the name of Pipenv's sub-command option all in uppercase. Any dashes are translated to underscores; flags accept `1` signalizing the flag to be present. - -The naming schema guarantees no clashes for the already existing Pipenv configuration using environment variables. - -The proposed configuration via environment variables is available for Pipenv sub-commands. Options supplied via command line have higher priority than the ones supplied via environment variables. - -Author: Fridolín Pokorný diff --git a/peeps/PEEP-003.md b/peeps/PEEP-003.md deleted file mode 100644 index 7744e7287a..0000000000 --- a/peeps/PEEP-003.md +++ /dev/null @@ -1,9 +0,0 @@ -# PEEP-003: Revocation of Power of BDFL - -**ACCEPTED** - -Pipenv will be governed by a board of maintainers (trusted collaborators to the project on GitHub), not a BDFL. - -The BDFL retains his title, however, revokes himself of his powers. - -PEEP approval will be determined by available members of the board of maintainers, in private or public channels. diff --git a/peeps/PEEP-004.md b/peeps/PEEP-004.md deleted file mode 100644 index a3dc8c3304..0000000000 --- a/peeps/PEEP-004.md +++ /dev/null @@ -1,9 +0,0 @@ -## PEEP-004: Subcommands - -NOT YET ACCEPTED - -Pipenv will automatically run commands like "pipenv deploy" if the "pipenv-deploy" binary is available on the path. - -These subcommands cannot overwrite built-in commands. - -These subcommands will receive environment variables with contextual information. 
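The deleted PEEP-004 above proposed git-style subcommand dispatch: any `pipenv-<name>` binary on the PATH would become runnable as `pipenv <name>`, without shadowing built-in commands, and with contextual information passed through environment variables. Since that proposal was never accepted and the file is removed by this patch, the following is only an illustrative sketch of the mechanism it described; the `BUILTIN_COMMANDS` set and the `PIPENV_SUBCOMMAND` variable are hypothetical, not part of Pipenv.

```python
import os
import shutil
import subprocess

# Hypothetical: a subset of Pipenv's built-in commands that external
# subcommands must never overwrite, per PEEP-004.
BUILTIN_COMMANDS = {"install", "uninstall", "lock", "sync", "update", "graph", "check", "run"}


def dispatch_external_subcommand(name: str, args: list[str]) -> int:
    """Run `pipenv-<name>` from the PATH, in the style PEEP-004 sketched."""
    if name in BUILTIN_COMMANDS:
        raise ValueError(f"refusing to shadow built-in command: {name!r}")
    binary = shutil.which(f"pipenv-{name}")
    if binary is None:
        raise FileNotFoundError(f"no external subcommand found: pipenv-{name}")
    # Pass contextual information via environment variables, as proposed.
    env = dict(os.environ)
    env["PIPENV_SUBCOMMAND"] = name  # hypothetical contextual variable
    return subprocess.call([binary, *args], env=env)
```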
diff --git a/peeps/PEEP-005.md b/peeps/PEEP-005.md deleted file mode 100644 index 2cc0279f91..0000000000 --- a/peeps/PEEP-005.md +++ /dev/null @@ -1,65 +0,0 @@ -# PEEP-005: Do Not Remove Entries from the Lockfile When Using `--keep-outdated` - -**PROPOSED** - -This PEEP describes a change that would retain entries in the Lockfile even if they were not returned during resolution when the user passes the `--keep-outdated` flag. - -☤ - -The `--keep-outdated` flag is currently provided by Pipenv for the purpose of holding back outdated dependencies (i.e. dependencies that are not newly introduced). This proposal attempts to identify the reasoning behind the flag and identifies a need for a project-wide scoping. Finally, this proposal outlines the expected behavior of `--keep-outdated` under the specified circumstances, as well as the required changes to achieve full implementation. - -## Retaining Outdated Dependencies - -The purpose of retaining outdated dependencies is to allow the user to introduce a new package to their environment with a minimal impact on their existing environment. In an effort to achieve this, `keep_outdated` was proposed as both a flag and a Pipfile setting [in this issue](https://github.com/pypa/pipenv/issues/1255#issuecomment-354585775), originally described as follows: - -> pipenv lock --keep-outdated to request a minimal update that only adjusts the lock file to account for Pipfile changes (additions, removals, and changes to version constraints)... and pipenv install --keep-outdated needed to request only the minimal changes required to satisfy the installation request - -However, the current implementation always fully re-locks, rather than only locking the new dependencies. As a result, dependencies in the `Pipfile.lock` with markers for a python version different from that of the running interpreter will be removed, even if they have nothing to do with the current changeset. For instance, say you have the following dependency in your `Pipfile.lock`: - -```json -{ - "default": { - "backports.weakref": { - "hashes": [...], - "version": "==1.5", - "markers": "python_version<='3.4'" - } - } -} -``` - -If this lockfile were to be re-generated with Python 3, even with `--keep-outdated`, this entry would be removed. This makes it very difficult to maintain lockfiles which are compatible across major python versions, yet all that would be required to correct this would be a tweak to the implementation of `keep-outdated`. I believe this was the goal to begin with, but I feel this behavior should be documented and clarified before moving forward. - -## Desired Behavior - -1. The only changes that should occur in `Pipfile.lock` when `--keep-outdated` is passed should be changes resulting from new packages added or pin changes in the project `Pipfile`; -2. Existing packages in the project `Pipfile.lock` should remain in place, even if they are not returned during resolution; -3. New dependencies should be written to the lockfile; -4. Conflicts should be resolved as outlined below. - -## Conflict Resolution - -If a conflict should occur due to the presence in the `Pipfile.lock` of a dependency of a new package, the following steps should be undertaken before alerting the user: - -1. Determine whether the previously locked version of the dependency meets the constraints required of the new package; if so, pin that version; -2. If the previously locked version is not present in the `Pipfile` and is not a dependency of any other dependencies (i.e. 
has no presence in `pipenv graph`, etc), update the lockfile with the new version; -3. If there is a new or existing dependency which has a conflict with existing entries in the lockfile, perform an intermediate resolution step by checking: - a. If the new dependency can be satisfied by existing installs; - b. Whether conflicts can be upgraded without affecting locked dependencies; - c. If locked dependencies must be upgraded, whether those dependencies ultimately have any dependencies in the `Pipfile`; - d. If a traversal up the graph lands in the `Pipfile`, create _abstract dependencies_ from the `Pipfile` entries and determine whether they will still be satisfied by the new version; - e. If a new pin is required, ensure that any subdependencies of the newly pinned dependencies are therefore also re-pinned (simply prefer the updated lockfile instead of the cached version); - -4. Raise an Exception alerting the user that they either need to do a full lock or manually pin a version. - -## Necessary Changes - -In order to make these changes, we will need to modify the dependency resolution process. Overall, locking will require the following implementation changes: - -1. The ability to restore any entries that would otherwise be removed when the `--keep-outdated` flag is passed. The process already provides a caching mechanism, so we simply need to restore missing cache keys; -2. Conflict resolution steps: - a. Check an abstract dependency/candidate against a lockfile entry; - b. Requirements mapping for each dependency in the environment to determine if a lockfile entry is a descendent of any other entries; - - -Author: Dan Ryan diff --git a/peeps/PEEP-006.md b/peeps/PEEP-006.md deleted file mode 100644 index 5a3739e1e9..0000000000 --- a/peeps/PEEP-006.md +++ /dev/null @@ -1,62 +0,0 @@ -# PEEP-006: Include all deps in output of `pipenv lock -r --dev` - -This proposal makes the behavior of `pipenv lock --requirements --dev` -consistent with the behaviour of other commands: converting all dependencies, -not just the development dependencies. - -☤ - -If you type `pipenv lock --help` the help document says: - -```bash --d, --dev Install both develop and default packages. [env var:PIPENV_DEV] -``` - -That is not accurate and confusing for `pipenv lock -r`, which only produces the develop requirments. - -This PEEP proposes to change the behavior of `pipenv lock -r -d` to produce **all** requirements, both develop -and default. The help string of `-d/--dev` will be changed to **"Generate both develop and default requirements"**. - -As the existing behaviour was intended to support generating traditional `dev-requirements.txt` -files, a new flag, `--dev-only`, will be introduced to restrict output to development requirements only. - -When the new `pipenv lock` specific flag is used, the common `-d/--dev` flag is redundant, but -ignored (i.e. `pipenv lock -r --dev-only` and `pipenv lock -r --dev --dev-only` do the same thing). -If `--dev-only` is specified without `-r/--requirements`, then `PipenvOptionsError` will be thrown. - -As part of this change, `pipenv lock --requirements` will be updated to emit a comment header -indicating that the file was autogenerated, and the options passed to `pipenv lock`. 
This will use -the following `pip-compile` inspired format: - - # - # These requirements were autogenerated by pipenv - # To regenerate from the project's Pipfile, run: - # - # pipenv lock --requirements - # - -`--dev` or `--dev-only` will be append to the emitted regeneration command if -those options are set. - -To allow this new header to be turned off, `pipenv lock --requirements` will also support the same -`--header/--no-header` options that `pip-compile` offers. - -In the first release including this change, and in releases for at least 6 months from that date, -the emitted header will include the following note when the `--dev` option is set: - - # Note: in pipenv 2020.x, "--dev" changed to emit both default and development - # requirements. To emit only development requirements, pass "--dev-only". - -## Impact - -The users relying on the old behavior will get more requirements listed in the -``dev-requirements.txt`` file, which in most cases is harmless. They can pass -the `--dev-only` flag after updating `pipenv` to achieve the same thing as before. - -## Related issues: - -- #3316 - -## Related pull requests: - -- #4183 diff --git a/peeps/PEEP-044.md b/peeps/PEEP-044.md deleted file mode 100644 index 38d60dd3bb..0000000000 --- a/peeps/PEEP-044.md +++ /dev/null @@ -1,54 +0,0 @@ -# PEEP-044: safety-db integration, squelch, and output. - -pipenv check needs offline, ci, and other output capabilities. - -☤ - -Not everyone can utilize pipenv check and access the internet. Safety check knew this -and that is why they created safety-db. This repository contains a json database that -is updated monthly. Safety check allows you to pass a --db flag that is a local directory -containing that database. Safety check also allows you to pass --json, --bare, and ---full-report. Pipenv check has their own way of displaying the results that is why I -believe there should be a --output flag that allows users to specify json, bare, -and full-report from safety check and default for the current pipenv check output. -Currently, pipenv check has a lot of stdout messages and makes it harder to pipe -the results into something to be checked (especially for continuous integration -pipelines). That is why adding a --squelch switch is also important. This will be -default False (display all stdout); however, the user has the option to add the ---squelch switch to make the output only come from safety check. - -## Current implementation: -### Example 1 -``` bash -pipenv check -Checking PEP 508 requirements... -Passed! -Checking installed package safety... -25853: insecure-package <0.2.0 resolved (0.1.0 installed)! -This is an insecure package with lots of exploitable security vulnerabilities. -``` -### Example 2 -``` bash -pipenv check | jq length -parse error: Invalid numeric literal at line 1, column 9 -``` - -## Future implementation: -### Example 1 -``` bash -pipenv check --db /Users/macbookpro/workspace/test/safety-db/data/ --output json --squelch -[ - [ - "insecure-package", - "<0.2.0", - "0.1.0", - "This is an insecure package with lots of exploitable security vulnerabilities.", - "25853" - ] -] -``` -### Example 2 -``` bash -pipenv check --db /Users/macbookpro/workspace/test/safety-db/data/ --output json --squelch | jq length -1 -``` diff --git a/peeps/PEEP-TEMPLATE.md b/peeps/PEEP-TEMPLATE.md deleted file mode 100644 index 8c5e3a9f6e..0000000000 --- a/peeps/PEEP-TEMPLATE.md +++ /dev/null @@ -1,9 +0,0 @@ -# PEEP-042: Title Goes Here - -A brief, one–sentence description goes here. 
- -☤ - -A longer (but as concise as possible) description goes here. - -Code blocks, lists, and other Markdown features are encouraged to be used, when needed. diff --git a/pipenv/cli/command.py b/pipenv/cli/command.py index 247672d811..29fd85ed65 100644 --- a/pipenv/cli/command.py +++ b/pipenv/cli/command.py @@ -219,7 +219,7 @@ def install(state, **kwargs): editable_packages=state.installstate.editables, site_packages=state.site_packages, extra_pip_args=state.installstate.extra_pip_args, - categories=state.installstate.categories, + pipfile_categories=state.installstate.categories, skip_lock=state.installstate.skip_lock, ) diff --git a/pipenv/resolver.py b/pipenv/resolver.py index d7cd22a319..0236bd195e 100644 --- a/pipenv/resolver.py +++ b/pipenv/resolver.py @@ -3,7 +3,9 @@ import logging import os import sys -from functools import cached_property +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Dict, List, Optional, Set def _ensure_modules(): @@ -87,561 +89,337 @@ def handle_parsed_args(parsed): return parsed -class Entry: - """A resolved entry from a resolver run""" - - def __init__( - self, name, entry_dict, project, resolver, reverse_deps=None, category=None - ): - super().__init__() - from pipenv.utils.dependencies import ( - get_lockfile_section_using_pipfile_category, - ) - from pipenv.utils.toml import tomlkit_value_to_python - - self.name = name - if isinstance(entry_dict, dict): - self.entry_dict = self.clean_initial_dict(entry_dict) - else: - self.entry_dict = entry_dict - self.project = project - self.category = category - self.lockfile_section = get_lockfile_section_using_pipfile_category(category) - self.pipfile = tomlkit_value_to_python(project.parsed_pipfile.get(category, {})) - self.lockfile = project.lockfile_content.get(self.lockfile_section, {}) - self.pipfile_dict = self.pipfile.get(self.pipfile_name, {}) - if self.category != "packages" and self.name in project.lockfile_content.get( - "default", {} - ): - self.lockfile_dict = project.lockfile_content["default"][name] - else: - self.lockfile_dict = self.lockfile.get(name, entry_dict) - self.resolver = resolver - self.reverse_deps = reverse_deps - self._original_markers = None - self._markers = None - self._entry = None - self._lockfile_entry = None - self._pipfile_entry = None - self._parent_deps = [] - self._flattened_parents = [] - self._requires = None - self._deptree = None - self._parents_in_pipfile = [] - - @staticmethod - def make_requirement(name=None, entry=None): - from pipenv.utils.dependencies import from_pipfile - - return from_pipfile(name, entry) - - @classmethod - def clean_initial_dict(cls, entry_dict): - from pipenv.patched.pip._vendor.packaging.requirements import Requirement - - version = entry_dict.get("version", "") - if isinstance(version, Requirement): - version = str(version.specifier) - entry_dict["version"] = cls.clean_specifier(version) - if "name" in entry_dict: - del entry_dict["name"] - return entry_dict - - @classmethod - def parse_expressions(cls, expr_iterable): - keys = [] - expr_list = [] - expr = expr_iterable.copy() - if isinstance(expr, list): - expr_list = expr - if expr_list: - for part in expr_list: - keys.extend(cls.parse_expressions(part)) - return keys - - @classmethod - def get_markers_from_dict(cls, entry_dict): - from pipenv.patched.pip._vendor.packaging._parser import ( - parse_marker as packaging_parse_marker, - ) - from pipenv.utils.markers import normalize_marker_str - - markers = set() +@dataclass +class PackageSource: + 
"""Represents the source/origin of a package.""" - # If no markers are present, skip marker parsing - if not any( - k in entry_dict - for k in ["sys_platform", "python_version", "os_name", "platform_machine"] - ): - return None, entry_dict - - # Otherwise, proceed to parse markers from entry_dict - marker_expression = packaging_parse_marker(entry_dict.get("markers", "")) - - # Parse the marker expressions using the new packaging marker parser - marker_keys = cls.parse_expressions(marker_expression) - - # Identify relevant marker keys present in entry_dict - keys_in_dict = [k for k in marker_keys if k in entry_dict] - - # Normalize and add the markers from entry_dict - markers = {normalize_marker_str(f"{k} {entry_dict.pop(k)}") for k in keys_in_dict} - - # Handle "markers" field if it exists in the dictionary - if "markers" in entry_dict: - markers.add(normalize_marker_str(entry_dict["markers"])) - - # Remove None from the set if present - if None in markers: - markers.remove(None) - - # If there are any markers left, join them with "and" - if markers: - entry_dict["markers"] = " and ".join(list(markers)) - else: - markers = None - - return markers, entry_dict + index: Optional[str] = None + url: Optional[str] = None + vcs: Optional[str] = None + ref: Optional[str] = None + path: Optional[Path] = None + subdirectory: Optional[str] = None @property - def markers(self): - self._markers, self.entry_dict = self.get_markers_from_dict(self.entry_dict) - return self._markers - - @markers.setter - def markers(self, markers): - if not markers: - marker_str = self.marker_to_str(markers) - if marker_str: - self.entry.merge_markers(marker_str) - self._markers = self.marker_to_str(self._entry.markers) - entry_dict = self.entry_dict.copy() - entry_dict["markers"] = self.marker_to_str(self._entry.markers) - self.entry_dict = entry_dict + def is_vcs(self) -> bool: + return bool(self.vcs) @property - def original_markers(self): - original_markers, lockfile_dict = self.get_markers_from_dict(self.lockfile_dict) - self.lockfile_dict = lockfile_dict - self._original_markers = self.marker_to_str(original_markers) - return self._original_markers + def is_local(self) -> bool: + return bool(self.path) - @staticmethod - def marker_to_str(marker): - from pipenv.utils.markers import normalize_marker_str - if not marker: - return None - from collections.abc import Mapping - - marker_str = None - if isinstance(marker, Mapping): - marker_dict, _ = Entry.get_markers_from_dict(marker) - if marker_dict: - marker_str = f"{marker_dict.popitem()[1]}" - elif isinstance(marker, (list, set, tuple)): - marker_str = " and ".join([normalize_marker_str(m) for m in marker if m]) - elif isinstance(marker, str): - marker_str = f"{normalize_marker_str(marker)}" - if isinstance(marker_str, str): - return marker_str - return None - - @cached_property - def get_cleaned_dict(self): - from pipenv.utils.constants import VCS_LIST - - self.validate_constraints() - if self.entry.extras != self.lockfile_entry.extras: - entry_extras = list(self.entry.extras) - if self.lockfile_entry.extras: - entry_extras.extend(list(self.lockfile_entry.extras)) - self.entry_dict["extras"] = entry_extras - if self.original_markers and not self.markers: - original_markers = self.marker_to_str(self.original_markers) - self.markers = original_markers - self.entry_dict["markers"] = self.marker_to_str(original_markers) - entry_hashes = set(self.entry_dict.get("hashes", [])) - self.entry_dict["hashes"] = sorted(entry_hashes) - self.entry_dict["name"] = self.name - if 
"version" in self.entry_dict: - self.entry_dict["version"] = self.strip_version(self.entry_dict["version"]) - _, self.entry_dict = self.get_markers_from_dict(self.entry_dict) - if self.resolver.index_lookup.get(self.name): - self.entry_dict["index"] = self.resolver.index_lookup[self.name] - - # Handle VCS entries - for key in VCS_LIST: - if key in self.lockfile_dict: - self.entry_dict[key] = self.lockfile_dict[key] - self.entry_dict.pop("version", None) - return self.entry_dict +@dataclass +class PackageRequirement: + """Core package requirement information.""" - @property - def lockfile_entry(self): - if self._lockfile_entry is None: - self._lockfile_entry = self.make_requirement(self.name, self.lockfile_dict) - return self._lockfile_entry + name: str + version: Optional[str] = None + extras: Set[str] = field(default_factory=set) + markers: Optional[str] = None + hashes: Set[str] = field(default_factory=set) + source: PackageSource = field(default_factory=PackageSource) - @lockfile_entry.setter - def lockfile_entry(self, entry): - self._lockfile_entry = entry + def __post_init__(self): + if isinstance(self.extras, list): + self.extras = set(self.extras) + if isinstance(self.hashes, list): + self.hashes = set(self.hashes) - @property - def pipfile_entry(self): - if self._pipfile_entry is None: - self._pipfile_entry = self.make_requirement( - self.pipfile_name, self.pipfile_dict - ) - return self._pipfile_entry - - @property - def entry(self): - return self.make_requirement(self.name, self.lockfile_dict) - - @property - def normalized_name(self): - return self.entry.normalized_name - - @property - def pipfile_name(self): - return self.project.get_package_name_in_pipfile(self.name, category=self.category) - @property - def is_in_pipfile(self): - return bool(self.pipfile_name) - - @property - def pipfile_packages(self): - return self.project.pipfile_package_names[self.category] - - def create_parent(self, name, specifier="*"): - parent = self.create( - name, specifier, self.project, self.resolver, self.reverse_deps, self.category +@dataclass +class Entry: + """Represents a resolved package entry with its dependencies and constraints.""" + + name: str + entry_dict: Dict[str, Any] + project: Any # Could be more specific with a Project type + resolver: Any # Could be more specific with a Resolver type + reverse_deps: Optional[Dict[str, Any]] = None + category: Optional[str] = None + + def __post_init__(self): + """Initialize derived attributes after dataclass initialization.""" + self.lockfile_section = self._get_lockfile_section() + self.pipfile = self._get_pipfile_content() + self.requirement = self._build_requirement() + + def _build_requirement(self) -> PackageRequirement: + """Construct a PackageRequirement from entry data.""" + # Extract VCS information + vcs_info = self._extract_vcs_info() + source = PackageSource( + index=self.resolver.index_lookup.get(self.name), **vcs_info ) - parent._deptree = self.deptree - return parent - @property - def deptree(self): - if not self._deptree: - self._deptree = self.project.environment.get_package_requirements() - return self._deptree + # Clean and normalize version + version = self._clean_version(self.entry_dict.get("version")) + + # Build the core requirement + return PackageRequirement( + name=self.name, + version=version, + extras=set(self.entry_dict.get("extras", [])), + markers=self._clean_markers(), + hashes=set(self.entry_dict.get("hashes", [])), + source=source, + ) - @classmethod - def create( - cls, name, entry_dict, project, resolver, 
reverse_deps=None, category=None - ): - return cls(name, entry_dict, project, resolver, reverse_deps, category) + def _extract_vcs_info(self) -> Dict[str, Optional[str]]: + """Extract VCS information from entry dict and lockfile.""" + vcs_info = {} + vcs_keys = {"git", "hg", "svn", "bzr"} - @staticmethod - def clean_specifier(specifier): - from pipenv.patched.pip._vendor.packaging.specifiers import Specifier + # Check both entry_dict and lockfile_dict for VCS info + for key in vcs_keys: + if key in self.entry_dict: + vcs_info["vcs"] = key + vcs_info["url"] = self.entry_dict[key] + vcs_info["ref"] = self.entry_dict.get("ref") + vcs_info["subdirectory"] = self.entry_dict.get("subdirectory") + break - if not any(specifier.startswith(k) for k in Specifier._operators): - if specifier.strip().lower() in ["any", "", "*"]: - return "*" - specifier = f"=={specifier}" - elif specifier.startswith("==") and specifier.count("=") > 3: - specifier = f"=={specifier.lstrip('=')}" - return specifier + return vcs_info @staticmethod - def strip_version(specifier): - from pipenv.patched.pip._vendor.packaging.specifiers import Specifier - - op = next(iter(k for k in Specifier._operators if specifier.startswith(k)), None) - if op: - specifier = specifier[len(op) :] - while op: - op = next( - iter(k for k in Specifier._operators if specifier.startswith(k)), - None, - ) - if op: - specifier = specifier[len(op) :] - return specifier - - @property - def parent_deps(self): - if not self._parent_deps: - self._parent_deps = self.get_parent_deps(unnest=False) - return self._parent_deps - - @property - def flattened_parents(self): - if not self._flattened_parents: - self._flattened_parents = self.get_parent_deps(unnest=True) - return self._flattened_parents - - @property - def parents_in_pipfile(self): - if not self._parents_in_pipfile: - self._parents_in_pipfile = [ - p - for p in self.flattened_parents - if p.normalized_name in self.pipfile_packages - ] - return self._parents_in_pipfile - - @property - def is_updated(self): - return self.entry.specifiers != self.lockfile_entry.specifiers - - @property - def requirements(self): - if not self._requires: - self._requires = next( - iter(self.project.environment.get_package_requirements(self.name)), {} - ) - return self._requires + def _clean_version(version: Optional[str]) -> Optional[str]: + """Clean and normalize version strings.""" + if not version: + return None + if version.strip().lower() in {"any", "", "*"}: + return "*" + if not any( + version.startswith(op) for op in ("==", ">=", "<=", "~=", "!=", ">", "<") + ): + version = f"=={version}" + return version + + def _clean_markers(self) -> Optional[str]: + """Clean and normalize marker strings.""" + markers = [] + marker_keys = { + "sys_platform", + "python_version", + "os_name", + "platform_machine", + "markers", + } + + for key in marker_keys: + if key in self.entry_dict: + value = self.entry_dict.pop(key) + if value and key != "markers": + markers.append(f"{key} {value}") + elif value: # key == "markers" + markers.append(value) + + return " and ".join(markers) if markers else None + + def _get_lockfile_section(self) -> str: + """Get the appropriate lockfile section based on category.""" + from pipenv.utils.dependencies import get_lockfile_section_using_pipfile_category + + return get_lockfile_section_using_pipfile_category(self.category) + + def _get_pipfile_content(self) -> Dict[str, Any]: + """Get and normalize pipfile content.""" + from pipenv.utils.toml import tomlkit_value_to_python - @property - def 
updated_version(self): - version = str(self.entry.specifier) - return self.strip_version(version) + return tomlkit_value_to_python(self.project.parsed_pipfile.get(self.category, {})) @property - def updated_specifier(self) -> str: - return str(self.entry.specifier) + def get_cleaned_dict(self) -> Dict[str, Any]: + """Create a cleaned dictionary representation of the entry.""" + cleaned = { + "name": self.name, + "version": self.requirement.version, + "extras": ( + sorted(self.requirement.extras) if self.requirement.extras else None + ), + "markers": self.requirement.markers, + "hashes": ( + sorted(self.requirement.hashes) if self.requirement.hashes else None + ), + "subdirectory": self.requirement.source.subdirectory, + "editable": self.entry_dict.get("editable", None), + "path": self.requirement.source.path, + "file": self.requirement.source.path, + } + + # Add index if present + if self.requirement.source.index: + cleaned["index"] = self.requirement.source.index + + # Add VCS information if present + if self.requirement.source.is_vcs: + cleaned[self.requirement.source.vcs] = self.requirement.source.url + if self.entry_dict.get("ref"): + cleaned["ref"] = self.entry_dict["ref"] + elif self.requirement.source.ref: + cleaned["ref"] = self.requirement.source.ref + cleaned.pop("version", None) # Remove version for VCS entries + + # Clean up None values + return {k: v for k, v in cleaned.items() if v is not None} + + def validate_constraints(self) -> bool: + """Validate that all constraints are satisfied.""" + from pipenv.exceptions import DependencyConflict + from pipenv.patched.pip._vendor.packaging.requirements import Requirement - @property - def original_specifier(self) -> str: - return self.lockfile_entry.specifiers + constraints = self.resolver.parsed_constraints + version = self.requirement.version - @property - def original_version(self): - if self.original_specifier: - return self.strip_version(self.original_specifier) - return None - - def validate_specifiers(self): - if self.is_in_pipfile and not self.pipfile_entry.editable: - return self.pipfile_entry.requirement.specifier.contains(self.updated_version) - return True + if not version: + return True - def get_dependency(self, name): - if self.requirements: - return next( - iter( - dep - for dep in self.requirements.get("dependencies", []) - if dep and dep.get("package_name", "") == name - ), - {}, - ) - return {} - - def get_parent_deps(self, unnest=False): - from pipenv.patched.pip._vendor.packaging.specifiers import Specifier - - parents = [] - for spec in self.reverse_deps.get(self.normalized_name, {}).get("parents", set()): - spec_match = next(iter(c for c in Specifier._operators if c in spec), None) - name = spec - parent = None - if spec_match is not None: - spec_index = spec.index(spec_match) - specifier = self.clean_specifier( - spec[spec_index : len(spec_match)] - ).strip() - name_start = spec_index + len(spec_match) - name = spec[name_start:].strip() - parent = self.create_parent(name, specifier) - else: - name = spec - parent = self.create_parent(name) - if parent is not None: - parents.append(parent) - if not unnest or parent.pipfile_name is not None: - continue - if self.reverse_deps.get(parent.normalized_name, {}).get("parents", set()): - parents.extend(parent.flattened_parents) - return parents - - def get_constraints(self): - """ - Retrieve all of the relevant constraints, aggregated from the pipfile, resolver, - and parent dependencies and their respective conflict resolution where possible. 
- - :return: A set of **InstallRequirement** instances representing constraints - :rtype: Set - """ - return self.resolver.parsed_constraints - - def get_pipfile_constraint(self): - """ - Retrieve the version constraint from the pipfile if it is specified there, - otherwise check the constraints of the parent dependencies and their conflicts. - - :return: An **InstallRequirement** instance representing a version constraint - """ - if self.is_in_pipfile: - return self.pipfile_entry - - def validate_constraints(self): - """ - Retrieves the full set of available constraints and iterate over them, validating - that they exist and that they are not causing unresolvable conflicts. - - :return: True if the constraints are satisfied by the resolution provided - :raises: :exc:`pipenv.exceptions.DependencyConflict` if the constraints dont exist - """ - from pipenv.exceptions import DependencyConflict - from pipenv.patched.pip._vendor.packaging.requirements import Requirement - from pipenv.utils import err + # Remove any operator from version for comparison + clean_version = self._strip_version(version) - constraints = self.get_constraints() - pinned_version = self.updated_version for constraint in constraints: if not isinstance(constraint, Requirement): continue - if pinned_version and not constraint.specifier.contains( - str(pinned_version), prereleases=True - ): - if self.project.s.is_verbose(): - err.print(f"Tried constraint: {constraint!r}") - msg = ( - f"Cannot resolve conflicting version {self.name}{constraint.specifier} " - f"while {self.name}{self.updated_specifier} is locked." - ) - raise DependencyConflict(msg) - return True - def check_flattened_parents(self): - for parent in self.parents_in_pipfile: - if not parent.updated_specifier: + if not constraint.name == self.name: continue - if not parent.validate_specifiers(): - from pipenv.exceptions import DependencyConflict + if not constraint.specifier.contains(clean_version, prereleases=True): msg = ( - f"Cannot resolve conflicting versions: (Root: {self.name}) " - f"{parent.pipfile_name}{parent.pipfile_entry.requirement.specifiers} (Pipfile) " - f"Incompatible with {parent.name}{parent.updated_specifiers} (resolved)\n" + f"Cannot resolve conflicting version {self.name}{constraint.specifier} " + f"while {self.name}=={clean_version} is locked." 
) raise DependencyConflict(msg) + return True - def __getattribute__(self, key): - result = None - old_version = ["was_", "had_", "old_"] - new_version = ["is_", "has_", "new_"] - if any(key.startswith(v) for v in new_version): - entry = Entry.__getattribute__(self, "entry") - try: - keystart = key.index("_") + 1 - try: - result = getattr(entry, key[keystart:]) - except AttributeError: - result = getattr(entry, key) - except AttributeError: - result = super().__getattribute__(key) - return result - if any(key.startswith(v) for v in old_version): - lockfile_entry = Entry.__getattribute__(self, "lockfile_entry") - try: - keystart = key.index("_") + 1 - try: - result = getattr(lockfile_entry, key[keystart:]) - except AttributeError: - result = getattr(lockfile_entry, key) - except AttributeError: - result = super().__getattribute__(key) - return result - return super().__getattribute__(key) - - -def clean_results(results, resolver, project, category): - from pipenv.utils.dependencies import ( - translate_markers, - ) - - if not project.lockfile_exists: - return results + @staticmethod + def _strip_version(version: str) -> str: + """Remove version operators from a version string.""" + operators = {"==", ">=", "<=", "~=", "!=", ">", "<"} + for op in operators: + if version.startswith(op): + return version[len(op) :].strip() + return version.strip() + + +def process_resolver_results( + results: List[Dict[str, Any]], resolver: Any, project: Any, category: Optional[str] +) -> List[Dict[str, Any]]: + """ + Process the results from the dependency resolver into cleaned lockfile entries. + + Args: + results: Raw results from the resolver + resolver: The resolver instance that produced the results + project: The current project instance + category: The category of dependencies being processed + + Returns: + List of processed entries ready for the lockfile + """ + if not results: + return [] + + # Get reverse dependencies for the project reverse_deps = project.environment.reverse_dependencies() - new_results = [] + + processed_results = [] for result in results: - name = result.get("name") - entry_dict = result.copy() + # Create Entry instance with our new dataclass entry = Entry( - name, - entry_dict, - project, - resolver, + name=result["name"], + entry_dict=result, + project=project, + resolver=resolver, reverse_deps=reverse_deps, category=category, ) - entry_dict = translate_markers(entry.get_cleaned_dict) - new_results.append(entry_dict) - return new_results + + # Get the cleaned dictionary representation + cleaned_entry = entry.get_cleaned_dict + + # Validate the entry meets all constraints + entry.validate_constraints() + + processed_results.append(cleaned_entry) + + return processed_results def resolve_packages( - pre, - clear, - verbose, - system, - write, - requirements_dir, - packages, - category, - constraints=None, -): + pre: bool, + clear: bool, + verbose: bool, + system: bool, + write: Optional[str], + requirements_dir: Optional[str], + packages: Dict[str, Any], + pipfile_category: Optional[str], + constraints: Optional[Dict[str, Any]] = None, +) -> List[Dict[str, Any]]: + """ + Resolve package dependencies and return processed results. 
+ + Args: + pre: Whether to include pre-release versions + clear: Whether to clear caches + verbose: Whether to output verbose logging + system: Whether to use system packages + write: Path to write results to + requirements_dir: Directory containing requirements files + packages: Package specifications to resolve + pipfile_category: Category of dependencies being processed + constraints: Additional constraints to apply + + Returns: + List of processed package entries + """ + from pipenv.project import Project from pipenv.utils.internet import create_mirror_source, replace_pypi_sources from pipenv.utils.resolver import resolve_deps + # Handle mirror configuration pypi_mirror_source = ( create_mirror_source(os.environ["PIPENV_PYPI_MIRROR"], "pypi_mirror") if "PIPENV_PYPI_MIRROR" in os.environ else None ) + # Update packages with constraints if provided if constraints: packages.update(constraints) - def resolve( - packages, pre, project, sources, clear, system, category, requirements_dir=None - ): - return resolve_deps( - packages, - which, - project=project, - pre=pre, - category=category, - sources=sources, - clear=clear, - allow_global=system, - req_dir=requirements_dir, - ) - - from pipenv.project import Project - + # Initialize project and configure sources project = Project() sources = ( replace_pypi_sources(project.pipfile_sources(), pypi_mirror_source) if pypi_mirror_source else project.pipfile_sources() ) - results, resolver = resolve( + + # Resolve dependencies + results, resolver = resolve_deps( packages, - pre=pre, - category=category, + which, project=project, + pre=pre, + pipfile_category=pipfile_category, sources=sources, clear=clear, - system=system, - requirements_dir=requirements_dir, + allow_global=system, + req_dir=requirements_dir, + ) + + # Process results + processed_results = process_resolver_results( + results, resolver, project, pipfile_category ) - results = clean_results(results, resolver, project, category) + + # Write results if requested if write: with open(write, "w") as fh: - if not results: - json.dump([], fh) - else: - json.dump(results, fh) - if results: - return results - return [] + json.dump(processed_results, fh) + + return processed_results def _main( diff --git a/pipenv/routines/graph.py b/pipenv/routines/graph.py index 63f63900f5..744397f371 100644 --- a/pipenv/routines/graph.py +++ b/pipenv/routines/graph.py @@ -1,12 +1,11 @@ import json as simplejson -import os import sys from pathlib import Path from pipenv import exceptions +from pipenv.utils import console, err from pipenv.utils.processes import run_command from pipenv.utils.requirements import BAD_PACKAGES -from pipenv.vendor import click def do_graph(project, bare=False, json=False, json_tree=False, reverse=False): @@ -14,79 +13,47 @@ def do_graph(project, bare=False, json=False, json_tree=False, reverse=False): from pipenv.vendor import pipdeptree - pipdeptree_path = os.path.dirname(pipdeptree.__file__.rstrip("cdo")) + pipdeptree_path = Path(pipdeptree.__file__).parent try: python_path = project.python() except AttributeError: - click.echo( - "{}: {}".format( - click.style("Warning", fg="red", bold=True), - "Unable to display currently-installed dependency graph information here. " - "Please run within a Pipenv project.", - ), - err=True, + err.print( + "[bold][red]Warning: Unable to display currently-installed dependency graph information here. 
" + "Please run within a Pipenv project.[/red][/bold]", ) sys.exit(1) except RuntimeError: pass - else: - if os.name != "nt": # bugfix #4388 - python_path = Path(python_path).as_posix() - pipdeptree_path = Path(pipdeptree_path).as_posix() - if reverse and json: - click.echo( - "{}: {}".format( - click.style("Warning", fg="red", bold=True), - "Using both --reverse and --json together is not supported. " - "Please select one of the two options.", - ), - err=True, - ) - sys.exit(1) - if reverse and json_tree: - click.echo( - "{}: {}".format( - click.style("Warning", fg="red", bold=True), - "Using both --reverse and --json-tree together is not supported. " - "Please select one of the two options.", - ), - err=True, - ) - sys.exit(1) + # Only keep the json + json_tree incompatibility check if json and json_tree: - click.echo( - "{}: {}".format( - click.style("Warning", fg="red", bold=True), - "Using both --json and --json-tree together is not supported. " - "Please select one of the two options.", - ), - err=True, + err.print( + "[bold][red]Warning: Using both --json and --json-tree together is not supported. " + "Please select one of the two options.[/red][/bold]", ) sys.exit(1) - flag = "" + + # Build command arguments list + cmd_args = [python_path, pipdeptree_path, "-l"] + + # Add flags as needed - multiple flags now supported if json: - flag = "--json" + cmd_args.append("--json") if json_tree: - flag = "--json-tree" + cmd_args.append("--json-tree") if reverse: - flag = "--reverse" + cmd_args.append("--reverse") + if not project.virtualenv_exists: - click.echo( - "{}: No virtualenv has been created for this project yet! Consider " - "running {} first to automatically generate one for you or see " - "{} for further instructions.".format( - click.style("Warning", fg="red", bold=True), - click.style("`pipenv install`", fg="green"), - click.style("`pipenv install --help`", fg="green"), - ), - err=True, + err.echo( + "[bold][red]Warning: No virtualenv has been created for this project yet! Consider " + "running `pipenv install` first to automatically generate one for you or see " + "`pipenv install --help` for further instructions.[/red][/bold]", ) sys.exit(1) - cmd_args = [python_path, pipdeptree_path, "-l"] - if flag: - cmd_args.append(flag) + c = run_command(cmd_args, is_verbose=project.s.is_verbose()) + # Run dep-tree. if not bare: if json: @@ -97,7 +64,7 @@ def do_graph(project, bare=False, json=False, json_tree=False, reverse=False): raise exceptions.JSONParseError(c.stdout, c.stderr) else: data += [d for d in parsed if d["package"]["key"] not in BAD_PACKAGES] - click.echo(simplejson.dumps(data, indent=4)) + console.print(simplejson.dumps(data, indent=4)) sys.exit(0) elif json_tree: @@ -118,30 +85,26 @@ def traverse(obj): raise exceptions.JSONParseError(c.stdout, c.stderr) else: data = traverse(parsed) - click.echo(simplejson.dumps(data, indent=4)) + console.print(simplejson.dumps(data, indent=4)) sys.exit(0) else: for line in c.stdout.strip().split("\n"): # Ignore bad packages as top level. - # TODO: This should probably be a "==" in + line.partition if line.split("==")[0] in BAD_PACKAGES and not reverse: continue # Bold top-level packages. if not line.startswith(" "): - click.echo(click.style(line, bold=True)) + console.print(f"[bold]{line}[/bold]") # Echo the rest. 
else: - click.echo(click.style(line, bold=False)) + console.print(line) else: - click.echo(c.stdout) + console.print(c.stdout) + if c.returncode != 0: - click.echo( - "{} {}".format( - click.style("ERROR: ", fg="red", bold=True), - click.style(f"{c.stderr}", fg="white"), - ), - err=True, + err.print( + f"[bold][red]ERROR: {c.stderr}[red][/bold]", ) # Return its return code. sys.exit(c.returncode) diff --git a/pipenv/routines/install.py b/pipenv/routines/install.py index 5d1e49195d..1b13caf1bb 100644 --- a/pipenv/routines/install.py +++ b/pipenv/routines/install.py @@ -35,13 +35,14 @@ def handle_new_packages( system, pypi_mirror, extra_pip_args, - categories, - skip_lock, + pipfile_categories, + perform_upgrades=True, index=None, ): + from pipenv.routines.update import do_update + new_packages = [] if packages or editable_packages: - from pipenv.routines.update import do_update pkg_list = packages + [f"-e {pkg}" for pkg in editable_packages] @@ -76,8 +77,8 @@ def handle_new_packages( sys.exit(1) try: - if categories: - for category in categories: + if pipfile_categories: + for category in pipfile_categories: added, cat, normalized_name = project.add_package_to_pipfile( pkg_requirement, pkg_line, dev, category ) @@ -107,26 +108,27 @@ def handle_new_packages( if pre: project.update_settings({"allow_prereleases": pre}) - # Use the update routine for new packages - if not skip_lock: - try: - do_update( - project, - dev=dev, - pre=pre, - packages=packages, - editable_packages=editable_packages, - pypi_mirror=pypi_mirror, - index_url=index, - extra_pip_args=extra_pip_args, - categories=categories, - ) - except Exception: - for pkg_name, category in new_packages: - project.remove_package_from_pipfile(pkg_name, category) - raise + # Use the update routine for new packages + if perform_upgrades: + try: + do_update( + project, + dev=dev, + pre=pre, + packages=packages, + editable_packages=editable_packages, + pypi_mirror=pypi_mirror, + index_url=index, + extra_pip_args=extra_pip_args, + categories=pipfile_categories, + ) + return new_packages, True + except Exception: + for pkg_name, category in new_packages: + project.remove_package_from_pipfile(pkg_name, category) + raise - return new_packages + return new_packages, False def handle_lockfile( @@ -141,10 +143,21 @@ def handle_lockfile( pypi_mirror, categories, ): - if (project.lockfile_exists and not ignore_pipfile) and not skip_lock: + """Handle the lockfile, updating if necessary. Returns True if package updates were applied.""" + if ( + (project.lockfile_exists and not ignore_pipfile) + and not skip_lock + and not packages + ): old_hash = project.get_lockfile_hash() new_hash = project.calculate_pipfile_hash() if new_hash != old_hash: + if deploy: + console.print( + f"Your Pipfile.lock ({old_hash}) is out of date. 
Expected: ({new_hash}).", + style="red", + ) + raise exceptions.DeployException handle_outdated_lockfile( project, packages, @@ -152,15 +165,13 @@ def handle_lockfile( new_hash=new_hash, system=system, allow_global=allow_global, - deploy=deploy, + skip_lock=skip_lock, pre=pre, pypi_mirror=pypi_mirror, categories=categories, ) elif not project.lockfile_exists and not skip_lock: - handle_missing_lockfile( - project, system, allow_global, pre, pypi_mirror, categories - ) + handle_missing_lockfile(project, system, allow_global, pre, pypi_mirror) def handle_outdated_lockfile( @@ -170,19 +181,12 @@ def handle_outdated_lockfile( new_hash, system, allow_global, - deploy, + skip_lock, pre, pypi_mirror, categories, ): - from pipenv.routines.update import do_update - - if deploy: - console.print( - f"Your Pipfile.lock ({old_hash}) is out of date. Expected: ({new_hash}).", - style="red", - ) - raise exceptions.DeployException + """Handle an outdated lockfile returning True if package updates were applied.""" if (system or allow_global) and not (project.s.PIPENV_VIRTUALENV): err.print( f"Pipfile.lock ({old_hash}) out of date, but installation uses --system so" @@ -201,17 +205,18 @@ def handle_outdated_lockfile( msg.format(old_hash, new_hash), style="bold yellow", ) - do_update( - project, - packages=packages, - pre=pre, - system=system, - pypi_mirror=pypi_mirror, - categories=categories, - ) + if not skip_lock: + do_lock( + project, + system=system, + pre=pre, + write=True, + pypi_mirror=pypi_mirror, + categories=None, + ) -def handle_missing_lockfile(project, system, allow_global, pre, pypi_mirror, categories): +def handle_missing_lockfile(project, system, allow_global, pre, pypi_mirror): if (system or allow_global) and not project.s.PIPENV_VIRTUALENV: raise exceptions.PipenvOptionsError( "--system", @@ -230,7 +235,6 @@ def handle_missing_lockfile(project, system, allow_global, pre, pypi_mirror, cat pre=pre, write=True, pypi_mirror=pypi_mirror, - categories=categories, ) @@ -249,7 +253,7 @@ def do_install( deploy=False, site_packages=None, extra_pip_args=None, - categories=None, + pipfile_categories=None, skip_lock=False, ): requirements_directory = fileutils.create_tracked_tempdir( @@ -259,6 +263,23 @@ def do_install( packages = packages if packages else [] editable_packages = editable_packages if editable_packages else [] package_args = [p for p in packages if p] + [p for p in editable_packages if p] + new_packages = [] + if dev and not pipfile_categories: + pipfile_categories = ["dev-packages"] + elif not pipfile_categories: + pipfile_categories = ["packages"] + + ensure_project( + project, + python=python, + system=system, + warn=True, + deploy=deploy, + skip_requirements=False, + pypi_mirror=pypi_mirror, + site_packages=site_packages, + pipfile_categories=pipfile_categories, + ) do_init( project, @@ -267,10 +288,8 @@ def do_install( allow_global=system, deploy=deploy, pypi_mirror=pypi_mirror, - categories=categories, skip_lock=skip_lock, - site_packages=site_packages, - python=python, + categories=pipfile_categories, ) do_install_validations( @@ -283,25 +302,27 @@ def do_install( requirementstxt=requirementstxt, pre=pre, deploy=deploy, - categories=categories, - skip_lock=skip_lock, - ) - - new_packages = handle_new_packages( - project, - packages, - editable_packages, - dev=dev, - pre=pre, - system=system, - pypi_mirror=pypi_mirror, - extra_pip_args=extra_pip_args, - categories=categories, + categories=pipfile_categories, skip_lock=skip_lock, - index=index, ) + if not deploy: + new_packages, _ = 
handle_new_packages( + project, + packages, + editable_packages, + dev=dev, + pre=pre, + system=system, + pypi_mirror=pypi_mirror, + extra_pip_args=extra_pip_args, + pipfile_categories=pipfile_categories, + perform_upgrades=not skip_lock, + index=index, + ) try: + if dev: # Install both develop and default package categories from Pipfile. + pipfile_categories = ["default", "develop"] do_install_dependencies( project, dev=dev, @@ -309,7 +330,7 @@ def do_install( requirements_dir=requirements_directory, pypi_mirror=pypi_mirror, extra_pip_args=extra_pip_args, - categories=categories, + categories=pipfile_categories, skip_lock=skip_lock, ) except Exception as e: @@ -452,8 +473,11 @@ def do_install_dependencies( else: lockfile = project.get_or_create_lockfile(categories=categories) if not bare: + lockfile_category = get_lockfile_section_using_pipfile_category( + pipfile_category + ) console.print( - f"Installing dependencies from Pipfile.lock " + f"Installing dependencies from Pipfile.lock [{lockfile_category}]" f"({lockfile['_meta'].get('hash', {}).get('sha256')[-6:]})...", style="bold", ) @@ -658,23 +682,12 @@ def do_init( deploy=False, pre=False, pypi_mirror=None, - categories=None, skip_lock=False, - site_packages=None, - python=None, + categories=None, ): - ensure_project( - project, - python=python, - system=system, - warn=True, - deploy=deploy, - skip_requirements=False, - pypi_mirror=pypi_mirror, - site_packages=site_packages, - categories=categories, - ) - + """Initialize the project, ensuring that the Pipfile and Pipfile.lock are in place. + Returns True if packages were updated + installed. + """ if not deploy: ensure_pipfile(project, system=system) diff --git a/pipenv/routines/lock.py b/pipenv/routines/lock.py index ec12370cab..b29a73bbf4 100644 --- a/pipenv/routines/lock.py +++ b/pipenv/routines/lock.py @@ -68,7 +68,7 @@ def do_lock( packages, which=project._which, project=project, - category=pipfile_category, + pipfile_category=pipfile_category, clear=clear, pre=pre, allow_global=system, diff --git a/pipenv/routines/outdated.py b/pipenv/routines/outdated.py index b34843f99c..c5974490a6 100644 --- a/pipenv/routines/outdated.py +++ b/pipenv/routines/outdated.py @@ -84,11 +84,10 @@ def do_outdated(project, pypi_mirror=None, pre=False, clear=False): fg="yellow", err=True, ) - if not outdated: - click.echo(click.style("All packages are up to date!", fg="green", bold=True)) - sys.exit(0) - for package, old_version, new_version in outdated: + for package, old_version, new_version in set(outdated).union(set(skipped)): click.echo( f"Package {package!r} out-of-date: {old_version!r} installed, {new_version!r} available." 
) + if not outdated: + click.echo(click.style("All packages are up to date!", fg="green", bold=True)) sys.exit(bool(outdated)) diff --git a/pipenv/routines/sync.py b/pipenv/routines/sync.py index 84c66f0576..80e0b1ad26 100644 --- a/pipenv/routines/sync.py +++ b/pipenv/routines/sync.py @@ -50,7 +50,6 @@ def do_sync( pypi_mirror=pypi_mirror, deploy=deploy, system=system, - categories=categories, ) do_install_dependencies( project, diff --git a/pipenv/routines/uninstall.py b/pipenv/routines/uninstall.py index 7939ccbd35..fbb9d2e7a1 100644 --- a/pipenv/routines/uninstall.py +++ b/pipenv/routines/uninstall.py @@ -141,7 +141,7 @@ def do_uninstall( which=project._which, project=project, lockfile={}, - category=pipfile_category, + pipfile_category=pipfile_category, pre=pre, allow_global=system, pypi_mirror=pypi_mirror, diff --git a/pipenv/routines/update.py b/pipenv/routines/update.py index 42762d7df2..e15f6a5147 100644 --- a/pipenv/routines/update.py +++ b/pipenv/routines/update.py @@ -2,18 +2,24 @@ import os import sys from collections import defaultdict +from pathlib import Path +from typing import Dict, Set, Tuple -from pipenv.routines.lock import do_lock +from pipenv.exceptions import JSONParseError, PipenvCmdError +from pipenv.patched.pip._vendor.packaging.specifiers import SpecifierSet +from pipenv.patched.pip._vendor.packaging.version import InvalidVersion, Version from pipenv.routines.outdated import do_outdated from pipenv.routines.sync import do_sync +from pipenv.utils import err from pipenv.utils.dependencies import ( expansive_install_req_from_line, get_pipfile_category_using_lockfile_section, ) +from pipenv.utils.processes import run_command from pipenv.utils.project import ensure_project from pipenv.utils.requirements import add_index_to_pipfile from pipenv.utils.resolver import venv_resolve_deps -from pipenv.vendor import click +from pipenv.vendor import pipdeptree def do_update( @@ -36,6 +42,12 @@ def do_update( clear=False, lock_only=False, ): + """Update the virtualenv.""" + packages = [p for p in (packages or []) if p] + editable = [p for p in (editable_packages or []) if p] + if not outdated: + outdated = bool(dry_run) + ensure_project( project, python=python, @@ -44,29 +56,19 @@ def do_update( site_packages=site_packages, clear=clear, ) - packages = [p for p in (packages or []) if p] - editable = [p for p in (editable_packages or []) if p] + if not outdated: - outdated = bool(dry_run) - if not packages: - click.echo( - "{} {} {} {}{}".format( - click.style("Running", bold=True), - click.style("$ pipenv lock", fg="yellow", bold=True), - click.style("then", bold=True), - click.style("$ pipenv sync", fg="yellow", bold=True), - click.style(".", bold=True), - ) - ) - do_lock( + # Pre-sync packages for pipdeptree resolution to avoid conflicts + do_sync( project, + dev=dev, + categories=categories, + python=python, + bare=bare, clear=clear, - pre=pre, pypi_mirror=pypi_mirror, - write=not outdated, extra_pip_args=extra_pip_args, ) - else: upgrade( project, pre=pre, @@ -80,15 +82,7 @@ def do_update( lock_only=lock_only, extra_pip_args=extra_pip_args, ) - - if outdated: - do_outdated( - project, - clear=clear, - pre=pre, - pypi_mirror=pypi_mirror, - ) - else: + # Finally sync packages after upgrade do_sync( project, dev=dev, @@ -99,6 +93,84 @@ def do_update( pypi_mirror=pypi_mirror, extra_pip_args=extra_pip_args, ) + else: + do_outdated( + project, + clear=clear, + pre=pre, + pypi_mirror=pypi_mirror, + ) + + +def get_reverse_dependencies(project) -> Dict[str, Set[Tuple[str, str]]]: + 
"""Get reverse dependencies using pipdeptree.""" + pipdeptree_path = Path(pipdeptree.__file__).parent + python_path = project.python() + cmd_args = [python_path, pipdeptree_path, "-l", "--reverse", "--json-tree"] + + c = run_command(cmd_args, is_verbose=project.s.is_verbose()) + if c.returncode != 0: + raise PipenvCmdError(c.err, c.out, c.returncode) + + try: + dep_tree = json.loads(c.stdout.strip()) + except json.JSONDecodeError: + raise JSONParseError(c.stdout, c.stderr) + + # Build reverse dependency map: package -> set of (dependent_package, required_version) + reverse_deps = defaultdict(set) + + def process_tree_node(n, parents=None): + if parents is None: + parents = [] + + package_name = n["package_name"] + required_version = n.get("required_version", "Any") + + # Add the current node to its parents' reverse dependencies + for parent in parents: + reverse_deps[parent].add((package_name, required_version)) + + # Process dependencies recursively, keeping track of parent path + for dep in n.get("dependencies", []): + process_tree_node(dep, parents + [package_name]) + + # Start processing the tree from the root nodes + for node in dep_tree: + process_tree_node(node) + + return reverse_deps + + +def check_version_conflicts( + package_name: str, + new_version: str, + reverse_deps: Dict[str, Set[Tuple[str, str]]], + lockfile: dict, +) -> Set[str]: + """ + Check if updating a package would create version conflicts with its dependents. + Returns set of conflicting packages. + """ + conflicts = set() + try: + new_version_obj = Version(new_version) + except InvalidVersion: + new_version_obj = SpecifierSet(new_version) + + for dependent, req_version in reverse_deps.get(package_name, set()): + if req_version == "Any": + continue + + try: + specifier_set = SpecifierSet(req_version) + if not specifier_set.contains(new_version_obj): + conflicts.add(dependent) + except Exception: + # If we can't parse the version requirement, assume it's a conflict + conflicts.add(dependent) + + return conflicts def upgrade( @@ -114,13 +186,33 @@ def upgrade( lock_only=False, extra_pip_args=None, ): + """Enhanced upgrade command with dependency conflict detection.""" lockfile = project.lockfile() if not pre: pre = project.settings.get("allow_prereleases") - if dev or "dev-packages" in categories: - categories = ["develop"] - elif not categories or "packages" in categories: - categories = ["default"] + if not categories: + + if dev and not packages: + categories = ["default", "develop"] + elif dev and packages: + categories = ["develop"] + else: + categories = ["default"] + elif "dev-packages" in categories: + categories.remove("dev-packages") + categories.insert(0, "develop") + elif "packages" in categories: + categories.remove("packages") + categories.insert(0, "default") + + # Get current dependency graph + try: + reverse_deps = get_reverse_dependencies(project) + except Exception as e: + err.print( + f"[red bold]Warning[/red bold]: Unable to analyze dependencies: {str(e)}" + ) + reverse_deps = {} index_name = None if index_url: @@ -131,6 +223,28 @@ def upgrade( package_args = list(packages) + [f"-e {pkg}" for pkg in editable_packages] + # Early conflict detection + conflicts_found = False + for package in package_args: + if "==" in package: + name, version = package.split("==") + conflicts = check_version_conflicts(name, version, reverse_deps, lockfile) + if conflicts: + conflicts_found = True + err.print( + f"[red bold]Error[/red bold]: Updating [bold]{name}[/bold] " + f"to version {version} would create 
conflicts with: {', '.join(sorted(conflicts))}" + ) + + if conflicts_found: + err.print( + "\nTo resolve conflicts, try:\n" + "1. Explicitly upgrade conflicting packages together\n" + "2. Use compatible versions\n" + "3. Remove version constraints from Pipfile" + ) + sys.exit(1) + requested_install_reqs = defaultdict(dict) requested_packages = defaultdict(dict) for category in categories: @@ -150,9 +264,42 @@ def upgrade( requested_packages[pipfile_category][normalized_name] = pipfile_entry requested_install_reqs[pipfile_category][normalized_name] = install_req + # Consider reverse packages in reverse_deps + if normalized_name in reverse_deps: + for dependency, req_version in reverse_deps[normalized_name]: + if req_version == "Any": + package_args.append(dependency) + pipfile_entry = project.get_pipfile_entry( + dependency, category=pipfile_category + ) + requested_packages[pipfile_category][dependency] = ( + pipfile_entry if pipfile_entry else "*" + ) + continue + + try: # Otherwise we have a specific version requirement + specifier_set = SpecifierSet(req_version) + package_args.append(f"{dependency}=={specifier_set}") + pipfile_entry = project.get_pipfile_entry( + dependency, category=pipfile_category + ) + requested_packages[pipfile_category][dependency] = ( + pipfile_entry if pipfile_entry else "*" + ) + + except Exception as e: + err.print( + f"[bold][yellow]Warning:[/yellow][/bold] " + f"Unable to parse version specifier for {dependency}: {str(e)}" + ) + if not package_args: - click.echo("Nothing to upgrade!") - sys.exit(0) + err.print("Nothing to upgrade!") + return + else: + err.print( + f"[bold][green]Upgrading[/bold][/green] {', '.join(package_args)} in [{category}] dependencies." + ) # Resolve package to generate constraints of new package data upgrade_lock_data = venv_resolve_deps( @@ -160,36 +307,37 @@ def upgrade( which=project._which, project=project, lockfile={}, - category=pipfile_category, + pipfile_category=pipfile_category, pre=pre, allow_global=system, pypi_mirror=pypi_mirror, ) if not upgrade_lock_data: - click.echo("Nothing to upgrade!") - sys.exit(0) + err.print("Nothing to upgrade!") + return complete_packages = project.parsed_pipfile.get(pipfile_category, {}) - for package_name in requested_packages[pipfile_category].keys(): - pipfile_entry = project.get_pipfile_entry( - package_name, category=pipfile_category - ) - if package_name not in complete_packages: - complete_packages.append(package_name, pipfile_entry) - else: - complete_packages[package_name] = pipfile_entry full_lock_resolution = venv_resolve_deps( complete_packages, which=project._which, project=project, lockfile={}, - category=pipfile_category, + pipfile_category=pipfile_category, pre=pre, allow_global=system, pypi_mirror=pypi_mirror, ) - # Mutate the existing lockfile with the upgrade data for the categories + + # Verify no conflicts were introduced during resolution + for package_name, package_data in full_lock_resolution.items(): + if package_name in upgrade_lock_data: + version = package_data.get("version", "").replace("==", "") + if not version: + # Either vcs or file package + continue + + # Update lockfile with verified resolution data for package_name in upgrade_lock_data: correct_package_lock = full_lock_resolution.get(package_name) if correct_package_lock: diff --git a/pipenv/utils/dependencies.py b/pipenv/utils/dependencies.py index 7fd12d2fa6..e8526cf56d 100644 --- a/pipenv/utils/dependencies.py +++ b/pipenv/utils/dependencies.py @@ -87,22 +87,18 @@ def clean_pkg_version(version): def 
get_lockfile_section_using_pipfile_category(category): if category == "dev-packages": - lockfile_section = "develop" + return "develop" elif category == "packages": - lockfile_section = "default" - else: - lockfile_section = category - return lockfile_section + return "default" + return category def get_pipfile_category_using_lockfile_section(category): if category == "develop": - lockfile_section = "dev-packages" + return "dev-packages" elif category == "default": - lockfile_section = "packages" - else: - lockfile_section = category - return lockfile_section + return "packages" + return category class HackedPythonVersion: @@ -485,7 +481,7 @@ def dependency_as_pip_install_line( else: if "#egg=" in vcs_url: vcs_url = vcs_url.split("#egg=")[0] - git_req = f"{dep_name}{extras}@ {include_vcs}{vcs_url}{ref}" + git_req = f"{dep_name}{extras} @ {include_vcs}{vcs_url}{ref}" if "subdirectory" in dep: git_req += f"#subdirectory={dep['subdirectory']}" @@ -780,7 +776,7 @@ def determine_package_name(package: InstallRequirement): elif "#egg=" in str(package): req_name = str(package).split("#egg=")[1] req_name = req_name.split("[")[0] - elif "@ " in str(package): + elif " @ " in str(package): req_name = str(package).split("@ ")[0] req_name = req_name.split("[")[0] elif package.link and package.link.scheme in REMOTE_SCHEMES: diff --git a/pipenv/utils/locking.py b/pipenv/utils/locking.py index 771466b5e7..413e2fc86c 100644 --- a/pipenv/utils/locking.py +++ b/pipenv/utils/locking.py @@ -7,9 +7,10 @@ from json import JSONDecodeError from pathlib import Path from tempfile import NamedTemporaryFile -from typing import Any, Dict, Iterator, List, Optional +from typing import Any, Dict, Iterator, List, Optional, Set, Tuple from pipenv.patched.pip._internal.req.req_install import InstallRequirement +from pipenv.utils.constants import VCS_LIST from pipenv.utils.dependencies import ( clean_resolved_dep, determine_vcs_revision_hash, @@ -43,69 +44,92 @@ def merge_markers(entry, markers): def format_requirement_for_lockfile( req: InstallRequirement, - markers_lookup, - index_lookup, - original_deps, - pipfile_entries, - hashes=None, -): - if req.specifier: - version = str(req.specifier) - else: - version = None + markers_lookup: Dict[str, str], + index_lookup: Dict[str, str], + original_deps: Dict[str, Any], + pipfile_entries: Dict[str, Any], + hashes: Optional[Set[str]] = None, +) -> Tuple[str, Dict[str, Any]]: + """Format a requirement for the lockfile with improved VCS handling.""" name = normalize_name(req.name) - index = index_lookup.get(name) - markers = req.markers - req.index = index - pipfile_entry = pipfile_entries[name] if name in pipfile_entries else {} - entry = {} + entry: Dict[str, Any] = {"name": name} + pipfile_entry = pipfile_entries.get(name, pipfile_entries.get(req.name, {})) + # Handle VCS requirements + is_vcs_dep = next(iter([vcs for vcs in VCS_LIST if vcs in pipfile_entry]), None) if req.link and req.link.is_vcs: - vcs = req.link.scheme.split("+", 1)[0] - entry["ref"] = determine_vcs_revision_hash(req, vcs, pipfile_entry.get("ref")) + is_vcs_dep = True + if is_vcs_dep: + if req.link and req.link.is_vcs: + link = req.link + else: + link = req.cached_wheel_source_link + vcs = link.scheme.split("+", 1)[0] + # Get VCS URL from original deps or normalize the link URL if name in original_deps: entry[vcs] = original_deps[name] else: - vcs_url, _ = normalize_vcs_url(req.link.url) + vcs_url, _ = normalize_vcs_url(link.url) entry[vcs] = vcs_url + + # Handle subdirectory information if 
pipfile_entry.get("subdirectory"): entry["subdirectory"] = pipfile_entry["subdirectory"] - elif req.link.subdirectory_fragment: - entry["subdirectory"] = req.link.subdirectory_fragment - if req.req: - entry["version"] = str(req.specifier) - elif version: - entry["version"] = version - elif req.link and req.link.is_file: - entry["file"] = req.link.url - if hashes: - entry["hashes"] = sorted(set(hashes)) - entry["name"] = name - if index: - entry.update({"index": index}) + elif link.subdirectory_fragment: + entry["subdirectory"] = link.subdirectory_fragment + + # Handle reference information - try multiple sources + ref = determine_vcs_revision_hash(req, vcs, pipfile_entry.get("ref")) + if ref: + entry["ref"] = ref + # Handle non-VCS requirements + else: + if req.req and req.req.specifier: + entry["version"] = str(req.req.specifier) + elif req.specifier: + entry["version"] = str(req.specifier) + if req.link and req.link.is_file: + entry["file"] = req.link.url + # Add index information + if name in index_lookup: + entry["index"] = index_lookup[name] + + # Handle markers + markers = req.markers if markers: - entry.update({"markers": str(markers)}) + entry["markers"] = str(markers) if name in markers_lookup: merge_markers(entry, markers_lookup[name]) - if isinstance(pipfile_entry, dict) and "markers" in pipfile_entry: - merge_markers(entry, pipfile_entry["markers"]) - if isinstance(pipfile_entry, dict) and "os_name" in pipfile_entry: - merge_markers(entry, f"os_name {pipfile_entry['os_name']}") - entry = translate_markers(entry) + if isinstance(pipfile_entry, dict): + if "markers" in pipfile_entry: + merge_markers(entry, pipfile_entry["markers"]) + if "os_name" in pipfile_entry: + merge_markers(entry, f"os_name {pipfile_entry['os_name']}") + + # Handle extras if req.extras: entry["extras"] = sorted(req.extras) - if isinstance(pipfile_entry, dict) and pipfile_entry.get("file"): - entry["file"] = pipfile_entry["file"] - if pipfile_entry.get("editable"): - entry["editable"] = pipfile_entry.get("editable") - entry.pop("version", None) - entry.pop("index", None) - elif isinstance(pipfile_entry, dict) and pipfile_entry.get("path"): - entry["path"] = pipfile_entry["path"] - if pipfile_entry.get("editable"): - entry["editable"] = pipfile_entry.get("editable") - entry.pop("version", None) - entry.pop("index", None) + + # Handle hashes + if hashes: + entry["hashes"] = sorted(set(hashes)) + + # Handle file/path entries from Pipfile + if isinstance(pipfile_entry, dict): + if pipfile_entry.get("file"): + entry["file"] = pipfile_entry["file"] + if pipfile_entry.get("editable"): + entry["editable"] = pipfile_entry["editable"] + entry.pop("version", None) + entry.pop("index", None) + elif pipfile_entry.get("path"): + entry["path"] = pipfile_entry["path"] + if pipfile_entry.get("editable"): + entry["editable"] = pipfile_entry["editable"] + entry.pop("version", None) + entry.pop("index", None) + + entry = translate_markers(entry) return name, entry @@ -113,7 +137,7 @@ def get_locked_dep(project, dep, pipfile_section, current_entry=None): # initialize default values is_top_level = False - # if the dependency has a name, find corresponding entry in pipfile + # # if the dependency has a name, find corresponding entry in pipfile if isinstance(dep, dict) and dep.get("name"): dep_name = pep423_name(dep["name"]) for pipfile_key, pipfile_entry in pipfile_section.items(): diff --git a/pipenv/utils/pipfile.py b/pipenv/utils/pipfile.py index 93eb0f69cc..78e43c55ce 100644 --- a/pipenv/utils/pipfile.py +++ 
b/pipenv/utils/pipfile.py @@ -64,7 +64,7 @@ def find_pipfile(max_depth=3): def ensure_pipfile( - project, validate=True, skip_requirements=False, system=False, categories=None + project, validate=True, skip_requirements=False, system=False, pipfile_categories=None ): """Creates a Pipfile for the project, if it doesn't exist.""" @@ -96,7 +96,7 @@ def ensure_pipfile( ) as st: # Import requirements.txt. try: - import_requirements(project, categories=categories) + import_requirements(project, categories=pipfile_categories) except Exception: err.print(environments.PIPENV_SPINNER_FAIL_TEXT.format("Failed...")) else: diff --git a/pipenv/utils/project.py b/pipenv/utils/project.py index 19464bb370..b6e58119fd 100644 --- a/pipenv/utils/project.py +++ b/pipenv/utils/project.py @@ -1,6 +1,5 @@ import os import sys -from functools import lru_cache from typing import Optional from pipenv import exceptions @@ -32,7 +31,7 @@ def ensure_project( skip_requirements=False, pypi_mirror=None, clear=False, - categories=None, + pipfile_categories=None, ): """Ensures both Pipfile and virtualenv exist for the project.""" @@ -92,12 +91,11 @@ def ensure_project( validate=validate, skip_requirements=skip_requirements, system=system, - categories=categories, + pipfile_categories=pipfile_categories, ) os.environ["PIP_PYTHON_PATH"] = project.python(system=system) -@lru_cache def get_setuptools_version() -> Optional["STRING_TYPE"]: try: setuptools_dist = importlib_metadata.distribution("setuptools") diff --git a/pipenv/utils/requirements.py b/pipenv/utils/requirements.py index e6a761bfed..e786180011 100644 --- a/pipenv/utils/requirements.py +++ b/pipenv/utils/requirements.py @@ -193,7 +193,7 @@ def requirement_from_lockfile( pip_line = f"-e {include_vcs}{vcs_url}{ref_str}{egg_fragment}{extras}" pip_line += f"&subdirectory={subdirectory}" if subdirectory else "" else: - pip_line = f"{package_name}{extras}@ {include_vcs}{vcs_url}{ref_str}" + pip_line = f"{package_name}{extras} @ {include_vcs}{vcs_url}{ref_str}" pip_line += f"#subdirectory={subdirectory}" if subdirectory else "" return pip_line # Handling file-sourced packages @@ -212,7 +212,7 @@ def requirement_from_lockfile( return pip_line # Handling packages from standard pypi like indexes - version = package_info.get("version", "").replace("==", "") + version = package_info.get("version", "") hashes = ( f" --hash={' --hash='.join(package_info['hashes'])}" if include_hashes and "hashes" in package_info @@ -223,7 +223,7 @@ def requirement_from_lockfile( if "extras" in package_info else "" ) - pip_line = f"{package_name}{extras}=={version}{os_markers}{markers}{hashes}" + pip_line = f"{package_name}{extras}{version}{os_markers}{markers}{hashes}" return pip_line diff --git a/pipenv/utils/resolver.py b/pipenv/utils/resolver.py index f44505fe3a..7de6a7f779 100644 --- a/pipenv/utils/resolver.py +++ b/pipenv/utils/resolver.py @@ -5,9 +5,9 @@ import sys import tempfile import warnings -from functools import lru_cache +from functools import cached_property, lru_cache from pathlib import Path -from typing import Dict, List, Optional +from typing import Any, Dict, List, Optional, Tuple, Union from pipenv import environments, resolver from pipenv.exceptions import ResolutionFailure @@ -27,6 +27,7 @@ from pipenv.patched.pip._vendor.packaging.utils import canonicalize_name from pipenv.project import Project from pipenv.utils import console, err +from pipenv.utils.dependencies import determine_vcs_revision_hash, normalize_vcs_url from pipenv.utils.fileutils import create_tracked_tempdir 
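The " @ " spacing corrections above are not cosmetic: a PEP 508 direct reference
separates the project name from its URL with "@", and determine_package_name()
now keys on the spaced form that requirement_from_lockfile() and
dependency_as_pip_install_line() emit. A minimal sketch of the round-trip with
the vendored packaging parser (the pinned ref is illustrative):

    from pipenv.patched.pip._vendor.packaging.requirements import Requirement

    line = "dataclasses-json @ git+https://github.com/lidatong/dataclasses-json.git@v0.5.7"
    req = Requirement(line)
    assert req.name == "dataclasses-json"
    assert req.url == "git+https://github.com/lidatong/dataclasses-json.git@v0.5.7"
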
from pipenv.utils.requirements import normalize_name @@ -38,7 +39,6 @@ get_lockfile_section_using_pipfile_category, is_pinned_requirement, prepare_constraint_file, - translate_markers, ) from .indexes import parse_indexes, prepare_pip_source_args from .internet import is_pypi_url @@ -109,7 +109,7 @@ def __init__( skipped=None, clear=False, pre=False, - category=None, + lockfile_category=None, original_deps=None, install_reqs=None, pipfile_entries=None, @@ -122,7 +122,7 @@ def __init__( self.hashes = {} self.clear = clear self.pre = pre - self.category = category + self.category = lockfile_category self.results = None self.markers_lookup = markers_lookup if markers_lookup is not None else {} self.index_lookup = index_lookup if index_lookup is not None else {} @@ -131,7 +131,7 @@ def __init__( self.requires_python_markers = {} self.original_deps = original_deps if original_deps is not None else {} self.install_reqs = install_reqs if install_reqs is not None else {} - self.pipfile_entries = pipfile_entries if pipfile_entries is not None else {} + self.pipfile_entries = pipfile_entries self._retry_attempts = 0 self._hash_cache = None @@ -175,7 +175,7 @@ def create( req_dir: str = None, clear: bool = False, pre: bool = False, - category: str = None, + pipfile_category: str = None, ) -> "Resolver": if not req_dir: req_dir = create_tracked_tempdir(suffix="-requirements", prefix="pipenv-") @@ -185,11 +185,11 @@ def create( markers_lookup = {} original_deps = {} install_reqs = {} - pipfile_entries = {} + pipfile_entries = project.get_pipfile_section(pipfile_category) skipped = {} if sources is None: sources = project.sources - packages = project.get_pipfile_section(category) + packages = project.get_pipfile_section(pipfile_category) constraints = set() for package_name, dep in deps.items(): # Build up the index and markers lookups if not dep: @@ -201,8 +201,7 @@ def create( install_reqs[package_name] = install_req index, extra_index, trust_host, remainder = parse_indexes(dep) if package_name in packages: - pipfile_entry = packages[package_name] - pipfile_entries[package_name] = pipfile_entry + pipfile_entry = pipfile_entries.get(package_name) if isinstance(pipfile_entry, dict): if packages[package_name].get("index"): index_lookup[canonical_package_name] = packages[package_name].get( @@ -221,7 +220,7 @@ def create( markers_lookup[package_name] = install_req.markers if is_constraint: constraints.add(dep) - # raise Exception(constraints, original_deps, install_reqs, pipfile_entries) + lockfile_category = get_lockfile_section_using_pipfile_category(pipfile_category) resolver = Resolver( set(), req_dir, @@ -232,7 +231,7 @@ def create( skipped=skipped, clear=clear, pre=pre, - category=category, + lockfile_category=lockfile_category, original_deps=original_deps, install_reqs=install_reqs, pipfile_entries=pipfile_entries, @@ -308,7 +307,7 @@ def pip_options(self): ) return pip_options - @property + @cached_property def session(self): return self.pip_command._build_session(self.pip_options) @@ -341,17 +340,6 @@ def finder(self, ignore_compatibility=False): finder._ignore_compatibility = ignore_compatibility return finder - @property - def parsed_constraints(self): - pip_options = self.pip_options - pip_options.extra_index_urls = [] - return parse_requirements( - self.prepare_constraint_file(), - finder=self.finder(), - session=self.session, - options=pip_options, - ) - @property def parsed_default_constraints(self): pip_options = self.pip_options @@ -365,8 +353,34 @@ def parsed_default_constraints(self): ) 
return set(parsed_default_constraints) + @property + def parsed_constraints(self): + """Get parsed constraints including those from default packages if needed.""" + pip_options = self.pip_options + pip_options.extra_index_urls = [] + constraints = list( + parse_requirements( + self.prepare_constraint_file(), + finder=self.finder(), + session=self.session, + options=pip_options, + ) + ) + + # Only add default constraints for dev packages if setting allows + if self.category != "default" and self.project.settings.get( + "use_default_constraints", True + ): + constraints.extend(self.parsed_default_constraints) + + return constraints + @property def default_constraints(self): + """Get constraints from default section when installing dev packages.""" + if not self.project.settings.get("use_default_constraints", True): + return set() + possible_default_constraints = [ install_req_from_parsed_requirement( c, @@ -392,14 +406,19 @@ def possible_constraints(self): @property def constraints(self): + """Get all applicable constraints.""" possible_constraints_list = self.possible_constraints constraints_list = set() for c in possible_constraints_list: constraints_list.add(c) - # Only use default_constraints when installing dev-packages - if self.category != "packages": + + # Only use default_constraints when installing dev-packages and setting allows + if self.category != "default" and self.project.settings.get( + "use_default_constraints", True + ): constraints_list |= self.default_constraints - return set(constraints_list) + + return constraints_list @contextlib.contextmanager def get_resolver(self, clear=False): @@ -434,10 +453,9 @@ def get_resolver(self, clear=False): yield resolver def resolve(self): - constraints = self.constraints with temp_environ(), self.get_resolver() as resolver: try: - results = resolver.resolve(constraints, check_supported_wheels=False) + results = resolver.resolve(self.constraints, check_supported_wheels=False) except InstallationError as e: raise ResolutionFailure(message=str(e)) else: @@ -495,45 +513,55 @@ def _fold_markers(self, dependency_tree, install_req, checked_dependencies=None) def resolve_constraints(self): from .markers import marker_from_specifier - # Build mapping of where package originates from + # Build mapping of package origins and Python requirements comes_from = {} + python_requirements = {} + for result in self.resolved_tree: + # Track package origin if isinstance(result.comes_from, InstallRequirement): comes_from[result.name] = result.comes_from else: comes_from[result.name] = "Pipfile" - # Build up the results tree with markers + # Collect Python requirements from package metadata + candidate = ( + self.finder() + .find_best_candidate(result.name, result.specifier) + .best_candidate + ) + if candidate and candidate.link.requires_python: + try: + marker = marker_from_specifier(candidate.link.requires_python) + python_requirements[result.name] = marker + except TypeError: + continue + + # Build the results tree with markers new_tree = set() for result in self.resolved_tree: - if result.markers: + # Start with any Python requirement markers + if result.name in python_requirements: + marker = python_requirements[result.name] + self.markers[result.name] = marker + result.markers = marker + if result.req: + result.req.marker = marker + elif result.markers: self.markers[result.name] = result.markers - else: - candidate = ( - self.finder() - .find_best_candidate(result.name, result.specifier) - .best_candidate - ) - if candidate: - requires_python = 
candidate.link.requires_python - if requires_python: - try: - marker = marker_from_specifier(requires_python) - self.markers[result.name] = marker - result.markers = marker - if result.req: - result.req.marker = marker - except TypeError as e: - err.print( - f"Error generating python marker for {candidate}. " - f"Is the specifier {requires_python} incorrectly quoted or otherwise wrong?" - f"Full error: {e}", - ) + if result.req: + result.req.marker = result.markers + new_tree.add(result) - # Fold markers + # Use existing fold_markers to properly combine all constraints for result in new_tree: - self._fold_markers(comes_from, result) + folded_markers = self._fold_markers(comes_from, result) + if folded_markers: + self.markers[result.name] = folded_markers + result.markers = folded_markers + if result.req: + result.req.marker = folded_markers self.resolved_tree = new_tree @@ -589,36 +617,57 @@ def resolve_hashes(self): return self.hashes def clean_skipped_result( - self, req_name: str, ireq: InstallRequirement, pipfile_entry - ): - ref = None + self, + req_name: str, + ireq: InstallRequirement, + pipfile_entry: Union[str, Dict[str, Any]], + ) -> Tuple[str, Dict[str, Any]]: + """Clean up skipped requirements with better VCS handling.""" + # Start with pipfile entry if it's a dict, otherwise create new dict + entry = pipfile_entry.copy() if isinstance(pipfile_entry, dict) else {} + entry["name"] = req_name + + # Handle VCS references if ireq.link and ireq.link.is_vcs: - ref = ireq.link.egg_fragment + vcs = ireq.link.scheme.split("+", 1)[0] - if isinstance(pipfile_entry, dict): - entry = pipfile_entry.copy() - else: - entry = {} - entry["name"] = req_name + # Try to get reference from multiple sources + ref = determine_vcs_revision_hash(ireq, vcs, ireq.link) + + if ref: + entry["ref"] = ref + elif ireq.link.hash: + entry["ref"] = ireq.link.hash + + # Ensure VCS URL is present + if vcs not in entry: + vcs_url, _ = normalize_vcs_url(ireq.link.url) + entry[vcs] = vcs_url + + # Remove version if editable if entry.get("editable", False) and entry.get("version"): del entry["version"] - ref = ref if ref is not None else entry.get("ref") - if ref: - entry["ref"] = ref + + # Add hashes collected_hashes = self.collect_hashes(ireq) if collected_hashes: entry["hashes"] = sorted(set(collected_hashes)) + return req_name, entry - def clean_results(self): - reqs = [(ireq,) for ireq in self.resolved_tree] + def clean_results(self) -> List[Dict[str, Any]]: + """Clean all results including both resolved and skipped packages.""" results = {} - for (ireq,) in reqs: + + # Handle resolved packages + for ireq in self.resolved_tree: if normalize_name(ireq.name) in self.skipped: continue + collected_hashes = self.hashes.get(ireq, set()) if collected_hashes: collected_hashes = sorted(collected_hashes) + name, entry = format_requirement_for_lockfile( ireq, self.markers_lookup, @@ -627,23 +676,25 @@ def clean_results(self): self.pipfile_entries, collected_hashes, ) - entry = translate_markers(entry) + if name in results: results[name].update(entry) else: results[name] = entry + + # Handle skipped packages for req_name in self.skipped: install_req = self.install_reqs[req_name] - name, entry = self.clean_skipped_result( - req_name, install_req, self.pipfile_entries[req_name] - ) - entry = translate_markers(entry) + pipfile_entry = self.pipfile_entries.get(req_name, {}) + + name, entry = self.clean_skipped_result(req_name, install_req, pipfile_entry) + if name in results: results[name].update(entry) else: results[name] = 
entry - results = list(results.values()) - return results + + return list(results.values()) def _show_warning(message, category, filename, lineno, line): @@ -666,7 +717,7 @@ def actually_resolve_deps( sources, clear, pre, - category, + pipfile_category, req_dir, ): with warnings.catch_warnings(record=True) as warning_list: @@ -679,7 +730,7 @@ def actually_resolve_deps( req_dir, clear, pre, - category, + pipfile_category, ) resolver.resolve() hashes = resolver.resolve_hashes @@ -727,7 +778,7 @@ def venv_resolve_deps( deps, which, project, - category, + pipfile_category, pre=False, clear=False, allow_global=False, @@ -760,21 +811,21 @@ def venv_resolve_deps( :return: The lock data :rtype: dict """ - lockfile_section = get_lockfile_section_using_pipfile_category(category) + lockfile_category = get_lockfile_section_using_pipfile_category(pipfile_category) if not deps: if not project.pipfile_exists: return {} - deps = project.parsed_pipfile.get(category, {}) + deps = project.parsed_pipfile.get(pipfile_category, {}) if not deps: return {} if not pipfile: - pipfile = getattr(project, category, {}) + pipfile = getattr(project, pipfile_category, {}) if lockfile is None: - lockfile = project.lockfile(categories=[category]) + lockfile = project.lockfile(categories=[pipfile_category]) if old_lock_data is None: - old_lock_data = lockfile.get(lockfile_section, {}) + old_lock_data = lockfile.get(lockfile_category, {}) req_dir = create_tracked_tempdir(prefix="pipenv", suffix="requirements") results = [] with temp_environ(): @@ -790,7 +841,7 @@ def venv_resolve_deps( if extra_pip_args: os.environ["PIPENV_EXTRA_PIP_ARGS"] = json.dumps(extra_pip_args) with console.status( - f"Locking {category}...", spinner=project.s.PIPENV_SPINNER + f"Locking {pipfile_category}...", spinner=project.s.PIPENV_SPINNER ) as st: # This conversion is somewhat slow on local and file-type requirements since # we now download those requirements / make temporary folders to perform @@ -811,7 +862,7 @@ def venv_resolve_deps( write=False, requirements_dir=req_dir, packages=deps, - category=category, + pipfile_category=pipfile_category, constraints=deps, ) if results: @@ -834,9 +885,9 @@ def venv_resolve_deps( cmd.append("--clear") if allow_global: cmd.append("--system") - if category: + if pipfile_category: cmd.append("--category") - cmd.append(category) + cmd.append(pipfile_category) if project.s.is_verbose(): cmd.append("--verbose") target_file = tempfile.NamedTemporaryFile( @@ -879,10 +930,10 @@ def venv_resolve_deps( ) err.print(f"Output: {c.stdout.strip()}") err.print(f"Error: {c.stderr.strip()}") - if lockfile_section not in lockfile: - lockfile[lockfile_section] = {} + if lockfile_category not in lockfile: + lockfile[lockfile_category] = {} return prepare_lockfile( - project, results, pipfile, lockfile[lockfile_section], old_lock_data + project, results, pipfile, lockfile[lockfile_category], old_lock_data ) @@ -894,7 +945,7 @@ def resolve_deps( python=False, clear=False, pre=False, - category=None, + pipfile_category=None, allow_global=False, req_dir=None, ): @@ -922,7 +973,7 @@ def resolve_deps( sources, clear, pre, - category, + pipfile_category, req_dir=req_dir, ) except RuntimeError: @@ -948,7 +999,7 @@ def resolve_deps( sources, clear, pre, - category, + pipfile_category, req_dir=req_dir, ) except RuntimeError: diff --git a/pyproject.toml b/pyproject.toml index 325381e01b..7b0eb1a1dc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -198,11 +198,11 @@ keep_full_version = true max_supported_python = "3.13" 
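For orientation before the test changes below: get_reverse_dependencies() in
pipenv/routines/update.py walks pipdeptree's --reverse --json-tree output, in
which each node's children are its dependents and "required_version" is the
specifier a dependent places on its parent. A self-contained sketch of that
walk (package names and specifiers here are illustrative, not part of this
patch):

    from collections import defaultdict

    # Shape of pipdeptree's reverse JSON tree: urllib3's child is the
    # dependent package requests, with the specifier requests places on it.
    dep_tree = [
        {
            "package_name": "urllib3",
            "dependencies": [
                {
                    "package_name": "requests",
                    "required_version": ">=1.21.1,<3",
                    "dependencies": [],
                }
            ],
        }
    ]

    reverse_deps = defaultdict(set)

    def process_tree_node(node, parents=None):
        parents = parents or []
        # Record this node against every ancestor that depends on it
        for parent in parents:
            reverse_deps[parent].add(
                (node["package_name"], node.get("required_version", "Any"))
            )
        for dep in node.get("dependencies", []):
            process_tree_node(dep, parents + [node["package_name"]])

    for node in dep_tree:
        process_tree_node(node)

    assert reverse_deps == {"urllib3": {("requests", ">=1.21.1,<3")}}
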
[tool.pytest.ini_options] -addopts = "-ra" +addopts = "-ra --no-cov" plugins = "xdist" testpaths = [ "tests" ] # Add vendor and patched in addition to the default list of ignored dirs -# Additionally, ignore tasks, news, test subdirectories and peeps directory +# Additionally, ignore tasks, news, test subdirectories norecursedirs = [ ".*", "build", @@ -218,7 +218,6 @@ norecursedirs = [ "docs", "tests/test_artifacts", "tests/pypi", - "peeps", ] filterwarnings = [ ] # These are not all the custom markers, but most of the ones with repeat uses diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py index 2ffc187e2f..b8dadf5ced 100644 --- a/tests/integration/test_cli.py +++ b/tests/integration/test_cli.py @@ -1,3 +1,4 @@ +import json import os import re import sys @@ -108,60 +109,53 @@ def test_pipenv_graph(pipenv_instance_pypi): @pytest.mark.cli def test_pipenv_graph_reverse(pipenv_instance_private_pypi): from pipenv.cli import cli - from pipenv.vendor.click.testing import ( - CliRunner, - ) # not thread safe but graph is a tricky test + from pipenv.vendor.click.testing import CliRunner with pipenv_instance_private_pypi() as p: c = p.pipenv("install tablib==0.13.0") assert c.returncode == 0 + cli_runner = CliRunner(mix_stderr=False) - c = cli_runner.invoke(cli, "graph --reverse") + c = cli_runner.invoke(cli, "graph --reverse --json-tree") assert c.exit_code == 0 - output = c.stdout - requests_dependency = [ - ("backports.csv", "backports.csv"), - ("odfpy", "odfpy"), - ("openpyxl", "openpyxl>=2.4.0"), - ("pyyaml", "pyyaml"), - ("xlrd", "xlrd"), - ("xlwt", "xlwt"), - ] - - for dep_name, dep_constraint in requests_dependency: - pat = rf"{dep_name}==[\d.]+" - dep_match = re.search(pat, output, flags=re.MULTILINE | re.IGNORECASE) - assert dep_match is not None, f"{pat} not found in {output}" - - # openpyxl should be indented - if dep_name == "openpyxl": - openpyxl_dep = re.search( - r"^openpyxl", output, flags=re.MULTILINE | re.IGNORECASE - ) - assert ( - openpyxl_dep is None - ), f"openpyxl should not appear at beginning of lines in {output}" - assert re.search(r"openpyxl==2\.5\.4\s*\[requires:\s*et[-_]xmlfile\]", output, flags=re.MULTILINE | re.IGNORECASE) - else: - dep_match = re.search( - rf"^[ -]*{dep_name}==[\d.]+$", - output, - flags=re.MULTILINE | re.IGNORECASE, - ) - assert ( - dep_match is not None - ), f"{dep_name} not found at beginning of line in {output}" - - dep_requests_match = re.search( - rf"└── tablib==0.13.0 \[requires: {dep_constraint}", - output, - flags=re.MULTILINE | re.IGNORECASE, - ) - assert ( - dep_requests_match is not None - ), f"constraint {dep_constraint} not found in {output}" - assert dep_requests_match.start() > dep_match.start() + output = c.stdout + try: + json_output = json.loads(output) + except json.JSONDecodeError: + pytest.fail(f"Failed to parse JSON from output:\n{output}") + + # Define the expected dependencies and their structure + expected_dependencies = { + "backports.csv": ["tablib"], + "et_xmlfile": ["openpyxl"], + "jdcal": ["openpyxl"], + "odfpy": ["tablib"], + "PyYAML": ["tablib"], + "xlrd": ["tablib"], + "xlwt": ["tablib"], + } + + # Helper function to find a dependency in the JSON tree + def find_dependency(package_name, tree): + for dep in tree: + if dep["package_name"] == package_name: + return dep + if dep["dependencies"]: + found = find_dependency(package_name, dep["dependencies"]) + if found: + return found + return None + + # Check each expected dependency in the JSON output + for dep_name, sub_deps in 
expected_dependencies.items(): + dep = find_dependency(dep_name, json_output) + assert dep is not None, f"{dep_name} not found in JSON output:\n{json_output}" + + # Check for sub-dependencies if any + for sub_dep in sub_deps: + sub_dep_found = find_dependency(sub_dep, dep.get("dependencies", [])) + assert sub_dep_found is not None, f"{sub_dep} not found under {dep_name} in JSON output:\n{json_output}" @pytest.mark.skip( diff --git a/tests/integration/test_install_basic.py b/tests/integration/test_install_basic.py index dd244306cb..9522b6ae10 100644 --- a/tests/integration/test_install_basic.py +++ b/tests/integration/test_install_basic.py @@ -191,7 +191,7 @@ def test_alternative_version_specifier(pipenv_instance_private_pypi): """.strip() f.write(contents) - c = p.pipenv("install") + c = p.pipenv("install -v") assert c.returncode == 0 assert "six" in p.lockfile["default"] diff --git a/tests/integration/test_install_markers.py b/tests/integration/test_install_markers.py index 0fb82c22dd..d3ffd79254 100644 --- a/tests/integration/test_install_markers.py +++ b/tests/integration/test_install_markers.py @@ -1,4 +1,5 @@ import os +import sys import pytest from flaky import flaky @@ -134,13 +135,18 @@ def test_global_overrides_environment_markers(pipenv_instance_private_pypi): @flaky @pytest.mark.markers @pytest.mark.complex +@pytest.mark.skipif( + sys.version_info[:2] == (3, 8), reason="Test package that gets installed is different on 3.8" +) def test_resolver_unique_markers(pipenv_instance_pypi): - """vcrpy has a dependency on `yarl` which comes with a marker - of 'python version in "3.4, 3.5, 3.6" - this marker duplicates itself: + """Test that markers are properly cleaned and not duplicated when resolving + dependencies. Use vcrpy as an example package that pulls in dependencies + with Python version markers. - 'yarl; python version in "3.4, 3.5, 3.6"; python version in "3.4, 3.5, 3.6"' + This test verifies that even if a package ends up with duplicate markers like: + 'yarl; python_version >= "3.9"; python_version >= "3.9"' - This verifies that we clean that successfully. + The resolver will clean and deduplicate them appropriately. 
""" with pipenv_instance_pypi() as p: c = p.pipenv("install vcrpy==2.0.1") @@ -148,12 +154,9 @@ def test_resolver_unique_markers(pipenv_instance_pypi): assert "yarl" in p.lockfile["default"] yarl = p.lockfile["default"]["yarl"] assert "markers" in yarl - # Two possible marker sets are ok here - assert yarl["markers"] in [ - "python_version in '3.4, 3.5, 3.6'", - "python_version >= '3.4'", - "python_version >= '3.5'", # yarl 1.3.0 requires python 3.5.3 - ] + # Check for a valid Python version marker + # yarl >=1.16.0 (Oct 2024) requires Python >=3.9 + assert yarl["markers"] == "python_version >= '3.9'" @flaky diff --git a/tests/integration/test_install_vcs.py b/tests/integration/test_install_vcs.py index 390e1782b4..f1ddf7487f 100644 --- a/tests/integration/test_install_vcs.py +++ b/tests/integration/test_install_vcs.py @@ -1,4 +1,5 @@ import os +from pathlib import Path import pytest @@ -43,3 +44,30 @@ def test_install_github_vcs_with_credentials(pipenv_instance_pypi, use_credentia # Verify that the package is installed and usable c = p.pipenv("run python -c \"import dataclass_factory\"") assert c.returncode == 0, f"Failed to import library: {c.stderr}" + + +@pytest.mark.vcs +@pytest.mark.urls +@pytest.mark.install +@pytest.mark.needs_internet +def test_install_vcs_ref_by_commit_hash(pipenv_instance_private_pypi): + with pipenv_instance_private_pypi() as p: + c = p.pipenv("install -e git+https://github.com/benjaminp/six.git@5efb522b0647f7467248273ec1b893d06b984a59#egg=six") + assert c.returncode == 0 + assert "six" in p.pipfile["packages"] + assert "six" in p.lockfile["default"] + assert ( + p.lockfile["default"]["six"]["ref"] + == "5efb522b0647f7467248273ec1b893d06b984a59" + ) + pipfile = Path(p.pipfile_path) + new_content = pipfile.read_text().replace("5efb522b0647f7467248273ec1b893d06b984a59", "15e31431af97e5e64b80af0a3f598d382bcdd49a") + pipfile.write_text(new_content) + c = p.pipenv("lock") + assert c.returncode == 0 + assert ( + p.lockfile["default"]["six"]["ref"] + == "15e31431af97e5e64b80af0a3f598d382bcdd49a" + ) + assert "six" in p.pipfile["packages"] + assert "six" in p.lockfile["default"] diff --git a/tests/integration/test_lock.py b/tests/integration/test_lock.py index 1fb1cc2b0f..7d7e8e04e3 100644 --- a/tests/integration/test_lock.py +++ b/tests/integration/test_lock.py @@ -255,7 +255,7 @@ def test_lock_extras_without_install(pipenv_instance_private_pypi): assert c.returncode == 0 assert "requests" in p.lockfile["default"] assert "pysocks" in p.lockfile["default"] - assert "markers" not in p.lockfile["default"]["pysocks"] + assert "markers" in p.lockfile["default"]["pysocks"] c = p.pipenv("lock") assert c.returncode == 0 @@ -558,7 +558,7 @@ def test_lock_nested_vcs_direct_url(pipenv_instance_pypi): "subdirectory": "parent_folder/pep508-package", }, ) - c = p.pipenv("lock") + c = p.pipenv("lock -v") assert c.returncode == 0 assert "git" in p.lockfile["default"]["pep508-package"] assert "sibling-package" in p.lockfile["default"] @@ -588,7 +588,7 @@ def test_lock_package_with_compatible_release_specifier(pipenv_instance_private_ @pytest.mark.install def test_default_lock_overwrite_dev_lock(pipenv_instance_pypi): with pipenv_instance_pypi() as p: - c = p.pipenv("install 'click==6.7'") + c = p.pipenv("install click==6.7") assert c.returncode == 0 c = p.pipenv("install -d flask") assert c.returncode == 0 diff --git a/tests/integration/test_lockfile.py b/tests/integration/test_lockfile.py index 285e30465c..daa0a9b568 100644 --- a/tests/integration/test_lockfile.py +++ 
b/tests/integration/test_lockfile.py @@ -40,7 +40,7 @@ def test_git_branch_contains_slashes(pipenv_instance_pypi, pypi_lockfile): deps, include_hashes=False, include_markers=True ) assert pip_installable_lines == [ - "google-api-python-client@ git+https://github.com/thehesiod/google-api-python-client.git@03803c21fc13a345e978f32775b2f2fa23c8e706" + "google-api-python-client @ git+https://github.com/thehesiod/google-api-python-client.git@03803c21fc13a345e978f32775b2f2fa23c8e706" ] @@ -62,5 +62,5 @@ def test_git_branch_contains_subdirectory_fragment(pipenv_instance_pypi, pypi_lo deps, include_hashes=False, include_markers=True ) assert pip_installable_lines == [ - "pep508_package@ git+https://github.com/techalchemy/test-project.git@03803c21fc13a345e978f32775b2f2fa23c8e706#subdirectory=parent_folder/pep508-package" + "pep508_package @ git+https://github.com/techalchemy/test-project.git@03803c21fc13a345e978f32775b2f2fa23c8e706#subdirectory=parent_folder/pep508-package" ] diff --git a/tests/integration/test_requirements.py b/tests/integration/test_requirements.py index 1ff97ef670..c761ded1fd 100644 --- a/tests/integration/test_requirements.py +++ b/tests/integration/test_requirements.py @@ -346,7 +346,7 @@ def test_requirements_generates_requirements_from_lockfile_without_env_var_expan True, True, [ - "pyjwt[crypto]@ git+https://github.com/jpadilla/pyjwt.git@7665aa625506a11bae50b56d3e04413a3dc6fdf8" + "pyjwt[crypto] @ git+https://github.com/jpadilla/pyjwt.git@7665aa625506a11bae50b56d3e04413a3dc6fdf8" ], ), ], diff --git a/tests/integration/test_uninstall.py b/tests/integration/test_uninstall.py index 5f998e8688..5529179aec 100644 --- a/tests/integration/test_uninstall.py +++ b/tests/integration/test_uninstall.py @@ -123,7 +123,7 @@ def test_uninstall_all_dev(pipenv_instance_private_pypi): """ f.write(contents) - c = p.pipenv("install --dev") + c = p.pipenv("install -v --dev") assert c.returncode == 0 assert "tablib" in p.pipfile["packages"] @@ -134,7 +134,7 @@ def test_uninstall_all_dev(pipenv_instance_private_pypi): assert "six" in p.lockfile["develop"] assert c.returncode == 0 - c = p.pipenv("uninstall --all-dev") + c = p.pipenv("uninstall -v --all-dev") assert c.returncode == 0 assert p.pipfile["dev-packages"] == {} assert "jinja2" not in p.lockfile["develop"] diff --git a/tests/integration/test_upgrade.py b/tests/integration/test_upgrade.py index b4f4dbc9bb..172f16d374 100644 --- a/tests/integration/test_upgrade.py +++ b/tests/integration/test_upgrade.py @@ -1,3 +1,6 @@ +import json +import os + import pytest @@ -50,3 +53,52 @@ def test_category_not_sorted_without_directive(pipenv_instance_private_pypi): "atomicwrites", "six", ] + + +@pytest.mark.cli +def test_pipenv_dependency_incompatibility_resolution(pipenv_instance_pypi): + from pipenv.cli import cli + from pipenv.vendor.click.testing import CliRunner + + with pipenv_instance_pypi() as p: + # Step 1: Install initial dependency version + c = p.pipenv("install google-api-core==2.18.0") + assert c.returncode == 0, f"Failed to install google-api-core: {c.stderr}" + + # Ensure initial state + lockfile_path = os.path.join(p.path, "Pipfile.lock") + with open(lockfile_path) as lockfile: + lock_data = json.load(lockfile) + assert "google-api-core" in lock_data["default"] + assert lock_data["default"]["google-api-core"]["version"] == "==2.18.0" + + # Step 2: Update Pipfile to allow any version of google-api-core + pipfile_path = os.path.join(p.path, "Pipfile") + with open(pipfile_path) as pipfile: + pipfile_content = 
pipfile.read() + + updated_pipfile_content = pipfile_content.replace("google-api-core = \"==2.18.0\"", "google-api-core = \"*\"") + with open(pipfile_path, "w") as pipfile: + pipfile.write(updated_pipfile_content) + + # Step 3: Update protobuf to an incompatible version + cli_runner = CliRunner(mix_stderr=False) + c = cli_runner.invoke(cli, "update protobuf==5.27.5") + assert c.exit_code == 0, f"Failed to update protobuf: {c.stderr}" + + # Step 4: Check the lockfile for incompatible dependencies + with open(lockfile_path) as lockfile: + lock_data = json.load(lockfile) + + # Check if google-api-core is still at the old version + google_api_core_version = lock_data["default"].get("google-api-core", {}).get("version", "") + protobuf_version = lock_data["default"].get("protobuf", {}).get("version", "") + + assert google_api_core_version != "==2.18.0", ( + "google-api-core was not updated to a compatible version despite the protobuf update" + ) + assert protobuf_version == "==5.27.5", "protobuf was not updated correctly" + + # Step 5: Run pipenv lock to check for dependency resolution errors + c = cli_runner.invoke(cli, "lock") + assert c.exit_code == 0, f"Failed to run pipenv lock: {c.stderr}" diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index b95c285240..337b1816c8 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -27,7 +27,7 @@ } }, { - "dataclasses-json": "dataclasses-json@ git+https://github.com/lidatong/dataclasses-json.git@v0.5.7" + "dataclasses-json": "dataclasses-json @ git+https://github.com/lidatong/dataclasses-json.git@v0.5.7" }, ), ( @@ -38,7 +38,7 @@ } }, { - "dataclasses-json": "dataclasses-json@ git+https://github.com/lidatong/dataclasses-json.git@v0.5.7" + "dataclasses-json": "dataclasses-json @ git+https://github.com/lidatong/dataclasses-json.git@v0.5.7" }, ), ( @@ -63,7 +63,7 @@ } }, { - "requests": "requests[security]@ git+https://github.com/requests/requests.git@main" + "requests": "requests[security] @ git+https://github.com/requests/requests.git@main" }, ), ] @@ -141,7 +141,7 @@ def test_convert_deps_to_pip_extras_no_version(): } }, { - "uvicorn": "uvicorn[standard]@ git+https://github.com/encode/uvicorn.git@master" + "uvicorn": "uvicorn[standard] @ git+https://github.com/encode/uvicorn.git@master" }, ), ],
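
The upgrade test above is the end-to-end counterpart of the early conflict
detection added in pipenv/routines/update.py. A condensed sketch of that check,
with a specifier approximating (not quoting) the pin google-api-core 2.18.0
places on protobuf:

    from pipenv.patched.pip._vendor.packaging.specifiers import SpecifierSet
    from pipenv.patched.pip._vendor.packaging.version import Version

    # Reverse map in the shape produced by get_reverse_dependencies().
    reverse_deps = {"protobuf": {("google-api-core", ">=3.19.5,<5.0.0.dev0")}}

    def check_version_conflicts(package_name, new_version, reverse_deps):
        conflicts = set()
        new_version_obj = Version(new_version)
        for dependent, req_version in reverse_deps.get(package_name, set()):
            if req_version == "Any":
                continue
            # A dependent conflicts when the proposed version escapes its pin
            if not SpecifierSet(req_version).contains(new_version_obj):
                conflicts.add(dependent)
        return conflicts

    # protobuf 5.27.5 falls outside the pin, so either google-api-core must be
    # re-resolved to a compatible release (what the test asserts) or the
    # upgrade is rejected up front with the "would create conflicts with" error.
    assert check_version_conflicts("protobuf", "5.27.5", reverse_deps) == {
        "google-api-core"
    }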