Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Invoke the resolver in the same process as pipenv rather than utilizing subprocess. #5787

Merged
merged 12 commits into from
Jul 11, 2023
Merged
1 change: 1 addition & 0 deletions news/5787.bugfix.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Invoke the resolver in the same process as pipenv rather than utilizing subprocess.
7 changes: 1 addition & 6 deletions pipenv/environment.py
Original file line number Diff line number Diff line change
Expand Up @@ -542,12 +542,7 @@ def get_distributions(self) -> Generator[pkg_resources.Distribution, None, None]
:rtype: iterator
"""

pip_target_dir = os.environ.get("PIP_TARGET")
libdirs = (
[pip_target_dir]
if pip_target_dir
else self.base_paths["libdirs"].split(os.pathsep)
)
libdirs = self.base_paths["libdirs"].split(os.pathsep)
dists = (pkg_resources.find_distributions(libdir) for libdir in libdirs)
yield from itertools.chain.from_iterable(dists)

Expand Down
14 changes: 7 additions & 7 deletions pipenv/patched/pip/_internal/req/constructors.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,13 +95,13 @@ def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:

link = Link(url)

# if not link.is_vcs:
# backends = ", ".join(vcs.all_schemes)
# raise InstallationError(
# f"{editable_req} is not a valid editable requirement. "
# f"It should either be a path to a local project or a VCS URL "
# f"(beginning with {backends})."
# )
if not link.is_vcs:
backends = ", ".join(vcs.all_schemes)
raise InstallationError(
f"{editable_req} is not a valid editable requirement. "
f"It should either be a path to a local project or a VCS URL "
f"(beginning with {backends})."
)

package_name = link.egg_fragment
if not package_name:
Expand Down
105 changes: 9 additions & 96 deletions pipenv/resolver.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,11 +140,9 @@ def __init__(
self._parents_in_pipfile = []

@staticmethod
def make_requirement(name=None, entry=None, from_ireq=False):
def make_requirement(name=None, entry=None):
from pipenv.vendor.requirementslib.models.requirements import Requirement

if from_ireq:
return Requirement.from_ireq(entry)
return Requirement.from_pipfile(name, entry)

@classmethod
Expand Down Expand Up @@ -446,58 +444,6 @@ def get_parent_deps(self, unnest=False):
parents.extend(parent.flattened_parents)
return parents

def ensure_least_updates_possible(self):
    """
    Mutate the current entry to ensure that we are making the smallest amount of
    changes possible to the existing lockfile -- this will keep the old locked
    versions of packages if they satisfy the new constraints.

    :return: None
    """
    constraints = self.get_constraints()
    # These flags must be plain locals: the previous implementation assigned
    # ``self.can_use_original`` / ``self.can_use_updated`` inside the loop,
    # which left the locals read below permanently True and made the
    # "keep the original lockfile entry" branch effectively unconditional.
    can_use_original = True
    can_use_updated = True
    satisfied_by_versions = set()
    # Track the last cleaned "satisfied_by" version seen; ``None`` means no
    # constraint reported one (the old code could raise NameError here).
    satisfied_by = None
    for constraint in constraints:
        if not constraint.specifier.contains(self.original_version):
            can_use_original = False
        if not constraint.specifier.contains(self.updated_version):
            can_use_updated = False
        satisfied_by_value = getattr(constraint, "satisfied_by", None)
        if satisfied_by_value:
            satisfied_by = "{}".format(
                self.clean_specifier(str(satisfied_by_value.version))
            )
            satisfied_by_versions.add(satisfied_by)
    if can_use_original:
        # The previously locked entry still satisfies every constraint.
        self.entry_dict = self.lockfile_dict.copy()
    elif can_use_updated:
        if len(satisfied_by_versions) == 1:
            self.entry_dict["version"] = next(
                iter(sat_by for sat_by in satisfied_by_versions if sat_by), None
            )
        hashes = None
        if satisfied_by is not None and self.lockfile_entry.specifiers == satisfied_by:
            ireq = self.lockfile_entry.as_ireq
            if (
                not self.lockfile_entry.hashes
                and self.resolver._should_include_hash(ireq)
            ):
                hashes = self.resolver.get_hash(ireq)
            else:
                hashes = self.lockfile_entry.hashes
        else:
            # NOTE(review): ``constraint`` here is the last value from the
            # loop above; this branch is only reachable when at least one
            # constraint existed, so the name is bound -- but confirm that
            # hashing the *last* constraint is the intended behavior.
            if self.resolver._should_include_hash(constraint):
                hashes = self.resolver.get_hash(constraint)
        if hashes:
            self.entry_dict["hashes"] = list(hashes)
            self._entry.hashes = frozenset(hashes)
    else:
        # Check for any parents, since they depend on this and the currently
        # installed versions are not compatible with the new version, so
        # we will need to update the top level dependency if possible.
        self.check_flattened_parents()
def get_constraints(self):
"""
Retrieve all of the relevant constraints, aggregated from the pipfile, resolver,
Expand Down Expand Up @@ -668,7 +614,7 @@ def parse_packages(packages, pre, clear, system, requirements_dir=None):


def resolve_packages(
pre, clear, verbose, system, write, requirements_dir, packages, category
pre, clear, verbose, system, requirements_dir, packages, category, constraints=None
):
from pipenv.utils.internet import create_mirror_source, replace_pypi_sources
from pipenv.utils.resolver import resolve_deps
Expand All @@ -679,6 +625,9 @@ def resolve_packages(
else None
)

if constraints:
packages += constraints

def resolve(
packages, pre, project, sources, clear, system, category, requirements_dir=None
):
Expand Down Expand Up @@ -713,43 +662,9 @@ def resolve(
requirements_dir=requirements_dir,
)
results = clean_results(results, resolver, project, category)
if write:
with open(write, "w") as fh:
if not results:
json.dump([], fh)
else:
json.dump(results, fh)
else:
print("RESULTS:")
if results:
print(json.dumps(results))
else:
print(json.dumps([]))


def _main(
pre,
clear,
verbose,
system,
write,
requirements_dir,
packages,
parse_only=False,
category=None,
):
if parse_only:
parse_packages(
packages,
pre=pre,
clear=clear,
system=system,
requirements_dir=requirements_dir,
)
else:
resolve_packages(
pre, clear, verbose, system, write, requirements_dir, packages, category
)
if results:
return results
return []


def main(argv=None):
Expand All @@ -767,15 +682,13 @@ def main(argv=None):
os.environ["PYTHONIOENCODING"] = "utf-8"
os.environ["PYTHONUNBUFFERED"] = "1"
parsed = handle_parsed_args(parsed)
_main(
resolve_packages(
parsed.pre,
parsed.clear,
parsed.verbose,
parsed.system,
parsed.write,
parsed.requirements_dir,
parsed.packages,
parse_only=parsed.parse_only,
category=parsed.category,
)

Expand Down
2 changes: 1 addition & 1 deletion pipenv/routines/graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def do_graph(project, bare=False, json=False, json_tree=False, reverse=False):

pipdeptree_path = os.path.dirname(pipdeptree.__file__.rstrip("cdo"))
try:
python_path = project._which("python")
python_path = project.python()
except AttributeError:
click.echo(
"{}: {}".format(
Expand Down
2 changes: 0 additions & 2 deletions pipenv/utils/dependencies.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,8 +121,6 @@ def get_vcs_deps(project=None, dev=False, pypi_mirror=None, packages=None, reqs=
Requirement,
)

# from distutils.sysconfig import get_python_lib
# sys.path = [repo.checkout_directory, "", ".", get_python_lib(plat_specific=0)]
commit_hash = repo.commit_hash
name = requirement.normalized_name
lockfile[name] = requirement.pipfile_entry[1]
Expand Down
87 changes: 27 additions & 60 deletions pipenv/utils/resolver.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,11 @@
import contextlib
import hashlib
import json
import os
import subprocess
import sys
import tempfile
import warnings
from functools import lru_cache
from html.parser import HTMLParser
from pathlib import Path
from typing import Dict, List, Optional, Set, Tuple, Union
from urllib import parse

Expand All @@ -30,6 +27,7 @@
from pipenv.patched.pip._internal.utils.temp_dir import global_tempdir_manager
from pipenv.patched.pip._vendor import pkg_resources, rich
from pipenv.project import Project
from pipenv.resolver import resolve_packages
from pipenv.vendor import click
from pipenv.vendor.requirementslib.fileutils import create_tracked_tempdir, open_file
from pipenv.vendor.requirementslib.models.requirements import Line, Requirement
Expand Down Expand Up @@ -57,7 +55,7 @@
from .indexes import parse_indexes, prepare_pip_source_args
from .internet import _get_requests_session, is_pypi_url
from .locking import format_requirement_for_lockfile, prepare_lockfile
from .shell import make_posix, subprocess_run, temp_environ
from .shell import subprocess_run, temp_environ

console = rich.console.Console()
err = rich.console.Console(stderr=True)
Expand Down Expand Up @@ -282,13 +280,13 @@ def parse_line(
project = Project()
index, extra_index, trust_host, remainder = parse_indexes(line)
line = " ".join(remainder)
req: Requirement = None
try:
req = Requirement.from_line(line)
except ValueError:
direct_url = DIRECT_URL_RE.match(line)
if direct_url:
line = "{}#egg={}".format(line, direct_url.groupdict()["name"])
name = direct_url.groupdict()["name"]
line = f"{name}@ {line}"
try:
req = Requirement.from_line(line)
except ValueError:
Expand Down Expand Up @@ -676,7 +674,9 @@ def constraints(self):
def get_resolver(self, clear=False):
from pipenv.patched.pip._internal.utils.temp_dir import TempDirectory

with global_tempdir_manager(), get_build_tracker() as build_tracker, TempDirectory() as directory:
with global_tempdir_manager(), get_build_tracker() as build_tracker, TempDirectory(
globally_managed=True
) as directory:
pip_options = self.pip_options
finder = self.finder
wheel_cache = WheelCache(pip_options.cache_dir)
Expand Down Expand Up @@ -952,11 +952,8 @@ def actually_resolve_deps(
clear,
pre,
category,
req_dir=None,
req_dir,
):
if not req_dir:
req_dir = create_tracked_tempdir(suffix="-requirements", prefix="pipenv-")

with warnings.catch_warnings(record=True) as warning_list:
resolver = Resolver.create(
deps,
Expand Down Expand Up @@ -1047,8 +1044,6 @@ def venv_resolve_deps(
:return: The lock data
:rtype: dict
"""
from pipenv import resolver

lockfile_section = get_lockfile_section_using_pipfile_category(category)

if not deps:
Expand All @@ -1063,30 +1058,11 @@ def venv_resolve_deps(
if lockfile is None:
lockfile = project.lockfile(categories=[category])
req_dir = create_tracked_tempdir(prefix="pipenv", suffix="requirements")
cmd = [
which("python", allow_global=allow_global),
Path(resolver.__file__.rstrip("co")).as_posix(),
]
if pre:
cmd.append("--pre")
if clear:
cmd.append("--clear")
if allow_global:
cmd.append("--system")
if category:
cmd.append("--category")
cmd.append(category)
target_file = tempfile.NamedTemporaryFile(
prefix="resolver", suffix=".json", delete=False
)
target_file.close()
cmd.extend(["--write", make_posix(target_file.name)])
results = []
with temp_environ():
os.environ.update({k: str(val) for k, val in os.environ.items()})
if pypi_mirror:
os.environ["PIPENV_PYPI_MIRROR"] = str(pypi_mirror)
os.environ["PIPENV_VERBOSITY"] = str(project.s.PIPENV_VERBOSITY)
os.environ["PIPENV_REQ_DIR"] = req_dir
os.environ["PIP_NO_INPUT"] = "1"
pipenv_site_dir = get_pipenv_sitedir()
if pipenv_site_dir is not None:
Expand All @@ -1099,37 +1075,29 @@ def venv_resolve_deps(
# dependency resolution on them, so we are including this step inside the
# spinner context manager for the UX improvement
st.console.print("Building requirements...")
deps = convert_deps_to_pip(deps, project, include_index=True)
deps = convert_deps_to_pip(deps, project)
constraints = set(deps)
with tempfile.NamedTemporaryFile(
mode="w+", prefix="pipenv", suffix="constraints.txt", delete=False
) as constraints_file:
constraints_file.write(str("\n".join(constraints)))
cmd.append("--constraints-file")
cmd.append(constraints_file.name)
st.console.print("Resolving dependencies...")
c = resolve(cmd, st, project=project)
if c.returncode == 0:
st.console.print(environments.PIPENV_SPINNER_OK_TEXT.format("Success!"))
if not project.s.is_verbose() and c.stderr.strip():
click.echo(click.style(f"Warning: {c.stderr.strip()}"), err=True)
else:
try:
results = resolve_packages(
pre,
clear,
project.s.is_verbose(),
allow_global,
req_dir,
packages=deps,
category=category,
constraints=constraints,
)
if results:
st.console.print(
environments.PIPENV_SPINNER_OK_TEXT.format("Success!")
)
except Exception:
st.console.print(
environments.PIPENV_SPINNER_FAIL_TEXT.format("Locking Failed!")
)
click.echo(f"Output: {c.stdout.strip()}", err=True)
click.echo(f"Error: {c.stderr.strip()}", err=True)
try:
with open(target_file.name) as fh:
results = json.load(fh)
except (IndexError, json.JSONDecodeError):
click.echo(c.stdout.strip(), err=True)
click.echo(c.stderr.strip(), err=True)
if os.path.exists(target_file.name):
os.unlink(target_file.name)
raise RuntimeError("There was a problem with locking.")
if os.path.exists(target_file.name):
os.unlink(target_file.name)
raise RuntimeError("There was a problem with locking.")
if lockfile_section not in lockfile:
lockfile[lockfile_section] = {}
return prepare_lockfile(results, pipfile, lockfile[lockfile_section])
Expand Down Expand Up @@ -1159,7 +1127,6 @@ def resolve_deps(
if not deps:
return results, resolver
# First (proper) attempt:
req_dir = req_dir if req_dir else os.environ.get("req_dir", None)
if not req_dir:
req_dir = create_tracked_tempdir(prefix="pipenv-", suffix="-requirements")
with HackedPythonVersion(python_path=project.python(system=allow_global)):
Expand Down
Loading