From 36f054d3a81790918a9300808c4302f247a31685 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Thu, 8 Nov 2018 04:47:26 -0500 Subject: [PATCH 01/23] Grab updates from latest vendored changes Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/__init__.py | 2 +- pipenv/vendor/pythonfinder/models/pyenv.py | 7 +- pipenv/vendor/pythonfinder/models/python.py | 2 +- pipenv/vendor/requirementslib/__init__.py | 2 +- .../requirementslib/models/baserequirement.py | 37 --------- .../vendor/requirementslib/models/markers.py | 6 +- .../vendor/requirementslib/models/pipfile.py | 39 +++++++-- .../requirementslib/models/requirements.py | 81 +++++++++++++++---- pipenv/vendor/requirementslib/models/utils.py | 5 +- pipenv/vendor/vistir/spin.py | 2 + 10 files changed, 113 insertions(+), 70 deletions(-) delete mode 100644 pipenv/vendor/requirementslib/models/baserequirement.py diff --git a/pipenv/vendor/pythonfinder/__init__.py b/pipenv/vendor/pythonfinder/__init__.py index 0b22546acc..85666b5c28 100644 --- a/pipenv/vendor/pythonfinder/__init__.py +++ b/pipenv/vendor/pythonfinder/__init__.py @@ -1,6 +1,6 @@ from __future__ import print_function, absolute_import -__version__ = '1.1.7' +__version__ = '1.1.8' # Add NullHandler to "pythonfinder" logger, because Python2's default root # logger has no handler and warnings like this would be reported: diff --git a/pipenv/vendor/pythonfinder/models/pyenv.py b/pipenv/vendor/pythonfinder/models/pyenv.py index 4a8dfc65cd..ac7f8588ac 100644 --- a/pipenv/vendor/pythonfinder/models/pyenv.py +++ b/pipenv/vendor/pythonfinder/models/pyenv.py @@ -45,13 +45,16 @@ def expanded_paths(self): ) def get_version_order(self): - version_order_file = self.root.joinpath("version").read_text(encoding="utf-8") + version_order_file, version_order_lines = self.root.joinpath("version"), [] + if version_order_file.exists(): + version_order_lines = version_order_file.read_text(encoding="utf-8").splitlines() + version_paths = [ p for p in self.root.glob("versions/*") if 
not (p.parent.name == "envs" or p.name == "envs") ] versions = {v.name: v for v in version_paths} - version_order = [versions[v] for v in version_order_file.splitlines() if v in versions] + version_order = [versions[v] for v in version_order_lines if v in versions] for version in version_order: version_paths.remove(version) version_order += version_paths diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py index ec99afe731..24d520b6d2 100644 --- a/pipenv/vendor/pythonfinder/models/python.py +++ b/pipenv/vendor/pythonfinder/models/python.py @@ -173,7 +173,7 @@ def parse(cls, version): def get_architecture(self): if self.architecture: return self.architecture - arch, _ = platform.architecture(path.path.as_posix()) + arch, _ = platform.architecture(self.comes_from.path.as_posix()) self.architecture = arch return self.architecture diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index ba0ce9ae69..edbab5bc8a 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -1,5 +1,5 @@ # -*- coding=utf-8 -*- -__version__ = '1.2.5' +__version__ = '1.2.6' import logging diff --git a/pipenv/vendor/requirementslib/models/baserequirement.py b/pipenv/vendor/requirementslib/models/baserequirement.py deleted file mode 100644 index b97dee40a1..0000000000 --- a/pipenv/vendor/requirementslib/models/baserequirement.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import -import abc -import attr -import six - - -@six.add_metaclass(abc.ABCMeta) -class BaseRequirement: - @classmethod - def from_line(cls, line): - """Returns a requirement from a requirements.txt or pip-compatible line""" - raise NotImplementedError - - @abc.abstractmethod - def line_part(self): - """Returns the current requirement as a pip-compatible line""" - - @classmethod - def from_pipfile(cls, name, pipfile): - """Returns a 
requirement from a pipfile entry""" - raise NotImplementedError - - @abc.abstractmethod - def pipfile_part(self): - """Returns the current requirement as a pipfile entry""" - - @classmethod - def attr_fields(cls): - return [field.name for field in attr.fields(cls)] - - @property - def extras_as_pip(self): - if self.extras: - return "[{0}]".format(",".join(self.extras)) - - return "" diff --git a/pipenv/vendor/requirementslib/models/markers.py b/pipenv/vendor/requirementslib/models/markers.py index 83b44b6344..70fe3bc035 100644 --- a/pipenv/vendor/requirementslib/models/markers.py +++ b/pipenv/vendor/requirementslib/models/markers.py @@ -4,12 +4,11 @@ from packaging.markers import InvalidMarker, Marker from ..exceptions import RequirementError -from .baserequirement import BaseRequirement from .utils import filter_none, validate_markers @attr.s -class PipenvMarkers(BaseRequirement): +class PipenvMarkers(object): """System-level requirements - see PEP508 for more detail""" os_name = attr.ib( @@ -78,7 +77,8 @@ def from_line(cls, line): @classmethod def from_pipfile(cls, name, pipfile): - found_keys = [k for k in pipfile.keys() if k in cls.attr_fields()] + attr_fields = [field.name for field in attr.fields(cls)] + found_keys = [k for k in pipfile.keys() if k in attr_fields] marker_strings = ["{0} {1}".format(k, pipfile[k]) for k in found_keys] if pipfile.get("markers"): marker_strings.append(pipfile.get("markers")) diff --git a/pipenv/vendor/requirementslib/models/pipfile.py b/pipenv/vendor/requirementslib/models/pipfile.py index fe7743c2ca..58d540559e 100644 --- a/pipenv/vendor/requirementslib/models/pipfile.py +++ b/pipenv/vendor/requirementslib/models/pipfile.py @@ -38,12 +38,16 @@ def load(cls, f, encoding=None): content = content.decode(encoding) _data = tomlkit.loads(content) if "source" not in _data: - # HACK: There is no good way to prepend a section to an existing - # TOML document, but there's no good way to copy non-structural - # content from one TOML 
document to another either. Modify the - # TOML content directly, and load the new in-memory document. - sep = "" if content.startswith("\n") else "\n" - content = plette.pipfiles.DEFAULT_SOURCE_TOML + sep + content + if "sources" in _data: + _data["source"] = _data["sources"] + content = tomlkit.dumps(_data) + else: + # HACK: There is no good way to prepend a section to an existing + # TOML document, but there's no good way to copy non-structural + # content from one TOML document to another either. Modify the + # TOML content directly, and load the new in-memory document. + sep = "" if content.startswith("\n") else "\n" + content = plette.pipfiles.DEFAULT_SOURCE_TOML + sep + content data = tomlkit.loads(content) return cls(data) @@ -53,6 +57,8 @@ class Pipfile(object): path = attr.ib(validator=is_path, type=Path) projectfile = attr.ib(validator=is_projectfile, type=ProjectFile) _pipfile = attr.ib(type=plette.pipfiles.Pipfile) + _pyproject = attr.ib(default=attr.Factory(tomlkit.document), type=tomlkit.toml_document.TOMLDocument) + build_system = attr.ib(default=attr.Factory(dict), type=dict) requirements = attr.ib(default=attr.Factory(list), type=list) dev_requirements = attr.ib(default=attr.Factory(list), type=list) @@ -212,3 +218,24 @@ def packages(self, as_requirements=True): if as_requirements: return self.requirements return self._pipfile.get('packages', {}) + + def _read_pyproject(self): + pyproject = self.path.parent.joinpath("pyproject.toml") + if pyproject.exists(): + self._pyproject = tomlkit.load(pyproject) + build_system = self._pyproject.get("build-system", None) + if not os.path.exists(self.path_to("setup.py")): + if not build_system or not build_system.get("requires"): + build_system = { + "requires": ["setuptools>=38.2.5", "wheel"], + "build-backend": "setuptools.build_meta", + } + self._build_system = build_system + + @property + def build_requires(self): + return self.build_system.get("requires", []) + + @property + def build_backend(self): + 
return self.build_system.get("build-backend", None) diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index 8d087d2309..ce2b0927f7 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -36,7 +36,6 @@ add_ssh_scheme_to_git_uri, strip_ssh_from_git_uri, ) -from .baserequirement import BaseRequirement from .utils import ( HASH_STRING, build_vcs_link, @@ -61,7 +60,7 @@ @attr.s(slots=True) -class NamedRequirement(BaseRequirement): +class NamedRequirement(object): name = attr.ib() version = attr.ib(validator=attr.validators.optional(validate_specifiers)) req = attr.ib() @@ -98,7 +97,8 @@ def from_line(cls, line): def from_pipfile(cls, name, pipfile): creation_args = {} if hasattr(pipfile, "keys"): - creation_args = {k: v for k, v in pipfile.items() if k in cls.attr_fields()} + attr_fields = [field.name for field in attr.fields(cls)] + creation_args = {k: v for k, v in pipfile.items() if k in attr_fields} creation_args["name"] = name version = get_version(pipfile) extras = creation_args.get("extras", None) @@ -131,7 +131,7 @@ def pipfile_part(self): @attr.s(slots=True) -class FileRequirement(BaseRequirement): +class FileRequirement(object): """File requirements for tar.gz installable files or wheels or setup.py containing directories.""" @@ -152,6 +152,8 @@ class FileRequirement(BaseRequirement): name = attr.ib() #: A :class:`~pkg_resources.Requirement` isntance req = attr.ib() + #: Whether this is a direct url requirement + is_direct = attr.ib(default=False) _uri_scheme = attr.ib(default=None) @classmethod @@ -256,11 +258,17 @@ def get_link_from_line(cls, line): return LinkInfo(vcs_type, prefer, relpath, path, uri, link) + def __attrs_post_init__(self): + if self.req and getattr(self.req, "url"): + self.uri = self.req.url + @uri.default def get_uri(self): if self.path and not self.uri: self._uri_scheme = "path" self.uri = 
pip_shims.shims.path_to_url(os.path.abspath(self.path)) + elif self.req and getattr(self.req, "url"): + self.uri = self.req.url @name.default def get_name(self): @@ -268,6 +276,8 @@ def get_name(self): if loc: self._uri_scheme = "path" if self.path else "uri" name = None + if self.req and getattr(self.req, "name"): + return self.req.name if self.link and self.link.egg_fragment: return self.link.egg_fragment elif self.link and self.link.is_wheel: @@ -326,9 +336,18 @@ def get_link(self): @req.default def get_requirement(self): - req = init_requirement(normalize_name(self.name)) - req.editable = False - req.line = self.link.url_without_fragment + if self.link.is_artifact and not self.editable: + if self._uri_scheme == "uri": + if self.name: + req_str = "{0} @{1}".format(self.name, self.link.url_without_fragment) + else: + req_str = "{0}".format(self.link.url_without_fragment) + req = init_requirement(req_str) + req.line = req_str + else: + req = init_requirement(normalize_name(self.name)) + req.editable = False + req.line = self.link.url_without_fragment if self.path and self.link and self.link.scheme.startswith("file"): req.local_file = True req.path = self.path @@ -337,7 +356,8 @@ def get_requirement(self): else: req.local_file = False req.path = None - req.url = self.link.url_without_fragment + if not getattr(req, "url", None): + req.url = self.link.url_without_fragment if self.editable: req.editable = True req.link = self.link @@ -351,9 +371,13 @@ def is_remote_artifact(self): for scheme in ("http", "https", "ftp", "ftps", "uri") ) and (self.link.is_artifact or self.link.is_wheel) - and not self.req.editable + and not self.editable ) + @property + def is_direct_url(self): + return self.is_remote_artifact + @property def formatted_path(self): if self.path: @@ -371,10 +395,18 @@ def from_line(cls, line): editable = line.startswith("-e ") line = line.split(" ", 1)[1] if editable else line setup_path = None + name = None + req = None if not 
any([is_installable_file(line), is_valid_url(line), is_file_url(line)]): - raise RequirementError( - "Supplied requirement is not installable: {0!r}".format(line) - ) + try: + req = init_requirement(line) + except Exception: + raise RequirementError( + "Supplied requirement is not installable: {0!r}".format(line) + ) + else: + name = getattr(req, "name", None) + line = getattr(req, "url", None) vcs_type, prefer, relpath, path, uri, link = cls.get_link_from_line(line) setup_path = Path(path) / "setup.py" if path else None arg_dict = { @@ -389,8 +421,12 @@ def from_line(cls, line): from pip_shims import Wheel arg_dict["name"] = Wheel(link.filename).name + elif name: + arg_dict["name"] = name elif link.egg_fragment: arg_dict["name"] = link.egg_fragment + if req: + arg_dict["req"] = req created = cls(**arg_dict) return created @@ -428,7 +464,9 @@ def from_pipfile(cls, name, pipfile): if not uri: uri = pip_shims.shims.path_to_url(path) link = create_link(uri) - + req = None + if link.is_artifact and not link.is_wheel and not link.scheme.startswith("file"): + req = init_requirement("{0}@{1}".format(name, uri)) arg_dict = { "name": name, "path": path, @@ -437,6 +475,8 @@ def from_pipfile(cls, name, pipfile): "link": link, "uri_scheme": uri_scheme, } + if req: + arg_dict["req"] = req return cls(**arg_dict) @property @@ -449,7 +489,10 @@ def line_part(self): seed = unquote(self.link.url_without_fragment) or self.uri # add egg fragments to remote artifacts (valid urls only) if not self._has_hashed_name and self.is_remote_artifact: - seed += "#egg={0}".format(self.name) + if not self.link.is_wheel and self.link.is_artifact: + seed = "{0}@{1}".format(self.name, seed) + else: + seed += "#egg={0}".format(self.name) editable = "-e " if self.editable else "" return "{0}{1}".format(editable, seed) @@ -575,7 +618,8 @@ def get_requirement(self): ) req = init_requirement(canonicalize_name(self.name)) req.editable = self.editable - req.url = self.uri + if not getattr(req, "url") and 
self.uri: + req.url = self.uri req.line = self.link.url if self.ref: req.revision = self.ref @@ -813,7 +857,7 @@ def pipfile_part(self): class Requirement(object): name = attr.ib() vcs = attr.ib(default=None, validator=attr.validators.optional(validate_vcs)) - req = attr.ib(default=None, validator=optional_instance_of(BaseRequirement)) + req = attr.ib(default=None) markers = attr.ib(default=None) specifiers = attr.ib(validator=attr.validators.optional(validate_specifiers)) index = attr.ib(default=None) @@ -915,8 +959,11 @@ def from_line(cls, line): # Installable local files and installable non-vcs urls are handled # as files, generally speaking line_is_vcs = is_vcs(line) + # check for pep-508 compatible requirements + name, _, possible_url = line.partition("@") if is_installable_file(line) or ( - (is_file_url(line) or is_valid_url(line)) and not line_is_vcs + (is_valid_url(possible_url) or is_file_url(line) or is_valid_url(line)) and + not (line_is_vcs or is_vcs(possible_url)) ): r = FileRequirement.from_line(line_with_prefix) elif line_is_vcs: diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index fbaaf1a417..aa7ffd681c 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -424,17 +424,18 @@ def make_install_requirement(name, version, extras, markers, constraint=False): """ # If no extras are specified, the extras string is blank + from pip_shims.shims import install_req_from_line extras_string = "" if extras: # Sort extras for stability extras_string = "[{}]".format(",".join(sorted(extras))) if not markers: - return ireq_from_line( + return install_req_from_line( str('{}{}=={}'.format(name, extras_string, version)), constraint=constraint) else: - return ireq_from_line( + return install_req_from_line( str('{}{}=={}; {}'.format(name, extras_string, version, str(markers))), constraint=constraint) diff --git a/pipenv/vendor/vistir/spin.py 
b/pipenv/vendor/vistir/spin.py index f8c4e0095c..f0d9e77ffd 100644 --- a/pipenv/vendor/vistir/spin.py +++ b/pipenv/vendor/vistir/spin.py @@ -153,6 +153,7 @@ def __init__(self, *args, **kwargs): def ok(self, text="OK"): """Set Ok (success) finalizer to a spinner.""" + # Do not display spin text for ok state self._text = None _text = text if text else "OK" @@ -160,6 +161,7 @@ def ok(self, text="OK"): def fail(self, text="FAIL"): """Set fail finalizer to a spinner.""" + # Do not display spin text for fail state self._text = None _text = text if text else "FAIL" From 5b496705a095db7cbb89065fce0b90fe8e9e3f42 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Thu, 8 Nov 2018 13:13:54 -0500 Subject: [PATCH 02/23] Fix broken requirementslib updates Signed-off-by: Dan Ryan --- .../requirementslib/models/requirements.py | 30 ++++++++++--------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index ce2b0927f7..a36e5ba46c 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -143,6 +143,7 @@ class FileRequirement(object): editable = attr.ib(default=False) #: Extras if applicable extras = attr.ib(default=attr.Factory(list)) + _uri_scheme = attr.ib(default=None) #: URI of the package uri = attr.ib() #: Link object representing the package to clone @@ -154,7 +155,6 @@ class FileRequirement(object): req = attr.ib() #: Whether this is a direct url requirement is_direct = attr.ib(default=False) - _uri_scheme = attr.ib(default=None) @classmethod def get_link_from_line(cls, line): @@ -276,7 +276,7 @@ def get_name(self): if loc: self._uri_scheme = "path" if self.path else "uri" name = None - if self.req and getattr(self.req, "name"): + if getattr(self, "req", None) and getattr(self.req, "name"): return self.req.name if self.link and self.link.egg_fragment: return self.link.egg_fragment @@ -339,25 
+339,27 @@ def get_requirement(self): if self.link.is_artifact and not self.editable: if self._uri_scheme == "uri": if self.name: - req_str = "{0} @{1}".format(self.name, self.link.url_without_fragment) + req_str = "{0} @ {1}".format(self.name, self.link.url_without_fragment) else: req_str = "{0}".format(self.link.url_without_fragment) req = init_requirement(req_str) req.line = req_str + else: + req = init_requirement(normalize_name(self.name)) else: req = init_requirement(normalize_name(self.name)) req.editable = False req.line = self.link.url_without_fragment - if self.path and self.link and self.link.scheme.startswith("file"): - req.local_file = True - req.path = self.path - req.url = None - self._uri_scheme = "file" - else: - req.local_file = False - req.path = None - if not getattr(req, "url", None): - req.url = self.link.url_without_fragment + if self.path and self.link and self.link.scheme.startswith("file"): + req.local_file = True + req.path = self.path + req.url = None + self._uri_scheme = "file" + else: + req.local_file = False + req.path = None + if not getattr(req, "url", None): + req.url = self.link.url_without_fragment if self.editable: req.editable = True req.link = self.link @@ -1148,7 +1150,7 @@ def get_requirement(self): req.line = req_line req.specifier = SpecifierSet(self.specifiers if self.specifiers else "") if self.is_vcs or self.is_file_or_url: - req.url = self.req.link.url_without_fragment + req.url = getattr(self.req.req, "url", self.req.link.url_without_fragment) req.marker = self.get_markers() req.extras = set(self.extras) if self.extras else set() return req From 2b90c89d1f518a543f775cb811ee8b5565ee1366 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 16:30:42 -0500 Subject: [PATCH 03/23] Revendor requirementslib - Implement improvements and bugfixes in codebase - Remote archives will now resolve properly Signed-off-by: Dan Ryan --- pipenv/__init__.py | 6 +- pipenv/_compat.py | 2 +- pipenv/core.py | 15 +- 
pipenv/project.py | 105 ++--- pipenv/utils.py | 14 +- pipenv/vendor/requirementslib/__init__.py | 4 + pipenv/vendor/requirementslib/exceptions.py | 75 ++++ .../vendor/requirementslib/models/lockfile.py | 68 +++- .../vendor/requirementslib/models/pipfile.py | 44 +- .../requirementslib/models/requirements.py | 340 +++++++++++----- .../requirementslib/models/setup_info.py | 378 ++++++++++++++++++ pipenv/vendor/requirementslib/models/utils.py | 41 +- pipenv/vendor/requirementslib/models/vcs.py | 3 +- tests/integration/conftest.py | 13 + tests/integration/test_uninstall.py | 6 +- 15 files changed, 944 insertions(+), 170 deletions(-) create mode 100644 pipenv/vendor/requirementslib/models/setup_info.py diff --git a/pipenv/__init__.py b/pipenv/__init__.py index f8a1a8b3e1..ba4dd9c3e0 100644 --- a/pipenv/__init__.py +++ b/pipenv/__init__.py @@ -10,7 +10,7 @@ from .__version__ import __version__ -PIPENV_ROOT = os.path.dirname(os.path.realpath(__file__)) +PIPENV_ROOT = os.path.abspath(os.path.dirname(os.path.realpath(__file__))) PIPENV_VENDOR = os.sep.join([PIPENV_ROOT, "vendor"]) PIPENV_PATCHED = os.sep.join([PIPENV_ROOT, "patched"]) # Inject vendored directory into system path. @@ -27,11 +27,13 @@ if sys.version_info >= (3, 1) and sys.version_info <= (3, 6): if sys.stdout.isatty() and sys.stderr.isatty(): import io + import atexit sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8') + atexit.register(sys.stdout.close) sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf8') + atexit.register(sys.stdout.close) os.environ["PIP_DISABLE_PIP_VERSION_CHECK"] = fs_str("1") -os.environ["PIP_SHIMS_BASE_MODULE"] = fs_str("pipenv.patched.notpip") # Hack to make things work better. 
try: diff --git a/pipenv/_compat.py b/pipenv/_compat.py index 6e5ae6a7fb..fb2c01477f 100644 --- a/pipenv/_compat.py +++ b/pipenv/_compat.py @@ -382,7 +382,7 @@ def decode_output(output): except (AttributeError, UnicodeDecodeError, UnicodeEncodeError): if six.PY2: output = unicode.translate(vistir.misc.to_text(output), - UNICODE_TO_ASCII_TRANSLATION_MAP) + UNICODE_TO_ASCII_TRANSLATION_MAP) else: output = output.translate(UNICODE_TO_ASCII_TRANSLATION_MAP) output = output.encode(DEFAULT_ENCODING, "replace") diff --git a/pipenv/core.py b/pipenv/core.py index f9b8eaeec2..3cbd1645f1 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -40,8 +40,8 @@ clean_resolved_dep, parse_indexes, escape_cmd, - fix_venv_site, create_spinner, + get_canonical_names ) from . import environments, pep508checker, progress from .environments import ( @@ -1296,7 +1296,7 @@ def pip_install( pypi_mirror=None, trusted_hosts=None ): - from notpip._internal import logger as piplogger + from pipenv.patched.notpip._internal import logger as piplogger from .utils import Mapping from .vendor.urllib3.util import parse_url @@ -1746,11 +1746,11 @@ def do_install( if requirements or package_args or project.pipfile_exists: skip_requirements = True # Don't attempt to install develop and default packages if Pipfile is missing - if not project.pipfile_exists and not (packages or dev) and not code: - if not (skip_lock or deploy): - raise exceptions.PipfileNotFound(project.pipfile_location) - elif (skip_lock or deploy) and not project.lockfile_exists: - raise exceptions.LockfileNotFound(project.lockfile_location) + if not project.pipfile_exists and not (package_args or dev) and not code: + if not (ignore_pipfile or deploy): + raise exceptions.PipfileNotFound(project.path_to("Pipfile")) + elif ((skip_lock and deploy) or ignore_pipfile) and not project.lockfile_exists: + raise exceptions.LockfileNotFound(project.path_to("Pipfile.lock")) concurrent = not sequential # Ensure that virtualenv is available. 
ensure_project( @@ -2092,7 +2092,6 @@ def do_uninstall( ) ) package_names = develop - fix_venv_site(project.env_paths["lib"]) # Remove known "bad packages" from the list. bad_pkgs = set([canonicalize_name(pkg) for pkg in BAD_PACKAGES]) for bad_package in BAD_PACKAGES: diff --git a/pipenv/project.py b/pipenv/project.py index 26b4cf0ce6..d4713b89f2 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -804,7 +804,7 @@ def create_pipfile(self, python=None): .lstrip("\n") .split("\n") ) - sources = [DEFAULT_SOURCE] + sources = [DEFAULT_SOURCE,] for i, index in enumerate(indexes): if not index: continue @@ -831,55 +831,68 @@ def create_pipfile(self, python=None): version = python_version(required_python) or PIPENV_DEFAULT_PYTHON_VERSION if version and len(version) >= 3: data[u"requires"] = {"python_version": version[: len("2.7")]} - self.write_toml(data, "Pipfile") + self.write_toml(data) def get_or_create_lockfile(self): - from requirementslib.models.lockfile import Lockfile as Req_Lockfile + from pipenv.vendor.requirementslib.models.lockfile import Lockfile as Req_Lockfile lockfile = None - try: - lockfile = Req_Lockfile.load(self.lockfile_location) - except OSError: - lockfile = Req_Lockfile(self.lockfile_content) - return lockfile + if self.lockfile_exists: + try: + lockfile = Req_Lockfile.load(self.lockfile_location) + except OSError: + lockfile = Req_Lockfile.from_data(self.lockfile_location, self.lockfile_content) else: - if lockfile._lockfile is not None: - return lockfile - if self.lockfile_exists and self.lockfile_content: - from .vendor.plette.lockfiles import Lockfile - lockfile_dict = self.lockfile_content.copy() - sources = lockfile_dict["_meta"].get("sources", []) - if not sources: - sources = self.pipfile_sources - elif not isinstance(sources, list): - sources = [sources,] - lockfile_dict["_meta"]["sources"] = [ - { - "name": s["name"], - "url": s["url"], - "verify_ssl": ( - s["verify_ssl"] if isinstance(s["verify_ssl"], bool) else ( - True if 
s["verify_ssl"].lower() == "true" else False - ) + lockfile = Req_Lockfile.from_data(path=self.lockfile_location, data=self._lockfile, meta_from_project=False) + if lockfile._lockfile is not None: + return lockfile + if self.lockfile_exists and self.lockfile_content: + lockfile_dict = self.lockfile_content.copy() + sources = lockfile_dict.get("_meta", {}).get("sources", []) + if not sources: + sources = self.pipfile_sources + elif not isinstance(sources, list): + sources = [sources,] + lockfile_dict["_meta"]["sources"] = [ + { + "name": s["name"], + "url": s["url"], + "verify_ssl": ( + s["verify_ssl"] if isinstance(s["verify_ssl"], bool) else ( + True if s["verify_ssl"].lower() == "true" else False ) - } for s in sources - ] - _created_lockfile = Lockfile(lockfile_dict) - lockfile._lockfile = lockfile.projectfile.model = _created_lockfile - return lockfile - elif self.pipfile_exists: - from .vendor.plette.lockfiles import Lockfile, PIPFILE_SPEC_CURRENT - lockfile_dict = { - "_meta": { - "hash": {"sha256": self.calculate_pipfile_hash()}, - "pipfile-spec": PIPFILE_SPEC_CURRENT, - "sources": self.pipfile_sources, - "requires": self.parsed_pipfile.get("requires", {}) - }, - "default": self._lockfile["default"].copy(), - "develop": self._lockfile["develop"].copy() - } - lockfile._lockfile = Lockfile(lockfile_dict) - return lockfile + ) + } for s in sources + ] + _created_lockfile = Req_Lockfile.from_data( + path=self.lockfile_location, data=lockfile_dict, meta_from_project=False + ) + lockfile._lockfile = lockfile.projectfile.model = _created_lockfile + return lockfile + elif self.pipfile_exists: + lockfile_dict = { + "default": self._lockfile["default"].copy(), + "develop": self._lockfile["develop"].copy() + } + lockfile_dict.update({"_meta": self.get_lockfile_meta()}) + _created_lockfile = Req_Lockfile.from_data( + path=self.lockfile_location, data=lockfile_dict, meta_from_project=False + ) + lockfile._lockfile = _created_lockfile + return lockfile + + def 
get_lockfile_meta(self): + from .vendor.plette.lockfiles import PIPFILE_SPEC_CURRENT + sources = self.lockfile_content.get("_meta", {}).get("sources", []) + if not sources: + sources = self.pipfile_sources + elif not isinstance(sources, list): + sources = [sources,] + return { + "hash": {"sha256": self.calculate_pipfile_hash()}, + "pipfile-spec": PIPFILE_SPEC_CURRENT, + "sources": sources, + "requires": self.parsed_pipfile.get("requires", {}) + } def write_toml(self, data, path=None): """Writes the given data structure out as TOML.""" @@ -943,7 +956,7 @@ def pipfile_sources(self): @property def sources(self): if self.lockfile_exists and hasattr(self.lockfile_content, "keys"): - meta_ = self.lockfile_content["_meta"] + meta_ = self.lockfile_content.get("_meta", {}) sources_ = meta_.get("sources") if sources_: return sources_ diff --git a/pipenv/utils.py b/pipenv/utils.py index c9feeafdb8..3a9ef307d7 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -18,7 +18,8 @@ from vistir.misc import fs_str six.add_move(six.MovedAttribute("Mapping", "collections", "collections.abc")) -from six.moves import Mapping +six.add_move(six.MovedAttribute("Sequence", "collections", "collections.abc")) +from six.moves import Mapping, Sequence from vistir.compat import ResourceWarning @@ -1035,6 +1036,17 @@ def path_to_url(path): return Path(normalize_drive(os.path.abspath(path))).as_uri() +def get_canonical_names(packages): + """Canonicalize a list of packages and return a set of canonical names""" + from .vendor.packaging.utils import canonicalize_name + + if not isinstance(packages, Sequence): + if not isinstance(packages, six.string_types): + return packages + packages = [packages,] + return set([canonicalize_name(pkg) for pkg in packages if pkg]) + + def walk_up(bottom): """Mimic os.walk, but walk 'up' instead of down the directory tree. 
From: https://gist.github.com/zdavkeos/1098474 diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index edbab5bc8a..881e9ac9d9 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -2,6 +2,10 @@ __version__ = '1.2.6' import logging +import warnings + +warnings.filterwarnings("ignore", category=ResourceWarning) + logger = logging.getLogger(__name__) logger.addHandler(logging.NullHandler()) diff --git a/pipenv/vendor/requirementslib/exceptions.py b/pipenv/vendor/requirementslib/exceptions.py index de8bf8ef94..1a73f98e77 100644 --- a/pipenv/vendor/requirementslib/exceptions.py +++ b/pipenv/vendor/requirementslib/exceptions.py @@ -1,7 +1,12 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import import errno +import os import six +import sys + + +from vistir.compat import FileNotFoundError if six.PY2: @@ -15,3 +20,73 @@ def __init__(self, *args, **kwargs): class RequirementError(Exception): pass + + +class MissingParameter(Exception): + def __init__(self, param): + super(Exception, self).__init__() + print("Missing parameter: %s" % param, file=sys.stderr, flush=True) + + +class FileCorruptException(OSError): + def __init__(self, path, *args, **kwargs): + path = path + backup_path = kwargs.pop("backup_path", None) + if not backup_path and args: + args = reversed(args) + backup_path = args.pop() + if not isinstance(backup_path, six.string_types) or not os.path.exists(os.path.abspath(os.path.dirname(backup_path))): + args.append(backup_path) + backup_path = None + if args: + args = reversed(args) + self.path = path + self.backup_path = backup_path + self.show(self.path, self.backup_path) + super(OSError, self).__init__(path, *args, **kwargs) + + @classmethod + def show(cls, path, backup_path=None): + print("ERROR: Failed to load file at %s" % path, file=sys.stderr, flush=True) + if backup_path: + msg = "it will be backed up to %s and removed" % backup_path + else: 
+ msg = "it will be removed and replaced." + print("The file is corrupt, %s" % msg, file=sys.stderr, flush=True) + + +class LockfileCorruptException(FileCorruptException): + + @classmethod + def show(cls, path, backup_path=None): + print("ERROR: Failed to load lockfile at %s" % path, file=sys.stderr, flush=True) + if backup_path: + msg = "it will be backed up to %s and removed" % backup_path + else: + msg = "it will be removed and replaced on the next lock." + print("Your lockfile is corrupt, %s" % msg, file=sys.stderr, flush=True) + + +class PipfileCorruptException(FileCorruptException): + + @classmethod + def show(cls, path, backup_path=None): + print("ERROR: Failed to load Pipfile at %s" % path, file=sys.stderr, flush=True) + if backup_path: + msg = "it will be backed up to %s and removed" % backup_path + else: + msg = "it will be removed and replaced on the next lock." + print("Your Pipfile is corrupt, %s" % msg, file=sys.stderr, flush=True) + + +class PipfileNotFound(FileNotFoundError): + def __init__(self, path, *args, **kwargs): + self.errno = errno.ENOENT + self.path = path + self.show(path) + super(PipfileNotFound, self).__init__(*args, **kwargs) + + @classmethod + def show(cls, path): + print("ERROR: The file could not be found: %s" % path, file=sys.stderr, flush=True) + print("Aborting...", file=sys.stderr, flush=True) diff --git a/pipenv/vendor/requirementslib/models/lockfile.py b/pipenv/vendor/requirementslib/models/lockfile.py index 3e48281347..6f61f57ebc 100644 --- a/pipenv/vendor/requirementslib/models/lockfile.py +++ b/pipenv/vendor/requirementslib/models/lockfile.py @@ -9,12 +9,13 @@ import plette.lockfiles import six -from vistir.compat import Path, FileNotFoundError +from vistir.compat import Path, FileNotFoundError, JSONDecodeError from .project import ProjectFile from .requirements import Requirement from .utils import optional_instance_of +from ..exceptions import LockfileCorruptException, PipfileNotFound, MissingParameter from ..utils import 
is_vcs, is_editable, merge_items DEFAULT_NEWLINES = u"\n" @@ -134,7 +135,17 @@ def read_projectfile(cls, path): return pf @classmethod - def load_projectfile(cls, path, create=True): + def lockfile_from_pipfile(cls, pipfile_path): + from .pipfile import Pipfile + if os.path.isfile(pipfile_path): + if not os.path.isabs(pipfile_path): + pipfile_path = os.path.abspath(pipfile_path) + pipfile = Pipfile.load(os.path.dirname(pipfile_path)) + return plette.lockfiles.Lockfile.with_meta_from(pipfile._pipfile) + raise PipfileNotFound(pipfile_path) + + @classmethod + def load_projectfile(cls, path, create=True, data=None): """Given a path, load or create the necessary lockfile. :param str path: Path to the project root or lockfile @@ -155,8 +166,48 @@ def load_projectfile(cls, path, create=True): elif not lockfile_path.exists() and not create: raise FileNotFoundError("Lockfile does not exist: %s" % lockfile_path.as_posix()) projectfile = cls.read_projectfile(lockfile_path.as_posix()) + if not lockfile_path.exists(): + if not data: + lf = cls.lockfile_from_pipfile(project_path.joinpath("Pipfile")) + else: + lf = plette.lockfiles.Lockfile(data) + projectfile.model = lf return projectfile + @classmethod + def from_data(cls, path, data, meta_from_project=True): + """Create a new lockfile instance from a dictionary. + + :param str path: Path to the project root. + :param dict data: Data to load into the lockfile. + :param bool meta_from_project: Attempt to populate the meta section from the + project root, default True. 
+ """ + + if path is None: + raise MissingParameter("path") + if data is None: + raise MissingParameter("data") + if not isinstance(data, dict): + raise TypeError("Expecting a dictionary for parameter 'data'") + path = os.path.abspath(str(path)) + if os.path.isdir(path): + project_path = path + elif not os.path.isdir(path) and os.path.isdir(os.path.dirname(path)): + project_path = os.path.dirname(path) + pipfile_path = os.path.join(project_path, "Pipfile") + lockfile_path = os.path.join(project_path, "Pipfile.lock") + if meta_from_project: + lockfile = cls.lockfile_from_pipfile(pipfile_path) + lockfile.update(data) + else: + lockfile = plette.lockfiles.Lockfile(data) + projectfile = ProjectFile(line_ending=DEFAULT_NEWLINES, location=lockfile_path, model=lockfile) + return cls( + projectfile=projectfile, lockfile=lockfile, + newlines=projectfile.line_ending, path=Path(projectfile.location) + ) + @classmethod def load(cls, path, create=True): """Create a new lockfile instance. @@ -170,7 +221,18 @@ def load(cls, path, create=True): :rtype: :class:`~requirementslib.models.lockfile.Lockfile` """ - projectfile = cls.load_projectfile(path, create=create) + try: + projectfile = cls.load_projectfile(path, create=create) + except JSONDecodeError as e: + path = os.path.abspath(path) + if not os.path.isdir(path): + path = os.path.dirname(path) + path = Path(os.path.join(path, "Pipfile.lock")) + formatted_path = path.as_posix() + backup_path = "%.bak" % formatted_path + LockfileCorruptException.show(formatted_path, backup_path=backup_path) + path.rename(backup_path) + cls.load(formatted_path, create=True) lockfile_path = Path(projectfile.location) creation_args = { "projectfile": projectfile, diff --git a/pipenv/vendor/requirementslib/models/pipfile.py b/pipenv/vendor/requirementslib/models/pipfile.py index 58d540559e..dbb024be6b 100644 --- a/pipenv/vendor/requirementslib/models/pipfile.py +++ b/pipenv/vendor/requirementslib/models/pipfile.py @@ -23,13 +23,26 @@ is_projectfile 
= optional_instance_of(ProjectFile) +def reorder_source_keys(data): + for i, entry in enumerate(data["source"]): + table = tomlkit.table() + table["name"] = entry["name"] + table["url"] = entry["url"] + table["verify_ssl"] = entry["verify_ssl"] + data["source"][i] = table + return data + + class PipfileLoader(plette.pipfiles.Pipfile): @classmethod def validate(cls, data): for key, klass in plette.pipfiles.PIPFILE_SECTIONS.items(): if key not in data or key == "source": continue - klass.validate(data[key]) + try: + klass.validate(data[key]) + except Exception: + pass @classmethod def load(cls, f, encoding=None): @@ -37,19 +50,26 @@ def load(cls, f, encoding=None): if encoding is not None: content = content.decode(encoding) _data = tomlkit.loads(content) + _data["source"] = _data.get("source", []) + _data.get("sources", []) + _data = reorder_source_keys(_data) if "source" not in _data: - if "sources" in _data: - _data["source"] = _data["sources"] - content = tomlkit.dumps(_data) - else: - # HACK: There is no good way to prepend a section to an existing - # TOML document, but there's no good way to copy non-structural - # content from one TOML document to another either. Modify the - # TOML content directly, and load the new in-memory document. - sep = "" if content.startswith("\n") else "\n" - content = plette.pipfiles.DEFAULT_SOURCE_TOML + sep + content + # HACK: There is no good way to prepend a section to an existing + # TOML document, but there's no good way to copy non-structural + # content from one TOML document to another either. Modify the + # TOML content directly, and load the new in-memory document. 
+ sep = "" if content.startswith("\n") else "\n" + content = plette.pipfiles.DEFAULT_SOURCE_TOML + sep + content data = tomlkit.loads(content) - return cls(data) + data = reorder_source_keys(data) + instance = cls(data) + new_data = reorder_source_keys(instance._data) + instance._data = new_data + return instance + + def __getattribute__(self, key): + if key == "source": + return self._data[key] + return super(PipfileLoader, self).__getattribute__(key) @attr.s(slots=True) diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index a36e5ba46c..514114298c 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -36,6 +36,7 @@ add_ssh_scheme_to_git_uri, strip_ssh_from_git_uri, ) +from .setup_info import SetupInfo from .utils import ( HASH_STRING, build_vcs_link, @@ -46,7 +47,6 @@ init_requirement, is_pinned_requirement, make_install_requirement, - optional_instance_of, parse_extras, specs_to_string, split_markers_from_line, @@ -56,6 +56,7 @@ validate_vcs, normalize_name, create_link, + get_pyproject ) @@ -148,13 +149,19 @@ class FileRequirement(object): uri = attr.ib() #: Link object representing the package to clone link = attr.ib() + #: PyProject Requirements + pyproject_requires = attr.ib(default=attr.Factory(list)) + #: PyProject Build System + pyproject_backend = attr.ib(default=None) + #: PyProject Path + pyproject_path = attr.ib(default=None) _has_hashed_name = attr.ib(default=False) #: Package name name = attr.ib() #: A :class:`~pkg_resources.Requirement` isntance req = attr.ib() - #: Whether this is a direct url requirement - is_direct = attr.ib(default=False) + #: Setup metadata e.g. 
dependencies + setup_info = attr.ib(default=None) @classmethod def get_link_from_line(cls, line): @@ -258,107 +265,110 @@ def get_link_from_line(cls, line): return LinkInfo(vcs_type, prefer, relpath, path, uri, link) - def __attrs_post_init__(self): - if self.req and getattr(self.req, "url"): - self.uri = self.req.url + @property + def setup_py_dir(self): + if self.setup_path: + return os.path.dirname(os.path.abspath(self.setup_path)) + + @property + def dependencies(self): + build_deps = [] + setup_deps = [] + deps = {} + if self.setup_info: + setup_info = self.setup_info.as_dict() + deps.update(setup_info.get("requires", {})) + setup_deps.extend(setup_info.get("setup_requires", [])) + build_deps.extend(setup_info.get("build_requires", [])) + if self.pyproject_requires: + build_deps.extend(self.pyproject_requires) + return deps, setup_deps, build_deps @uri.default def get_uri(self): if self.path and not self.uri: self._uri_scheme = "path" - self.uri = pip_shims.shims.path_to_url(os.path.abspath(self.path)) + return pip_shims.shims.path_to_url(os.path.abspath(self.path)) elif self.req and getattr(self.req, "url"): - self.uri = self.req.url + return self.req.url @name.default def get_name(self): loc = self.path or self.uri - if loc: - self._uri_scheme = "path" if self.path else "uri" + if loc and not self._uri_scheme: + self._uri_scheme = "path" if self.path else "file" name = None - if getattr(self, "req", None) and getattr(self.req, "name"): - return self.req.name - if self.link and self.link.egg_fragment: + if getattr(self, "req", None) and getattr(self.req, "name") and self.req.name is not None: + if self.is_direct_url: + return self.req.name + if self.link and self.link.egg_fragment and not self._has_hashed_name: return self.link.egg_fragment elif self.link and self.link.is_wheel: from pip_shims import Wheel - + self._has_hashed_name = False return Wheel(self.link.filename).name - if ( - self._uri_scheme != "uri" - and self.path - and self.setup_path - and 
self.setup_path.exists() - ): - from setuptools.dist import distutils - - old_curdir = os.path.abspath(os.getcwd()) - try: - os.chdir(str(self.setup_path.parent)) - dist = distutils.core.run_setup(self.setup_path.as_posix()) - name = dist.get_name() - except (FileNotFoundError, IOError) as e: - dist = None - except Exception as e: - from pip_shims.shims import make_abstract_dist - - try: - if not isinstance(Path, self.path): - _path = Path(self.path) - else: - _path = self.path - if self.editable: - _ireq = pip_shims.shims.install_req_from_editable(_path.as_uri()) - else: - _ireq = pip_shims.shims.install_req_from_line(_path.as_posix()) - dist = make_abstract_dist(_ireq).get_dist() - name = dist.project_name - except (TypeError, ValueError, AttributeError) as e: - dist = None - finally: - os.chdir(old_curdir) + elif self.link and ((self.link.scheme == "file" or self.editable) or ( + self.path and self.setup_path and os.path.isfile(str(self.setup_path)) + )): + if self.editable: + line = pip_shims.shims.path_to_url(self.setup_py_dir) + _ireq = pip_shims.shims.install_req_from_editable(line) + else: + _ireq = pip_shims.shims.install_req_from_line(Path(self.setup_py_dir).as_posix()) + from .setup_info import SetupInfo + subdir = getattr(self, "subdirectory", None) + setupinfo = SetupInfo.from_ireq(_ireq, subdir=subdir) + if setupinfo: + self.setup_info = setupinfo + setupinfo_dict = setupinfo.as_dict() + setup_name = setupinfo_dict.get("name", None) + if setup_name: + name = setup_name + self._has_hashed_name = False + version = setupinfo_dict.get("version") + if version and not self.version: + self.version = version + build_requires = setupinfo_dict.get("build_requires") + build_backend = setupinfo_dict.get("build_backend") + if build_requires and not self.pyproject_requires: + self.pyproject_requires = build_requires + if build_backend and not self.pyproject_backend: + self.pyproject_backend = build_backend hashed_loc = 
hashlib.sha256(loc.encode("utf-8")).hexdigest() hashed_name = hashed_loc[-7:] - if not name or name == "UNKNOWN": + if not name or name.lower() == "unknown": self._has_hashed_name = True name = hashed_name - if self.link and not self._has_hashed_name: + else: + self._has_hashed_name = False + name_in_link = getattr(self.link, "egg_fragment", "") if self.link else "" + if not self._has_hashed_name and name_in_link != name: self.link = create_link("{0}#egg={1}".format(self.link.url, name)) return name @link.default def get_link(self): target = "{0}".format(self.uri) - if hasattr(self, "name"): + if hasattr(self, "name") and not self._has_hashed_name: target = "{0}#egg={1}".format(target, self.name) link = create_link(target) return link @req.default def get_requirement(self): - if self.link.is_artifact and not self.editable: - if self._uri_scheme == "uri": - if self.name: - req_str = "{0} @ {1}".format(self.name, self.link.url_without_fragment) - else: - req_str = "{0}".format(self.link.url_without_fragment) - req = init_requirement(req_str) - req.line = req_str - else: - req = init_requirement(normalize_name(self.name)) - else: - req = init_requirement(normalize_name(self.name)) - req.editable = False - req.line = self.link.url_without_fragment - if self.path and self.link and self.link.scheme.startswith("file"): - req.local_file = True - req.path = self.path + req = init_requirement(normalize_name(self.name)) + req.editable = False + req.line = self.link.url_without_fragment + if self.path and self.link and self.link.scheme.startswith("file"): + req.local_file = True + req.path = self.path + if self.editable: req.url = None - self._uri_scheme = "file" else: - req.local_file = False - req.path = None - if not getattr(req, "url", None): + req.url = self.link.url_without_fragment + else: + req.local_file = False + req.path = None req.url = self.link.url_without_fragment if self.editable: req.editable = True @@ -389,6 +399,99 @@ def formatted_path(self): return 
path.as_posix() return + @classmethod + def create( + cls, path=None, uri=None, editable=False, extras=None, link=None, vcs_type=None, + name=None, req=None, line=None, uri_scheme=None, setup_path=None, relpath=None + ): + import pip_shims.shims + if relpath and not path: + path = relpath + if not path and uri and link.scheme == "file": + path = os.path.abspath(pip_shims.shims.url_to_path(unquote(uri))) + try: + path = get_converted_relative_path(path) + except ValueError: # Vistir raises a ValueError if it can't make a relpath + path = path + if line and not (uri_scheme and uri and link): + vcs_type, uri_scheme, relpath, path, uri, link = cls.get_link_from_line(line) + if not uri_scheme: + uri_scheme = "path" if path else "file" + if path and not uri: + uri = unquote(pip_shims.shims.path_to_url(os.path.abspath(path))) + if not link: + link = create_link(uri) + if not uri: + uri = unquote(link.url_without_fragment) + if not extras: + extras = [] + pyproject_path = None + if path is not None: + pyproject_requires = get_pyproject(os.path.abspath(path)) + pyproject_backend = None + pyproject_requires = None + if pyproject_requires is not None: + pyproject_requires, pyproject_backend = pyproject_requires + if path: + pyproject_path = Path(path).joinpath("pyproject.toml") + if not pyproject_path.exists(): + pyproject_path = None + if not setup_path and path is not None: + setup_path = Path(path).joinpath("setup.py") + if setup_path and isinstance(setup_path, Path): + setup_path = setup_path.as_posix() + creation_kwargs = { + "editable": editable, + "extras": extras, + "pyproject_path": pyproject_path, + "setup_path": setup_path if setup_path else None, + "uri_scheme": uri_scheme, + "link": link, + "uri": uri, + "pyproject_requires": pyproject_requires, + "pyproject_backend": pyproject_backend + } + if vcs_type: + creation_kwargs["vcs_type"] = vcs_type + _line = None + if not name: + import pip_shims.shims + _line = unquote(link.url_without_fragment) if link.url else uri 
+ if editable: + ireq = pip_shims.shims.install_req_from_editable(_line) + else: + _line = path if (uri_scheme and uri_scheme == "path") else _line + ireq = pip_shims.shims.install_req_from_line(_line) + setup_info = SetupInfo.from_ireq(ireq) + setupinfo_dict = setup_info.as_dict() + setup_name = setupinfo_dict.get("name", None) + if setup_name: + name = setup_name + build_requires = setupinfo_dict.get("build_requires", []) + build_backend = setupinfo_dict.get("build_backend", []) + if not creation_kwargs.get("pyproject_requires") and build_requires: + creation_kwargs["pyproject_requires"] = build_requires + if not creation_kwargs.get("pyproject_backend") and build_backend: + creation_kwargs["pyproject_backend"] = build_backend + creation_kwargs["setup_info"] = setup_info + if path or relpath: + creation_kwargs["path"] = relpath if relpath else path + if req: + creation_kwargs["req"] = req + if creation_kwargs.get("req") and line and not getattr(creation_kwargs["req"], "line", None): + creation_kwargs["req"].line = line + if name: + creation_kwargs["name"] = name + cls_inst = cls(**creation_kwargs) + if not _line: + if editable and uri_scheme == "path": + _line = relpath if relpath else path + else: + _line = unquote(cls_inst.link.url_without_fragment) or cls_inst.uri + _line = "{0}#egg={1}".format(line, cls_inst.name) if not cls_inst._has_hashed_name else _line + cls_inst.req.line = line if line else _line + return cls_inst + @classmethod def from_line(cls, line): line = line.strip('"').strip("'") @@ -410,7 +513,6 @@ def from_line(cls, line): name = getattr(req, "name", None) line = getattr(req, "url", None) vcs_type, prefer, relpath, path, uri, link = cls.get_link_from_line(line) - setup_path = Path(path) / "setup.py" if path else None arg_dict = { "path": relpath if relpath else path, "uri": unquote(link.url_without_fragment), @@ -418,6 +520,7 @@ def from_line(cls, line): "editable": editable, "setup_path": setup_path, "uri_scheme": prefer, + "line": line } if 
link and link.is_wheel: from pip_shims import Wheel @@ -427,10 +530,7 @@ def from_line(cls, line): arg_dict["name"] = name elif link.egg_fragment: arg_dict["name"] = link.egg_fragment - if req: - arg_dict["req"] = req - created = cls(**arg_dict) - return created + return cls.create(**arg_dict) @classmethod def from_pipfile(cls, name, pipfile): @@ -466,9 +566,6 @@ def from_pipfile(cls, name, pipfile): if not uri: uri = pip_shims.shims.path_to_url(path) link = create_link(uri) - req = None - if link.is_artifact and not link.is_wheel and not link.scheme.startswith("file"): - req = init_requirement("{0}@{1}".format(name, uri)) arg_dict = { "name": name, "path": path, @@ -477,13 +574,14 @@ def from_pipfile(cls, name, pipfile): "link": link, "uri_scheme": uri_scheme, } - if req: - arg_dict["req"] = req - return cls(**arg_dict) + if link.scheme != "file" and not pipfile.get("editable", False): + arg_dict["line"] = "{0}@ {1}".format(name, link.url_without_fragment) + return cls.create(**arg_dict) @property def line_part(self): if self._uri_scheme and self._uri_scheme == "path": + # We may need any one of these for passing to pip seed = self.path or unquote(self.link.url_without_fragment) or self.uri elif (self._uri_scheme and self._uri_scheme == "file") or ( (self.link.is_artifact or self.link.is_wheel) and self.link.url @@ -491,16 +589,16 @@ def line_part(self): seed = unquote(self.link.url_without_fragment) or self.uri # add egg fragments to remote artifacts (valid urls only) if not self._has_hashed_name and self.is_remote_artifact: - if not self.link.is_wheel and self.link.is_artifact: - seed = "{0}@{1}".format(self.name, seed) - else: - seed += "#egg={0}".format(self.name) + seed += "#egg={0}".format(self.name) editable = "-e " if self.editable else "" return "{0}{1}".format(editable, seed) @property def pipfile_part(self): - excludes = ["_base_line", "_has_hashed_name", "setup_path"] + excludes = [ + "_base_line", "_has_hashed_name", "setup_path", "pyproject_path", + 
"pyproject_requires", "pyproject_backend", "setup_info" + ] filter_func = lambda k, v: bool(v) is True and k.name not in excludes pipfile_dict = attr.asdict(self, filter=filter_func).copy() name = pipfile_dict.pop("name") @@ -687,10 +785,19 @@ def get_vcs_repo(self, src_dir=None): ) if not self.is_local: vcsrepo.obtain() + pyproject_info = None if self.subdirectory: self.setup_path = os.path.join(checkout_dir, self.subdirectory, "setup.py") + self.pyproject_path = os.path.join(checkout_dir, self.subdirectory, "pyproject.toml") + pyproject_info = get_pyproject(os.path.join(checkout_dir, self.subdirectory)) else: self.setup_path = os.path.join(checkout_dir, "setup.py") + self.pyproject_path = os.path.join(checkout_dir, "pyproject.toml") + pyproject_info = get_pyproject(checkout_dir) + if pyproject_info is not None: + pyproject_requires, pyproject_backend = pyproject_info + self.pyproject_requires = pyproject_requires + self.pyproject_backend = pyproject_backend return vcsrepo def get_commit_hash(self): @@ -846,7 +953,10 @@ def _choose_vcs_source(pipfile): @property def pipfile_part(self): - excludes = ["_repo", "_base_line", "setup_path", "_has_hashed_name"] + excludes = [ + "_repo", "_base_line", "setup_path", "_has_hashed_name", "pyproject_path", + "pyproject_requires", "pyproject_backend", "setup_info" + ] filter_func = lambda k, v: bool(v) is True and k.name not in excludes pipfile_dict = attr.asdict(self, filter=filter_func).copy() if "vcs" in pipfile_dict: @@ -952,7 +1062,6 @@ def from_line(cls, line): line = line.split(" ", 1)[1] if editable else line line, markers = split_markers_from_line(line) line, extras = pip_shims.shims._strip_extras(line) - specifiers = "" if extras: extras = parse_extras(extras) line = line.strip('"').strip("'").strip() @@ -984,7 +1093,6 @@ def from_line(cls, line): spec_idx = min((line.index(match) for match in spec_matches)) name = line[:spec_idx] version = line[spec_idx:] - specifiers = version if not extras: name, extras = 
pip_shims.shims._strip_extras(name) if extras: @@ -995,7 +1103,7 @@ def from_line(cls, line): req_markers = None if markers: req_markers = PackagingRequirement("fakepkg; {0}".format(markers)) - r.req.marker = getattr(req_markers, "marker", None) + r.req.marker = getattr(req_markers, "marker", None) if req_markers else None r.req.local_file = getattr(r.req, "local_file", False) name = getattr(r.req, "name", None) if not name: @@ -1021,7 +1129,15 @@ def from_line(cls, line): args["extras"] = sorted(dedup([extra.lower() for extra in r.extras])) if hashes: args["hashes"] = hashes - return cls(**args) + cls_inst = cls(**args) + if not cls_inst.is_named and (not cls_inst.editable or cls_inst.req._has_hashed_name): + old_name = cls_inst.req.req.name or cls_inst.req.name + info_dict = cls_inst.run_requires() + calced_name = info_dict.get("name", old_name) + if old_name != calced_name: + cls_inst.req.req.line.replace(old_name, calced_name) + cls_inst.name = cls_inst.req.name = calced_name + return cls_inst @classmethod def from_ireq(cls, ireq): @@ -1074,6 +1190,22 @@ def from_pipfile(cls, name, pipfile): cls_inst = cls(**args) if cls_inst.is_named: cls_inst.req.req.line = cls_inst.as_line() + old_name = cls_inst.req.req.name or cls_inst.req.name + if not cls_inst.is_named and not cls_inst.editable and not name: + if cls_inst.is_vcs: + import pip_shims.shims + ireq = pip_shims.shims.install_req_from_req(cls_inst.as_line(include_hashes=False)) + info = SetupInfo.from_ireq(ireq) + if info is not None: + info_dict = info.as_dict() + cls_inst.req.setup_info = info + else: + info_dict = {} + else: + info_dict = cls_inst.run_requires() + found_name = info_dict.get("name", old_name) + if old_name != found_name: + cls_inst.req.req.line.replace(old_name, found_name) return cls_inst def as_line( @@ -1159,6 +1291,10 @@ def get_requirement(self): def constraint_line(self): return self.as_line() + @property + def is_direct_url(self): + return self.is_file_or_url and 
self.req.is_direct_url + def as_pipfile(self): good_keys = ( "hashes", @@ -1294,6 +1430,26 @@ def find_all_matches(self, sources=None, finder=None): finder = get_finder(sources=sources) return find_all_matches(finder, self.as_ireq()) + def run_requires(self, sources=None, finder=None): + if self.req and self.req.setup_info is not None: + info_dict = self.req.setup_info.as_dict() + else: + from .setup_info import SetupInfo + if not finder: + from .dependencies import get_finder + finder = get_finder(sources=sources) + info = SetupInfo.from_requirement(self, finder=finder) + if info is None: + return {} + info_dict = info.get_info() + if self.req and not self.req.setup_info: + self.req.setup_info = info + if self.req._has_hashed_name and info_dict.get("name"): + self.req.name = self.name = info_dict["name"] + if self.req.req.name != info_dict["name"]: + self.req.req.name = info_dict["name"] + return info_dict + def merge_markers(self, markers): if not isinstance(markers, Marker): markers = Marker(markers) diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py new file mode 100644 index 0000000000..319dd6bdcb --- /dev/null +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -0,0 +1,378 @@ +# -*- coding=utf-8 -*- +import configparser +import contextlib +import os + +import attr +import packaging.version +import packaging.specifiers +import packaging.utils + +try: + from setuptools.dist import distutils +except ImportError: + import distutils + +from appdirs import user_cache_dir +from six.moves.urllib.parse import unquote +from vistir.compat import Path +from vistir.contextmanagers import cd +from vistir.path import create_tracked_tempdir, ensure_mkdir_p, mkdir_p + +from .utils import init_requirement, get_pyproject + +try: + from os import scandir +except ImportError: + from scandir import scandir + + +CACHE_DIR = os.environ.get("PIPENV_CACHE_DIR", user_cache_dir("pipenv")) + + 
+@contextlib.contextmanager +def _suppress_distutils_logs(): + """Hack to hide noise generated by `setup.py develop`. + + There isn't a good way to suppress them now, so let's monkey-patch. + See https://bugs.python.org/issue25392. + """ + + f = distutils.log.Log._log + + def _log(log, level, msg, args): + if level >= distutils.log.ERROR: + f(log, level, msg, args) + + distutils.log.Log._log = _log + yield + distutils.log.Log._log = f + + +@ensure_mkdir_p(mode=0o775) +def _get_src_dir(): + src = os.environ.get("PIP_SRC") + if src: + return src + virtual_env = os.environ.get("VIRTUAL_ENV") + if virtual_env: + return os.path.join(virtual_env, "src") + return os.path.join(os.getcwd(), "src") # Match pip's behavior. + + +def _prepare_wheel_building_kwargs(ireq): + download_dir = os.path.join(CACHE_DIR, "pkgs") + mkdir_p(download_dir) + + wheel_download_dir = os.path.join(CACHE_DIR, "wheels") + mkdir_p(wheel_download_dir) + + if ireq.source_dir is not None: + src_dir = ireq.source_dir + elif ireq.editable: + src_dir = _get_src_dir() + else: + src_dir = create_tracked_tempdir(prefix="reqlib-src") + + # This logic matches pip's behavior, although I don't fully understand the + # intention. I guess the idea is to build editables in-place, otherwise out + # of the source tree?
+ if ireq.editable: + build_dir = src_dir + else: + build_dir = create_tracked_tempdir(prefix="reqlib-build") + + return { + "build_dir": build_dir, + "src_dir": src_dir, + "download_dir": download_dir, + "wheel_download_dir": wheel_download_dir, + } + + +def iter_egginfos(path, pkg_name=None): + for entry in scandir(path): + if entry.is_dir(): + if not entry.name.endswith("egg-info"): + for dir_entry in iter_egginfos(entry.path, pkg_name=pkg_name): + yield dir_entry + elif pkg_name is None or entry.name.startswith(pkg_name): + yield entry + + +def find_egginfo(target, pkg_name=None): + egg_dirs = (egg_dir for egg_dir in iter_egginfos(target, pkg_name=pkg_name)) + if pkg_name: + return next(iter(egg_dirs), None) + else: + for egg_dir in egg_dirs: + yield egg_dir + + +def get_metadata(path, pkg_name=None): + if pkg_name: + pkg_name = packaging.utils.canonicalize_name(pkg_name) + egg_dir = next(iter(find_egginfo(path, pkg_name=pkg_name)), None) + if egg_dir is not None: + import pkg_resources + + egg_dir = os.path.abspath(egg_dir) + base_dir = os.path.dirname(egg_dir) + path_metadata = pkg_resources.PathMetadata(base_dir, egg_dir) + dist = next( + iter(pkg_resources.distributions_from_metadata(path_metadata.egg_info)), + None, + ) + if dist: + requires = dist.requires() + dep_map = dist._build_dep_map() + deps = [] + for k in dep_map.keys(): + if k is None: + deps.extend(dep_map.get(k)) + continue + else: + _deps = dep_map.get(k) + k = k.replace(":", "; ") + _deps = [ + pkg_resources.Requirement.parse("{0}{1}".format(str(req), k)) + for req in _deps + ] + deps.extend(_deps) + return { + "name": dist.project_name, + "version": dist.version, + "requires": requires, + } + + +@attr.s(slots=True) +class SetupInfo(object): + name = attr.ib(type=str, default=None) + base_dir = attr.ib(type=Path, default=None) + version = attr.ib(type=packaging.version.Version, default=None) + extras = attr.ib(type=list, default=attr.Factory(list)) + requires = attr.ib(type=dict, 
default=attr.Factory(dict)) + build_requires = attr.ib(type=list, default=attr.Factory(list)) + build_backend = attr.ib(type=list, default=attr.Factory(list)) + setup_requires = attr.ib(type=dict, default=attr.Factory(list)) + python_requires = attr.ib(type=packaging.specifiers.SpecifierSet, default=None) + extras = attr.ib(type=dict, default=attr.Factory(dict)) + setup_cfg = attr.ib(type=Path, default=None) + setup_py = attr.ib(type=Path, default=None) + pyproject = attr.ib(type=Path, default=None) + ireq = attr.ib(default=None) + extra_kwargs = attr.ib(default=attr.Factory(dict), type=dict) + + def parse_setup_cfg(self): + if self.setup_cfg is not None and self.setup_cfg.exists(): + default_opts = { + "metadata": {"name": "", "version": ""}, + "options": { + "install_requires": "", + "python_requires": "", + "build_requires": "", + "setup_requires": "", + "extras": "", + }, + } + parser = configparser.ConfigParser(default_opts) + parser.read(self.setup_cfg.as_posix()) + if parser.has_option("metadata", "name"): + name = parser.get("metadata", "name") + if not self.name and name is not None: + self.name = name + if parser.has_option("metadata", "version"): + version = parser.get("metadata", "version") + if not self.version and version is not None: + self.version = version + if parser.has_option("options", "install_requires"): + self.requires.update( + { + dep.strip(): init_requirement(dep.strip()) + for dep in parser.get("options", "install_requires").split("\n") + if dep + } + ) + if parser.has_option("options", "python_requires"): + python_requires = parser.get("options", "python_requires") + if python_requires and not self.python_requires: + self.python_requires = python_requires + if parser.has_option("options", "extras_require"): + self.extras.update( + { + section: [ + dep.strip() + for dep in parser.get( + "options.extras_require", section + ).split("\n") + if dep + ] + for section in parser.options("options.extras_require") + } + ) + + def run_setup(self): 
+ if self.setup_py is not None and self.setup_py.exists(): + with cd(self.setup_py.parent), _suppress_distutils_logs(): + from setuptools.dist import distutils + + dist = distutils.core.run_setup( + self.setup_py.as_posix(), ["egg_info", "--egg-base", self.base_dir] + ) + name = dist.get_name() + if name: + self.name = name + if dist.python_requires and not self.python_requires: + self.python_requires = packaging.specifiers.SpecifierSet( + dist.python_requires + ) + if dist.extras_require and not self.extras: + self.extras = dist.extras_require + install_requires = dist.get_requires() + if not install_requires: + install_requires = dist.install_requires + if install_requires and not self.requires: + requirements = [init_requirement(req) for req in install_requires] + self.requires.update({req.key: req for req in requirements}) + if dist.setup_requires and not self.setup_requires: + self.setup_requires = dist.setup_requires + if not self.version: + self.version = dist.get_version() + + def get_egg_metadata(self): + if self.setup_py is not None and self.setup_py.exists(): + metadata = get_metadata(self.setup_py.parent.as_posix(), pkg_name=self.name) + if metadata: + if not self.name: + self.name = metadata.get("name", self.name) + if not self.version: + self.version = metadata.get("version", self.version) + self.requires.update( + {req.key: req for req in metadata.get("requires", {})} + ) + + def run_pyproject(self): + if self.pyproject and self.pyproject.exists(): + result = get_pyproject(self.pyproject.parent) + if result is not None: + requires, backend = result + if backend: + self.build_backend = backend + if requires and not self.build_requires: + self.build_requires = requires + + def get_info(self): + if self.setup_cfg and self.setup_cfg.exists(): + self.parse_setup_cfg() + if self.setup_py and self.setup_py.exists(): + if not self.requires or not self.name: + try: + self.run_setup() + except Exception as e: + self.get_egg_metadata() + if not self.requires or 
not self.name: + self.get_egg_metadata() + + if self.pyproject and self.pyproject.exists(): + self.run_pyproject() + return self.as_dict() + + def as_dict(self): + prop_dict = { + "name": self.name, + "version": self.version, + "base_dir": self.base_dir, + "ireq": self.ireq, + "build_backend": self.build_backend, + "build_requires": self.build_requires, + "requires": self.requires, + "setup_requires": self.setup_requires, + "python_requires": self.python_requires, + "extras": self.extras, + "extra_kwargs": self.extra_kwargs, + "setup_cfg": self.setup_cfg, + "setup_py": self.setup_py, + "pyproject": self.pyproject, + } + return {k: v for k, v in prop_dict.items() if v} + + @classmethod + def from_requirement(cls, requirement, finder=None): + ireq = requirement.as_ireq() + subdir = getattr(requirement.req, "subdirectory", None) + return cls.from_ireq(ireq, subdir=subdir, finder=finder) + + @classmethod + def from_ireq(cls, ireq, subdir=None, finder=None): + import pip_shims.shims + + if ireq.link.is_wheel: + return + if not finder: + from .dependencies import get_finder + + finder = get_finder() + kwargs = _prepare_wheel_building_kwargs(ireq) + ireq.populate_link(finder, False, False) + ireq.ensure_has_source_dir(kwargs["build_dir"]) + if not ( + ireq.editable + and pip_shims.shims.is_file_url(ireq.link) + and not ireq.link.is_artifact + ): + if ireq.is_wheel: + only_download = True + download_dir = kwargs["wheel_download_dir"] + else: + only_download = False + download_dir = kwargs["download_dir"] + ireq_src_dir = None + if ireq.link.scheme == "file": + path = pip_shims.shims.url_to_path(unquote(ireq.link.url_without_fragment)) + if pip_shims.shims.is_installable_dir(path): + ireq_src_dir = path + if not ireq.editable or not (pip_shims.is_file_url(ireq.link) and ireq_src_dir): + pip_shims.shims.unpack_url( + ireq.link, + ireq.source_dir, + download_dir, + only_download=only_download, + session=finder.session, + hashes=ireq.hashes(False), + progress_bar="off", + ) + 
if ireq.editable: + created = cls.create( + ireq.source_dir, subdirectory=subdir, ireq=ireq, kwargs=kwargs + ) + else: + build_dir = ireq.build_location(kwargs["build_dir"]) + ireq._temp_build_dir.path = kwargs["build_dir"] + created = cls.create( + build_dir, subdirectory=subdir, ireq=ireq, kwargs=kwargs + ) + created.get_info() + return created + + @classmethod + def create(cls, base_dir, subdirectory=None, ireq=None, kwargs=None): + if not base_dir or base_dir is None: + return + + creation_kwargs = {"extra_kwargs": kwargs} + if not isinstance(base_dir, Path): + base_dir = Path(base_dir) + creation_kwargs["base_dir"] = base_dir.as_posix() + pyproject = base_dir.joinpath("pyproject.toml") + + if subdirectory is not None: + base_dir = base_dir.joinpath(subdirectory) + setup_py = base_dir.joinpath("setup.py") + setup_cfg = base_dir.joinpath("setup.cfg") + creation_kwargs["pyproject"] = pyproject + creation_kwargs["setup_py"] = setup_py + creation_kwargs["setup_cfg"] = setup_cfg + if ireq: + creation_kwargs["ireq"] = ireq + return cls(**creation_kwargs) diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index aa7ffd681c..2b47ee9bb0 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import +import io import os import sys @@ -9,6 +10,7 @@ from operator import attrgetter import six +import tomlkit from attr import validators from first import first @@ -17,7 +19,7 @@ from vistir.misc import dedup -from ..utils import SCHEME_LIST, VCS_LIST, is_star, strip_ssh_from_git_uri, add_ssh_scheme_to_git_uri +from ..utils import SCHEME_LIST, VCS_LIST, is_star, add_ssh_scheme_to_git_uri HASH_STRING = " --hash={0}" @@ -93,6 +95,7 @@ def build_vcs_link(vcs, uri, name=None, ref=None, subdirectory=None, extras=None if extras: extras = extras_to_string(extras) uri = "{0}{1}".format(uri, extras) + # if 
subdirectory: if subdirectory: uri = "{0}&subdirectory={1}".format(uri, subdirectory) return create_link(uri) @@ -112,6 +115,42 @@ def get_version(pipfile_entry): return "" +def get_pyproject(path): + from vistir.compat import Path + if not path: + return + if not isinstance(path, Path): + path = Path(path) + if not path.is_dir(): + path = path.parent + pp_toml = path.joinpath("pyproject.toml") + setup_py = path.joinpath("setup.py") + if not pp_toml.exists(): + if setup_py.exists(): + return None + else: + pyproject_data = {} + with io.open(pp_toml.as_posix(), encoding="utf-8") as fh: + pyproject_data = tomlkit.loads(fh.read()) + build_system = pyproject_data.get("build-system", None) + if build_system is None: + if setup_py.exists(): + requires = ["setuptools", "wheel"] + backend = "setuptools.build_meta" + else: + requires = ["setuptools>=38.2.5", "wheel"] + backend = "setuptools.build_meta" + build_system = { + "requires": requires, + "build-backend": backend + } + pyproject_data["build_system"] = build_system + else: + requires = build_system.get("requires") + backend = build_system.get("build-backend") + return (requires, backend) + + def split_markers_from_line(line): """Split markers from a dependency""" if not any(line.startswith(uri_prefix) for uri_prefix in SCHEME_LIST): diff --git a/pipenv/vendor/requirementslib/models/vcs.py b/pipenv/vendor/requirementslib/models/vcs.py index dd8cc3a449..6a15db3f85 100644 --- a/pipenv/vendor/requirementslib/models/vcs.py +++ b/pipenv/vendor/requirementslib/models/vcs.py @@ -4,7 +4,6 @@ import pip_shims - @attr.s class VCSRepository(object): url = attr.ib() @@ -32,7 +31,7 @@ def is_local(self): def obtain(self): if (os.path.exists(self.checkout_directory) and not - self.repo_instance.is_repository_directory(self.checkout_directory)): + self.repo_instance.is_repository_directory(self.checkout_directory)): self.repo_instance.unpack(self.checkout_directory) elif not os.path.exists(self.checkout_directory): 
self.repo_instance.obtain(self.checkout_directory) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 0ab0ab22ed..6ed95b3e7c 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -56,8 +56,17 @@ def check_github_ssh(): return res +def check_for_mercurial(): + c = delegator.run("hg --help") + if c.return_code != 0: + return False + else: + return True + + TESTS_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) PYPI_VENDOR_DIR = os.path.join(TESTS_ROOT, 'pypi') +WE_HAVE_HG = check_for_mercurial() prepare_pypi_packages(PYPI_VENDOR_DIR) @@ -66,6 +75,8 @@ def pytest_runtest_setup(item): pytest.skip('requires internet') if item.get_marker('needs_github_ssh') is not None and not WE_HAVE_GITHUB_SSH_KEYS: pytest.skip('requires github ssh') + if item.get_marker('needs_hg') is not None and not WE_HAVE_HG: + pytest.skip('requires mercurial') @pytest.fixture @@ -100,6 +111,8 @@ def isolate(pathlib_tmpdir): os.environ["GIT_AUTHOR_EMAIL"] = fs_str("pipenv@pipenv.org") mkdir_p(os.path.join(home_dir, ".virtualenvs")) os.environ["WORKON_HOME"] = fs_str(os.path.join(home_dir, ".virtualenvs")) + global WE_HAVE_GITHUB_SSH_KEYS + WE_HAVE_GITHUB_SSH_KEYS = check_github_ssh() WE_HAVE_INTERNET = check_internet() diff --git a/tests/integration/test_uninstall.py b/tests/integration/test_uninstall.py index e19a140027..5f493cac9b 100644 --- a/tests/integration/test_uninstall.py +++ b/tests/integration/test_uninstall.py @@ -84,7 +84,7 @@ def test_uninstall_all_local_files(PipenvInstance, testsroot): # Not sure where travis/appveyor run tests from source_path = os.path.abspath(os.path.join(testsroot, "test_artifacts", file_name)) - with PipenvInstance() as p: + with PipenvInstance(chdir=True) as p: shutil.copy(source_path, os.path.join(p.path, file_name)) os.mkdir(os.path.join(p.path, "requests")) c = p.pipenv("install {}".format(file_name)) @@ -92,7 +92,9 @@ def test_uninstall_all_local_files(PipenvInstance, testsroot): 
c = p.pipenv("uninstall --all") assert c.return_code == 0 assert "requests" in c.out - assert "requests" not in p.pipfile["packages"] + # Uninstall --all is not supposed to remove things from the pipfile + # Note that it didn't before, but that instead local filenames showed as hashes + assert "requests" in p.pipfile["packages"] @pytest.mark.run From dec7be54d716d078390232c5ac5a78991ca3e0b1 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 16:39:23 -0500 Subject: [PATCH 04/23] Introduce `pipenv.environments.Environment` - Specific construct for isolationg operations Signed-off-by: Dan Ryan --- pipenv/core.py | 107 ++++---- pipenv/environment.py | 618 ++++++++++++++++++++++++++++++++++++++++++ pipenv/project.py | 176 +++--------- 3 files changed, 713 insertions(+), 188 deletions(-) create mode 100644 pipenv/environment.py diff --git a/pipenv/core.py b/pipenv/core.py index 3cbd1645f1..021b0f3ca1 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -915,7 +915,15 @@ def do_create_virtualenv(python=None, site_packages=False, pypi_mirror=None): project_file_name = os.path.join(project.virtualenv_location, ".project") with open(project_file_name, "w") as f: f.write(vistir.misc.fs_str(project.project_directory)) - fix_venv_site(project.env_paths["lib"]) + from .environment import Environment + sources = project.pipfile_sources + project._environment = Environment( + prefix=project.get_location_for_virtualenv(), + is_venv=True, + sources=sources, + pipfile=project.parsed_pipfile + ) + project._environment.add_dist("pipenv") # Say where the virtualenv is. do_where(virtualenv=True, bare=False) @@ -1129,7 +1137,7 @@ def do_purge(bare=False, downloads=False, allow_global=False): # Remove comments from the output, if any. 
installed = set([ - pep423_name(pkg.project_name) for pkg in project.get_installed_packages() + pep423_name(pkg.project_name) for pkg in project.environment.get_installed_packages() ]) bad_pkgs = set([pep423_name(pkg) for pkg in BAD_PACKAGES]) # Remove setuptools, pip, etc from targets for removal @@ -1662,7 +1670,7 @@ def do_outdated(pypi_mirror=None): packages = {} package_info = namedtuple("PackageInfo", ["name", "installed", "available"]) - installed_packages = project.get_installed_packages() + installed_packages = project.environment.get_installed_packages() outdated_packages = { canonicalize_name(pkg.project_name): package_info (pkg.project_name, pkg.parsed_version, pkg.latest_version) @@ -1916,7 +1924,14 @@ def do_install( # make a tuple of (display_name, entry) pkg_list = packages + ["-e {0}".format(pkg) for pkg in editable_packages] - + if not system and not project.virtualenv_exists: + with create_spinner("Creating virtualenv...") as sp: + try: + do_create_virtualenv(pypi_mirror=pypi_mirror) + except KeyboardInterrupt: + cleanup_virtualenv(bare=(not environments.is_verbose())) + sys.exit(1) + sp.write_err("Ok...") for pkg_line in pkg_list: click.echo( crayons.normal( @@ -1925,8 +1940,7 @@ def do_install( ) ) # pip install: - with vistir.contextmanagers.temp_environ(), \ - create_spinner("Installing...") as sp: + with vistir.contextmanagers.temp_environ(), create_spinner("Installing...") as sp: os.environ["PIP_USER"] = vistir.compat.fs_str("0") try: pkg_requirement = Requirement.from_line(pkg_line) @@ -2055,30 +2069,17 @@ def do_uninstall( package_map = { canonicalize_name(p): p for p in packages if p } - installed_package_names = set([ - canonicalize_name(pkg.project_name) for pkg in project.get_installed_packages() - ]) + installed_package_names = project.installed_package_names # Intelligently detect if --dev should be used or not. 
lockfile_packages = set() if project.lockfile_exists: - develop = set( - [canonicalize_name(k) for k in project.lockfile_content["develop"].keys()] - ) - default = set( - [canonicalize_name(k) for k in project.lockfile_content["default"].keys()] - ) - lockfile_packages |= develop | default + project_pkg_names = project.lockfile_package_names else: - develop = set( - [canonicalize_name(k) for k in project.dev_packages.keys()] - ) - default = set( - [canonicalize_name(k) for k in project.packages.keys()] - ) + project_pkg_names = project.pipfile_package_names pipfile_remove = True # Uninstall [dev-packages], if --dev was provided. if all_dev: - if "dev-packages" not in project.parsed_pipfile and not develop: + if "dev-packages" not in project.parsed_pipfile and not project_pkg_names["dev"]: click.echo( crayons.normal( "No {0} to uninstall.".format(crayons.red("[dev-packages]")), @@ -2091,28 +2092,33 @@ def do_uninstall( fix_utf8("Un-installing {0}…".format(crayons.red("[dev-packages]"))), bold=True ) ) - package_names = develop + package_names = project_pkg_names["dev"] + # Remove known "bad packages" from the list. 
- bad_pkgs = set([canonicalize_name(pkg) for pkg in BAD_PACKAGES]) - for bad_package in BAD_PACKAGES: - normalized_bad_pkg = canonicalize_name(bad_package) - if normalized_bad_pkg in package_map: - if environments.is_verbose(): - click.echo("Ignoring {0}.".format(bad_package), err=True) - pkg_name_index = package_names.index(package_map[normalized_bad_pkg]) - del package_names[pkg_name_index] - used_packages = develop | default & installed_package_names + bad_pkgs = get_canonical_names(BAD_PACKAGES) + ignored_packages = bad_pkgs & set(list(package_map.keys())) + for ignored_pkg in ignored_packages: + if environments.is_verbose(): + click.echo("Ignoring {0}.".format(ignored_pkg), err=True) + pkg_name_index = package_names.index(package_map[ignored_pkg]) + del package_names[pkg_name_index] + + used_packages = project_pkg_names["combined"] & installed_package_names failure = False packages_to_remove = set() if all: - package_names = develop | default click.echo( - crayons.normal(fix_utf8("Un-installing all packages from virtualenv…"), bold=True) + crayons.normal( + fix_utf8("Un-installing all {0} and {1}…".format( + crayons.red("[dev-packages]"), + crayons.red("[packages]"), + )), bold=True + ) ) - do_purge(allow_global=system) - return + do_purge(bare=False, allow_global=system) + sys.exit(0) if all_dev: - package_names = develop + package_names = project_pkg_names["dev"] else: package_names = set([pkg_name for pkg_name in package_names]) selected_pkg_map = { @@ -2120,7 +2126,7 @@ def do_uninstall( } packages_to_remove = [ p for normalized, p in selected_pkg_map.items() - if (normalized in used_packages and normalized not in bad_pkgs) + if normalized in (used_packages - bad_pkgs) ] for normalized, package_name in selected_pkg_map.items(): click.echo( @@ -2130,15 +2136,16 @@ def do_uninstall( ) # Uninstall the package. 
if package_name in packages_to_remove: - cmd = "{0} uninstall {1} -y".format( - escape_grouped_arguments(which_pip(allow_global=system)), package_name, - ) - if environments.is_verbose(): - click.echo("$ {0}".format(cmd)) - c = delegator.run(cmd) - click.echo(crayons.blue(c.out)) - if c.return_code != 0: - failure = True + with project.environment.activated(): + cmd = "{0} uninstall {1} -y".format( + escape_grouped_arguments(which_pip(allow_global=system)), package_name, + ) + if environments.is_verbose(): + click.echo("$ {0}".format(cmd)) + c = delegator.run(cmd) + click.echo(crayons.blue(c.out)) + if c.return_code != 0: + failure = True if not failure and pipfile_remove: in_packages = project.get_package_name_in_pipfile(package_name, dev=False) in_dev_packages = project.get_package_name_in_pipfile( @@ -2646,9 +2653,9 @@ def do_clean(ctx, three=None, python=None, dry_run=False, bare=False, pypi_mirro ensure_lockfile(pypi_mirror=pypi_mirror) # Make sure that the virtualenv's site packages are configured correctly # otherwise we may end up removing from the global site packages directory - fix_venv_site(project.env_paths["lib"]) installed_package_names = [ - canonicalize_name(pkg.project_name) for pkg in project.get_installed_packages() + canonicalize_name(pkg.project_name) for pkg + in project.environment.get_installed_packages() ] # Remove known "bad packages" from the list. 
for bad_package in BAD_PACKAGES: diff --git a/pipenv/environment.py b/pipenv/environment.py new file mode 100644 index 0000000000..db0e22aa40 --- /dev/null +++ b/pipenv/environment.py @@ -0,0 +1,618 @@ +# -*- coding=utf-8 -*- + +import contextlib +import importlib +import json +import os +import sys +import operator +import pkg_resources +import six + +from distutils.sysconfig import get_python_lib +from sysconfig import get_paths + +from cached_property import cached_property + +import vistir +import pipenv + +BASE_WORKING_SET = pkg_resources.WorkingSet(sys.path) + + +class Environment(object): + def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=None, sources=None): + super(Environment, self).__init__() + self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} + self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET + self.is_venv = not os.path.samefile(os.path.abspath(prefix), sys.prefix) + if not sources: + sources = [] + self.sources = sources + self.extra_dists = [] + prefix = prefix if prefix else sys.prefix + self.prefix = vistir.compat.Path(prefix) + + def safe_import(self, name): + """Helper utility for reimporting previously imported modules while inside the env""" + module = None + if name not in self._modules: + self._modules[name] = importlib.import_module(name) + module = self._modules[name] + if not module: + dist = next(iter( + dist for dist in self.base_working_set if dist.project_name == name + ), None) + if dist: + dist.activate() + module = importlib.import_module(name) + if name in sys.modules: + try: + six.moves.reload_module(module) + six.moves.reload_module(sys.modules[name]) + except TypeError: + del sys.modules[name] + sys.modules[name] = self._modules[name] + return module + + @classmethod + def resolve_dist(cls, dist, working_set): + """Given a local distribution and a working set, returns all dependencies from the set. 
+ + :param dist: A single distribution to find the dependencies of + :type dist: :class:`pkg_resources.Distribution` + :param working_set: A working set to search for all packages + :type working_set: :class:`pkg_resources.WorkingSet` + :return: A set of distributions which the package depends on, including the package + :rtype: set(:class:`pkg_resources.Distribution`) + """ + + deps = set() + deps.add(dist) + try: + reqs = dist.requires() + except AttributeError: + return deps + for req in reqs: + dist = working_set.find(req) + deps |= cls.resolve_dist(dist, working_set) + return deps + + def add_dist(self, dist_name): + dist = pkg_resources.get_distribution(pkg_resources.Requirement(dist_name)) + extras = self.resolve_dist(dist, self.base_working_set) + if extras: + self.extra_dists.extend(extras) + + @cached_property + def python_version(self): + with self.activated(): + from sysconfig import get_python_version + py_version = get_python_version() + return py_version + + @property + def python_info(self): + include_dir = self.prefix / "include" + python_path = next(iter(list(include_dir.iterdir())), None) + if python_path and python_path.name.startswith("python"): + python_version = python_path.name.replace("python", "") + py_version_short, abiflags = python_version[:3], python_version[3:] + return {"py_version_short": py_version_short, "abiflags": abiflags} + return {} + + @cached_property + def base_paths(self): + """ + Returns the context appropriate paths for the environment. + + :return: A dictionary of environment specific paths to be used for installation operations + :rtype: dict + + .. note:: The implementation of this is borrowed from a combination of pip and + virtualenv and is likely to change at some point in the future. 
+ + >>> from pipenv.core import project + >>> from pipenv.environment import Environment + >>> env = Environment(prefix=project.virtualenv_location, is_venv=True, sources=project.sources) + >>> import pprint + >>> pprint.pprint(env.base_paths) + {'PATH': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW/bin::/bin:/usr/bin', + 'PYTHONPATH': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW/lib/python3.7/site-packages', + 'data': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW', + 'include': '/home/hawk/.pyenv/versions/3.7.1/include/python3.7m', + 'libdir': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW/lib/python3.7/site-packages', + 'platinclude': '/home/hawk/.pyenv/versions/3.7.1/include/python3.7m', + 'platlib': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW/lib/python3.7/site-packages', + 'platstdlib': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW/lib/python3.7', + 'prefix': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW', + 'purelib': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW/lib/python3.7/site-packages', + 'scripts': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW/bin', + 'stdlib': '/home/hawk/.pyenv/versions/3.7.1/lib/python3.7'} + """ + + prefix = self.prefix.as_posix() + install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' + paths = get_paths(install_scheme, vars={ + 'base': prefix, + 'platbase': prefix, + }) + paths["PATH"] = paths["scripts"] + os.pathsep + os.defpath + if "prefix" not in paths: + paths["prefix"] = prefix + purelib = get_python_lib(plat_specific=0, prefix=prefix) + platlib = get_python_lib(plat_specific=1, prefix=prefix) + if purelib == platlib: + lib_dirs = purelib + else: + lib_dirs = purelib + os.pathsep + platlib + paths["libdir"] = purelib + paths["purelib"] = purelib + paths["platlib"] = platlib + paths['PYTHONPATH'] = lib_dirs + paths["libdirs"] = lib_dirs + return paths + + @cached_property + def script_basedir(self): + """Path to the environment scripts dir""" + script_dir = self.base_paths["scripts"] + return script_dir + + @property + def python(self): + """Path to the 
environment python""" + py = vistir.compat.Path(self.base_paths["scripts"]).joinpath("python").as_posix() + if not py: + return vistir.compat.Path(sys.executable).as_posix() + return py + + @cached_property + def sys_path(self): + """The system path inside the environment + + :return: The :data:`sys.path` from the environment + :rtype: list + """ + + current_executable = vistir.compat.Path(sys.executable).as_posix() + if not self.python or self.python == current_executable: + return sys.path + elif any([sys.prefix == self.prefix, not self.is_venv]): + return sys.path + cmd_args = [self.python, "-c", "import json, sys; print(json.dumps(sys.path))"] + path, _ = vistir.misc.run(cmd_args, return_object=False, nospin=True, block=True, combine_stderr=False) + path = json.loads(path.strip()) + return path + + @cached_property + def system_paths(self): + paths = {} + paths = get_paths() + return paths + + @cached_property + def sys_prefix(self): + """The prefix run inside the context of the environment + + :return: The python prefix inside the environment + :rtype: :data:`sys.prefix` + """ + + command = [self.python, "-c" "import sys; print(sys.prefix)"] + c = vistir.misc.run(command, return_object=True, block=True, nospin=True) + sys_prefix = vistir.compat.Path(vistir.misc.to_text(c.out).strip()).as_posix() + return sys_prefix + + @cached_property + def paths(self): + paths = {} + with vistir.contextmanagers.temp_environ(), vistir.contextmanagers.temp_path(): + os.environ["PYTHONIOENCODING"] = vistir.compat.fs_str("utf-8") + os.environ["PYTHONDONTWRITEBYTECODE"] = vistir.compat.fs_str("1") + paths = self.base_paths + os.environ["PATH"] = paths["PATH"] + os.environ["PYTHONPATH"] = paths["PYTHONPATH"] + if "headers" not in paths: + paths["headers"] = paths["include"] + return paths + + @property + def scripts_dir(self): + return self.paths["scripts"] + + @property + def libdir(self): + purelib = self.paths.get("purelib", None) + if purelib and os.path.exists(purelib): + 
return "purelib", purelib + return "platlib", self.paths["platlib"] + + def get_distributions(self): + """Retrives the distributions installed on the library path of the environment + + :return: A set of distributions found on the library path + :rtype: iterator + """ + + pkg_resources = self.safe_import("pkg_resources") + return pkg_resources.find_distributions(self.paths["PYTHONPATH"]) + + def find_egg(self, egg_dist): + import site + site_packages = get_python_lib() + search_filename = "{0}.egg-link".format(egg_dist.project_name) + try: + user_site = site.getusersitepackages() + except AttributeError: + user_site = site.USER_SITE + search_locations = [site_packages, user_site] + for site_directory in search_locations: + egg = os.path.join(site_directory, search_filename) + if os.path.isfile(egg): + return egg + + def locate_dist(self, dist): + location = self.find_egg(dist) + if not location: + return dist.location + + def dist_is_in_project(self, dist): + from .project import _normalized + prefix = _normalized(self.base_paths["prefix"]) + location = self.locate_dist(dist) + if not location: + return False + return _normalized(location).startswith(prefix) + + def get_installed_packages(self): + workingset = self.get_working_set() + packages = [pkg for pkg in workingset if self.dist_is_in_project(pkg)] + return packages + + def get_finder(self): + from .vendor.pip_shims import Command, cmdoptions, index_group, PackageFinder + from .environments import PIPENV_CACHE_DIR + index_urls = [source.get("url") for source in self.sources] + + class PipCommand(Command): + name = "PipCommand" + + pip_command = PipCommand() + index_opts = cmdoptions.make_option_group( + index_group, pip_command.parser + ) + cmd_opts = pip_command.cmd_opts + pip_command.parser.insert_option_group(0, index_opts) + pip_command.parser.insert_option_group(0, cmd_opts) + pip_args = self._modules["pipenv"].utils.prepare_pip_source_args(self.sources, []) + pip_options, _ = 
pip_command.parser.parse_args(pip_args) + pip_options.cache_dir = PIPENV_CACHE_DIR + pip_options.pre = self.pipfile.get("pre", False) + with pip_command._build_session(pip_options) as session: + finder = PackageFinder( + find_links=pip_options.find_links, + index_urls=index_urls, allow_all_prereleases=pip_options.pre, + trusted_hosts=pip_options.trusted_hosts, + process_dependency_links=pip_options.process_dependency_links, + session=session + ) + yield finder + + def get_package_info(self): + dependency_links = [] + packages = self.get_installed_packages() + # This code is borrowed from pip's current implementation + for dist in packages: + if dist.has_metadata('dependency_links.txt'): + dependency_links.extend(dist.get_metadata_lines('dependency_links.txt')) + + with self.get_finder() as finder: + finder.add_dependency_links(dependency_links) + + for dist in packages: + typ = 'unknown' + all_candidates = finder.find_all_candidates(dist.key) + if not finder.pip_options.pre: + # Remove prereleases + all_candidates = [ + candidate for candidate in all_candidates + if not candidate.version.is_prerelease + ] + + if not all_candidates: + continue + best_candidate = max(all_candidates, key=finder._candidate_sort_key) + remote_version = best_candidate.version + if best_candidate.location.is_wheel: + typ = 'wheel' + else: + typ = 'sdist' + # This is dirty but makes the rest of the code much cleaner + dist.latest_version = remote_version + dist.latest_filetype = typ + yield dist + + def get_outdated_packages(self): + return [ + pkg for pkg in self.get_package_info() + if pkg.latest_version._version > pkg.parsed_version._version + ] + + def get_package_requirements(self): + from .vendor.pipdeptree import flatten, sorted_tree, build_dist_index, construct_tree + dist_index = build_dist_index(self.get_installed_packages()) + tree = sorted_tree(construct_tree(dist_index)) + branch_keys = set(r.key for r in flatten(tree.values())) + nodes = [p for p in tree.keys() if p.key not 
in branch_keys] + key_tree = dict((k.key, v) for k, v in tree.items()) + get_children = lambda n: key_tree.get(n.key, []) + + def aux(node, parent=None, chain=None): + if chain is None: + chain = [node.project_name] + + d = node.as_dict() + if parent: + d['required_version'] = node.version_spec if node.version_spec else 'Any' + else: + d['required_version'] = d['installed_version'] + + d['dependencies'] = [ + aux(c, parent=node, chain=chain+[c.project_name]) + for c in get_children(node) + if c.project_name not in chain + ] + + return d + return [aux(p) for p in nodes] + + def get_working_set(self): + """Retrieve the working set of installed packages for the environment. + + :return: The working set for the environment + :rtype: :class:`pkg_resources.WorkingSet` + """ + + working_set = pkg_resources.WorkingSet(self.sys_path) + return working_set + + def is_installed(self, pkgname): + """Given a package name, returns whether it is installed in the environment + + :param str pkgname: The name of a package + :return: Whether the supplied package is installed in the environment + :rtype: bool + """ + + return any(d for d in self.get_distributions() if d.project_name == pkgname) + + def run(self, cmd, cwd=os.curdir): + """Run a command with :class:`~subprocess.Popen` in the context of the environment + + :param cmd: A command to run in the environment + :type cmd: str or list + :param str cwd: The working directory in which to execute the command, defaults to :data:`os.curdir` + :return: A finished command object + :rtype: :class:`~subprocess.Popen` + """ + + c = None + with self.activated(): + script = vistir.cmdparse.Script.parse(cmd) + c = vistir.misc.run(script._parts, return_object=True, nospin=True, cwd=cwd) + return c + + def run_py(self, cmd, cwd=os.curdir): + """Run a python command in the enviornment context. 
+ + :param cmd: A command to run in the environment - runs with `python -c` + :type cmd: str or list + :param str cwd: The working directory in which to execute the command, defaults to :data:`os.curdir` + :return: A finished command object + :rtype: :class:`~subprocess.Popen` + """ + + c = None + if isinstance(cmd, six.string_types): + script = vistir.cmdparse.Script.parse("{0} -c {1}".format(self.python, cmd)) + else: + script = vistir.cmdparse.Script.parse([self.python, "-c"] + list(cmd)) + with self.activated(): + c = vistir.misc.run(script._parts, return_object=True, nospin=True, cwd=cwd) + return c + + def run_activate_this(self): + """Runs the environment's inline activation script""" + if self.is_venv: + activate_this = os.path.join(self.scripts_dir, "activate_this.py") + if not os.path.isfile(activate_this): + raise OSError("No such file: {0!s}".format(activate_this)) + with open(activate_this, "r") as f: + code = compile(f.read(), activate_this, "exec") + exec(code, dict(__file__=activate_this)) + + @contextlib.contextmanager + def activated(self, include_extras=True, extra_dists=None): + """Helper context manager to activate the environment. + + This context manager will set the following variables for the duration + of its activation: + + * sys.prefix + * sys.path + * os.environ["VIRTUAL_ENV"] + * os.environ["PATH"] + + In addition, it will make any distributions passed into `extra_dists` available + on `sys.path` while inside the context manager, as well as making `passa` itself + available. + + The environment's `prefix` as well as `scripts_dir` properties are both prepended + to `os.environ["PATH"]` to ensure that calls to `~Environment.run()` use the + environment's path preferentially. 
+ """ + + if not extra_dists: + extra_dists = [] + original_path = sys.path + original_prefix = sys.prefix + parent_path = vistir.compat.Path(__file__).absolute().parent + vendor_dir = parent_path.joinpath("vendor").as_posix() + patched_dir = parent_path.joinpath("patched").as_posix() + parent_path = parent_path.as_posix() + prefix = self.prefix.as_posix() + with vistir.contextmanagers.temp_environ(), vistir.contextmanagers.temp_path(): + os.environ["PATH"] = os.pathsep.join([ + vistir.compat.fs_str(self.scripts_dir), + vistir.compat.fs_str(self.prefix.as_posix()), + os.environ.get("PATH", "") + ]) + os.environ["PYTHONIOENCODING"] = vistir.compat.fs_str("utf-8") + os.environ["PYTHONDONTWRITEBYTECODE"] = vistir.compat.fs_str("1") + os.environ["PATH"] = self.base_paths["PATH"] + os.environ["PYTHONPATH"] = self.base_paths["PYTHONPATH"] + if self.is_venv: + os.environ["VIRTUAL_ENV"] = vistir.compat.fs_str(prefix) + sys.path = self.sys_path + sys.prefix = self.sys_prefix + site = self.safe_import("site") + site.addsitedir(self.base_paths["purelib"]) + if include_extras: + site.addsitedir(parent_path) + sys.path.extend([parent_path, patched_dir, vendor_dir]) + extra_dists = list(self.extra_dists) + extra_dists + for extra_dist in extra_dists: + if extra_dist not in self.get_working_set(): + extra_dist.activate(self.sys_path) + try: + yield + finally: + sys.path = original_path + sys.prefix = original_prefix + six.moves.reload_module(pkg_resources) + + @cached_property + def finders(self): + from pipenv.vendor.pythonfinder import Finder + finders = [ + Finder(path=self.base_paths["scripts"], global_search=gs, system=False) + for gs in (False, True) + ] + return finders + + @property + def finder(self): + return next(iter(self.finders), None) + + def which(self, search, as_path=True): + find = operator.methodcaller("which", search) + result = next(iter(filter(None, (find(finder) for finder in self.finders))), None) + if not result: + result = self._which(search) + else: + 
if as_path: + result = str(result.path) + return result + + def get_install_args(self, editable=False, setup_path=None): + install_arg = "install" if not editable else "develop" + install_keys = ["headers", "purelib", "platlib", "scripts", "data"] + install_args = [ + self.environment.python, "-u", "-c", SETUPTOOLS_SHIM % setup_path, + install_arg, "--single-version-externally-managed", "--no-deps", + "--prefix={0}".format(self.base_paths["prefix"]), "--no-warn-script-location" + ] + for key in install_keys: + install_args.append( + "--install-{0}={1}".format(key, self.base_paths[key]) + ) + return install_args + + def install(self, requirements): + if not isinstance(requirements, (tuple, list)): + requirements = [requirements,] + with self.get_finder() as finder: + args = [] + for format_control in ('no_binary', 'only_binary'): + formats = getattr(finder.format_control, format_control) + args.extend(('--' + format_control.replace('_', '-'), + ','.join(sorted(formats or {':none:'})))) + if finder.index_urls: + args.extend(['-i', finder.index_urls[0]]) + for extra_index in finder.index_urls[1:]: + args.extend(['--extra-index-url', extra_index]) + else: + args.append('--no-index') + for link in finder.find_links: + args.extend(['--find-links', link]) + for _, host, _ in finder.secure_origins: + args.extend(['--trusted-host', host]) + if finder.allow_all_prereleases: + args.append('--pre') + if finder.process_dependency_links: + args.append('--process-dependency-links') + args.append('--') + args.extend(requirements) + out, _ = vistir.misc.run(args, return_object=False, nospin=True, block=True, + combine_stderr=False) + + @contextlib.contextmanager + def uninstall(self, pkgname, *args, **kwargs): + """A context manager which allows uninstallation of packages from the environment + + :param str pkgname: The name of a package to uninstall + + >>> env = Environment("/path/to/env/root") + >>> with env.uninstall("pytz", auto_confirm=True, verbose=False) as uninstaller: + 
cleaned = uninstaller.paths + >>> if cleaned: + print("uninstalled packages: %s" % cleaned) + """ + + auto_confirm = kwargs.pop("auto_confirm", True) + verbose = kwargs.pop("verbose", False) + with self.activated(): + monkey_patch = next(iter( + dist for dist in self.base_working_set + if dist.project_name == "recursive-monkey-patch" + ), None) + if monkey_patch: + monkey_patch.activate() + pip_shims = self.safe_import("pip_shims") + pathset_base = pip_shims.UninstallPathSet + import recursive_monkey_patch + recursive_monkey_patch.monkey_patch( + PatchedUninstaller, pathset_base + ) + dist = next( + iter(filter(lambda d: d.project_name == pkgname, self.get_working_set())), + None + ) + pathset = pathset_base.from_dist(dist) + if pathset is not None: + pathset.remove(auto_confirm=auto_confirm, verbose=verbose) + try: + yield pathset + except Exception as e: + if pathset is not None: + pathset.rollback() + else: + if pathset is not None: + pathset.commit() + if pathset is None: + return + + +class PatchedUninstaller(object): + def _permitted(self, path): + return True + + +SETUPTOOLS_SHIM = ( + "import setuptools, tokenize;__file__=%r;" + "f=getattr(tokenize, 'open', open)(__file__);" + "code=f.read().replace('\\r\\n', '\\n');" + "f.close();" + "exec(compile(code, __file__, 'exec'))" +) diff --git a/pipenv/project.py b/pipenv/project.py index d4713b89f2..d3b56be6d7 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -19,6 +19,7 @@ import toml import tomlkit +from .environment import Environment from .cmdparse import Script from .utils import ( pep423_name, @@ -35,7 +36,7 @@ get_workon_home, is_virtual_environment, looks_like_dir, - sys_version + get_canonical_names ) from .environments import ( PIPENV_MAX_DEPTH, @@ -45,7 +46,6 @@ PIPENV_TEST_INDEX, PIPENV_PYTHON, PIPENV_DEFAULT_PYTHON_VERSION, - PIPENV_CACHE_DIR ) @@ -154,6 +154,7 @@ def __init__(self, which=None, python_version=None, chdir=True): self._lockfile_newlines = DEFAULT_NEWLINES 
self._requirements_location = None self._original_dir = os.path.abspath(os.curdir) + self._environment = None self._which = which self._build_system = { "requires": ["setuptools", "wheel"] @@ -316,105 +317,48 @@ def working_set(self): import pkg_resources return pkg_resources.WorkingSet(sys_path) - def find_egg(self, egg_dist): - import site - from distutils import sysconfig as distutils_sysconfig - site_packages = distutils_sysconfig.get_python_lib() - search_filename = "{0}.egg-link".format(egg_dist.project_name) - try: - user_site = site.getusersitepackages() - except AttributeError: - user_site = site.USER_SITE - search_locations = [site_packages, user_site] - for site_directory in search_locations: - egg = os.path.join(site_directory, search_filename) - if os.path.isfile(egg): - return egg - - def locate_dist(self, dist): - location = self.find_egg(dist) - if not location: - return dist.location - - def dist_is_in_project(self, dist): - prefix = _normalized(self.env_paths["prefix"]) - location = self.locate_dist(dist) - if not location: - return False - return _normalized(location).startswith(prefix) - - def get_installed_packages(self): - workingset = self.working_set - if self.virtualenv_exists: - packages = [pkg for pkg in workingset if self.dist_is_in_project(pkg)] - else: - packages = [pkg for pkg in packages] - return packages + @property + def installed_packages(self): + return self.environment.get_installed_packages() - def get_package_info(self): - from .utils import prepare_pip_source_args - from .vendor.pip_shims import Command, cmdoptions, index_group, PackageFinder - index_urls = [source.get("url") for source in self.sources] + @property + def installed_package_names(self): + return get_canonical_names([pkg.key for pkg in self.installed_packages]) - class PipCommand(Command): - name = "PipCommand" + @property + def lockfile_package_names(self): + dev_keys = get_canonical_names(self.lockfile_content["develop"].keys()) + default_keys = 
get_canonical_names(self.lockfile_content["default"].keys()) + return { + "dev": dev_keys, + "default": default_keys, + "combined": dev_keys | default_keys + } - dependency_links = [] - packages = self.get_installed_packages() - # This code is borrowed from pip's current implementation - for dist in packages: - if dist.has_metadata('dependency_links.txt'): - dependency_links.extend(dist.get_metadata_lines('dependency_links.txt')) + @property + def pipfile_package_names(self): + dev_keys = get_canonical_names(self.dev_packages.keys()) + default_keys = get_canonical_names(self.packages.keys()) + return { + "dev": dev_keys, + "default": default_keys, + "combined": dev_keys | default_keys + } - pip_command = PipCommand() - index_opts = cmdoptions.make_option_group( - index_group, pip_command.parser - ) - cmd_opts = pip_command.cmd_opts - pip_command.parser.insert_option_group(0, index_opts) - pip_command.parser.insert_option_group(0, cmd_opts) - pip_args = prepare_pip_source_args(self.sources, []) - pip_options, _ = pip_command.parser.parse_args(pip_args) - pip_options.cache_dir = PIPENV_CACHE_DIR - pip_options.pre = self.settings.get("pre", False) - with pip_command._build_session(pip_options) as session: - finder = PackageFinder( - find_links=pip_options.find_links, - index_urls=index_urls, allow_all_prereleases=pip_options.pre, - trusted_hosts=pip_options.trusted_hosts, - process_dependency_links=pip_options.process_dependency_links, - session=session + @property + def environment(self): + if not self._environment: + prefix = self.get_location_for_virtualenv() + is_venv = prefix == sys.prefix + sources = self.sources.copy() if self.sources else [DEFAULT_SOURCE,] + self._environment = Environment( + prefix=prefix, is_venv=is_venv, sources=sources, pipfile=self.parsed_pipfile ) - finder.add_dependency_links(dependency_links) - - for dist in packages: - typ = 'unknown' - all_candidates = finder.find_all_candidates(dist.key) - if not pip_options.pre: - # Remove 
prereleases - all_candidates = [ - candidate for candidate in all_candidates - if not candidate.version.is_prerelease - ] - - if not all_candidates: - continue - best_candidate = max(all_candidates, key=finder._candidate_sort_key) - remote_version = best_candidate.version - if best_candidate.location.is_wheel: - typ = 'wheel' - else: - typ = 'sdist' - # This is dirty but makes the rest of the code much cleaner - dist.latest_version = remote_version - dist.latest_filetype = typ - yield dist + self._environment.add_dist("pipenv") + return self._environment def get_outdated_packages(self): - return [ - pkg for pkg in self.get_package_info() - if pkg.latest_version._version > pkg.parsed_version._version - ] + return self.environment.get_outdated_packages() @classmethod def _sanitize(cls, name): @@ -588,7 +532,6 @@ def dump_dict(dictionary, write_to, inline=False): :return: A new toml hierarchical document """ - def gen_table(inline=False): if inline: return tomlkit.inline_table() @@ -1159,49 +1102,6 @@ def proper_case_section(self, section): # Return whether or not values have been changed. 
return changed_values - @property - def py_version(self): - py_path = self.which("python") - version = python_version(py_path) - return version - - @property - def _pyversion(self): - include_dir = vistir.compat.Path(self.virtualenv_location) / "include" - python_path = next((x for x in include_dir.iterdir() if x.name.startswith("python")), None) - if python_path: - py_version = python_path.name.replace("python", "") - py_version_short, abiflags = py_version[:3], py_version[3:] - return {"py_version_short": py_version_short, "abiflags": abiflags} - return {} - - @property - def env_paths(self): - location = self.virtualenv_location if self.virtualenv_location else sys.prefix - prefix = vistir.compat.Path(location) - import importlib - py_version = tuple([int(v) for v in self.py_version.split(".")]) - py_version_short = ".".join([str(v) for v in py_version[:2]]) - running_version = ".".join([str(v) for v in sys.version_info[:2]]) - try: - _virtualenv = importlib.import_module("virtualenv") - except (ImportError, AttributeError): - with vistir.contextmanagers.temp_path(): - sys.path = vistir.misc.load_path(self.which("python")) - six.moves.reload_module(importlib) - _virtualenv = importlib.import_module("virtualenv") - with sys_version(py_version): - home, lib, inc, bin_ = _virtualenv.path_locations(prefix.absolute().as_posix()) - paths = { - "lib": lib.replace(running_version, py_version_short), - "include": inc.replace(running_version, py_version_short), - "scripts": bin_, - "purelib": lib.replace(running_version, py_version_short), - "prefix": home, - "base": home - } - return paths - @cached_property def finders(self): from .vendor.pythonfinder import Finder From 642b6f94b55924ba5e3af199d6fd679e369a9ee6 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 17:41:16 -0500 Subject: [PATCH 05/23] Update vistir and requirementslib Signed-off-by: Dan Ryan --- .../requirementslib/models/requirements.py | 25 ++++++---- .../requirementslib/models/setup_info.py | 
2 +- pipenv/vendor/vistir/compat.py | 50 +++++++++++++++++-- pipenv/vendor/vistir/misc.py | 7 ++- pipenv/vendor/vistir/path.py | 31 +++++++----- pipenv/vendor/vistir/spin.py | 2 +- 6 files changed, 86 insertions(+), 31 deletions(-) diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index 514114298c..aafb059b97 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -1050,9 +1050,9 @@ def copy(self): @classmethod def from_line(cls, line): - from pip_shims import InstallRequirement + import pip_shims.shims - if isinstance(line, InstallRequirement): + if isinstance(line, pip_shims.shims.InstallRequirement): line = format_requirement(line) hashes = None if "--hash=" in line: @@ -1130,13 +1130,20 @@ def from_line(cls, line): if hashes: args["hashes"] = hashes cls_inst = cls(**args) - if not cls_inst.is_named and (not cls_inst.editable or cls_inst.req._has_hashed_name): - old_name = cls_inst.req.req.name or cls_inst.req.name - info_dict = cls_inst.run_requires() - calced_name = info_dict.get("name", old_name) - if old_name != calced_name: - cls_inst.req.req.line.replace(old_name, calced_name) - cls_inst.name = cls_inst.req.name = calced_name + if not cls_inst.is_named and not cls_inst.editable and not name: + if cls_inst.is_vcs: + ireq = pip_shims.shims.install_req_from_req(cls_inst.as_line(include_hashes=False)) + info = SetupInfo.from_ireq(ireq) + if info is not None: + info_dict = info.as_dict() + cls_inst.req.setup_info = info + else: + info_dict = {} + else: + info_dict = cls_inst.run_requires() + found_name = info_dict.get("name", old_name) + if old_name != found_name: + cls_inst.req.req.line.replace(old_name, found_name) return cls_inst @classmethod diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 319dd6bdcb..f2a1ee7929 100644 --- 
a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -103,7 +103,7 @@ def iter_egginfos(path, pkg_name=None): def find_egginfo(target, pkg_name=None): egg_dirs = (egg_dir for egg_dir in iter_egginfos(target, pkg_name=pkg_name)) if pkg_name: - return next(iter(egg_dirs), None) + yield next(iter(egg_dirs), None) else: for egg_dir in egg_dirs: yield egg_dir diff --git a/pipenv/vendor/vistir/compat.py b/pipenv/vendor/vistir/compat.py index d6e8578a18..8322648101 100644 --- a/pipenv/vendor/vistir/compat.py +++ b/pipenv/vendor/vistir/compat.py @@ -20,6 +20,8 @@ "FileNotFoundError", "ResourceWarning", "FileNotFoundError", + "PermissionError", + "IsADirectoryError", "fs_str", "lru_cache", "TemporaryDirectory", @@ -69,8 +71,17 @@ def __init__(self, *args, **kwargs): self.errno = errno.ENOENT super(FileNotFoundError, self).__init__(*args, **kwargs) + class PermissionError(OSError): + def __init__(self, *args, **kwargs): + self.errno = errno.EACCES + super(PermissionError, self).__init__(*args, **kwargs) + + class IsADirectoryError(OSError): + """The command does not work on directories""" + pass + else: - from builtins import ResourceWarning, FileNotFoundError + from builtins import ResourceWarning, FileNotFoundError, PermissionError, IsADirectoryError if not sys.warnoptions: @@ -111,9 +122,39 @@ def __init__(self, suffix="", prefix=None, dir=None): ) @classmethod - def _cleanup(cls, name, warn_message): + def _rmtree(cls, name): from .path import rmtree - rmtree(name) + + def onerror(func, path, exc_info): + if issubclass(exc_info[0], (PermissionError, OSError)): + try: + try: + if path != name: + os.chflags(os.path.dirname(path), 0) + os.chflags(path, 0) + except AttributeError: + pass + if path != name: + os.chmod(os.path.dirname(path), 0o70) + os.chmod(path, 0o700) + + try: + os.unlink(path) + # PermissionError is raised on FreeBSD for directories + except (IsADirectoryError, PermissionError, OSError): + 
cls._rmtree(path) + except FileNotFoundError: + pass + elif issubclass(exc_info[0], FileNotFoundError): + pass + else: + raise + + rmtree(name, onerror=onerror) + + @classmethod + def _cleanup(cls, name, warn_message): + cls._rmtree(name) warnings.warn(warn_message, ResourceWarning) def __repr__(self): @@ -126,9 +167,8 @@ def __exit__(self, exc, value, tb): self.cleanup() def cleanup(self): - from .path import rmtree if self._finalizer.detach(): - rmtree(self.name) + self._rmtree(self.name) def fs_str(string): diff --git a/pipenv/vendor/vistir/misc.py b/pipenv/vendor/vistir/misc.py index 7342bc97de..a9a127d81a 100644 --- a/pipenv/vendor/vistir/misc.py +++ b/pipenv/vendor/vistir/misc.py @@ -35,7 +35,9 @@ class WindowsError(OSError): "locale_encoding", "chunked", "take", - "divide" + "divide", + "getpreferredencoding", + "decode_for_output", ] @@ -492,7 +494,8 @@ def chunked(n, iterable): def getpreferredencoding(): - import locale + """Determine the proper output encoding for terminal rendering""" + # Borrowed from Invoke # (see https://github.com/pyinvoke/invoke/blob/93af29d/invoke/runners.py#L881) _encoding = locale.getpreferredencoding(False) diff --git a/pipenv/vendor/vistir/path.py b/pipenv/vendor/vistir/path.py index b1236884e9..23ae025205 100644 --- a/pipenv/vendor/vistir/path.py +++ b/pipenv/vendor/vistir/path.py @@ -183,10 +183,9 @@ def mkdir_p(newdir, mode=0o777): :raises: OSError if a file is encountered along the way """ # http://code.activestate.com/recipes/82465-a-friendly-mkdir/ - from .misc import to_text - from .compat import to_native_string + from .misc import to_bytes, to_text - newdir = to_native_string(newdir) + newdir = to_bytes(newdir, "utf-8") if os.path.exists(newdir): if not os.path.isdir(newdir): raise OSError( @@ -195,9 +194,9 @@ def mkdir_p(newdir, mode=0o777): ) ) else: - head, tail = os.path.split(newdir) + head, tail = os.path.split(to_bytes(newdir, encoding="utf-8")) # Make sure the tail doesn't point to the asame place as the head 
- curdir = to_native_string(".") + curdir = to_bytes(".", encoding="utf-8") tail_and_head_match = ( os.path.relpath(tail, start=os.path.basename(head)) == curdir ) @@ -205,8 +204,9 @@ def mkdir_p(newdir, mode=0o777): target = os.path.join(head, tail) if os.path.exists(target) and os.path.isfile(target): raise OSError( - "A file with the same name as the desired dir, '{0}', " - "already exists.".format(to_text(newdir, encoding="utf-8")) + "A file with the same name as the desired dir, '{0}', already exists.".format( + to_text(newdir, encoding="utf-8") + ) ) os.makedirs(os.path.join(head, tail), mode) @@ -277,13 +277,13 @@ def set_write_bit(fn): if not os.path.isdir(fn): return for root, dirs, files in os.walk(fn, topdown=False): - for dir_ in [os.path.join(root,d) for d in dirs]: + for dir_ in [os.path.join(root, d) for d in dirs]: set_write_bit(dir_) for file_ in [os.path.join(root, f) for f in files]: set_write_bit(file_) -def rmtree(directory, ignore_errors=False): +def rmtree(directory, ignore_errors=False, onerror=None): """Stand-in for :func:`~shutil.rmtree` with additional error-handling. This version of `rmtree` handles read-only paths, especially in the case of index @@ -291,6 +291,7 @@ def rmtree(directory, ignore_errors=False): :param str directory: The target directory to remove :param bool ignore_errors: Whether to ignore errors, defaults to False + :param func onerror: An error handling function, defaults to :func:`handle_remove_readonly` .. 
note:: @@ -300,9 +301,11 @@ def rmtree(directory, ignore_errors=False): from .compat import to_native_string directory = to_native_string(directory) + if onerror is None: + onerror = handle_remove_readonly try: shutil.rmtree( - directory, ignore_errors=ignore_errors, onerror=handle_remove_readonly + directory, ignore_errors=ignore_errors, onerror=onerror ) except (IOError, OSError, FileNotFoundError) as exc: # Ignore removal failures where the file doesn't exist @@ -325,7 +328,9 @@ def handle_remove_readonly(func, path, exc): :func:`set_write_bit` on the target path and try again. """ # Check for read-only attribute - from .compat import ResourceWarning, FileNotFoundError, to_native_string + from .compat import ( + ResourceWarning, FileNotFoundError, PermissionError, to_native_string + ) PERM_ERRORS = (errno.EACCES, errno.EPERM, errno.ENOENT) default_warning_message = ( @@ -339,7 +344,7 @@ def handle_remove_readonly(func, path, exc): set_write_bit(path) try: func(path) - except (OSError, IOError, FileNotFoundError) as e: + except (OSError, IOError, FileNotFoundError, PermissionError) as e: if e.errno == errno.ENOENT: return elif e.errno in PERM_ERRORS: @@ -350,7 +355,7 @@ def handle_remove_readonly(func, path, exc): set_write_bit(path) try: func(path) - except (OSError, IOError, FileNotFoundError) as e: + except (OSError, IOError, FileNotFoundError, PermissionError) as e: if e.errno in PERM_ERRORS: warnings.warn(default_warning_message.format(path), ResourceWarning) pass diff --git a/pipenv/vendor/vistir/spin.py b/pipenv/vendor/vistir/spin.py index f0d9e77ffd..09ecbacea8 100644 --- a/pipenv/vendor/vistir/spin.py +++ b/pipenv/vendor/vistir/spin.py @@ -292,6 +292,6 @@ def _clear_line(): def create_spinner(*args, **kwargs): nospin = kwargs.pop("nospin", False) use_yaspin = kwargs.pop("use_yaspin", nospin) - if nospin: + if nospin or not use_yaspin: return DummySpinner(*args, **kwargs) return VistirSpinner(*args, **kwargs) From 45100b8a46f10955e9cf89a6932b695219048695 
Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 17:44:35 -0500 Subject: [PATCH 06/23] Fix stdout and stderr wrappers Signed-off-by: Dan Ryan --- news/3196.vendor.rst | 1 + pipenv/__init__.py | 10 ++++++---- pipenv/resolver.py | 11 ++++++++--- 3 files changed, 15 insertions(+), 7 deletions(-) create mode 100644 news/3196.vendor.rst diff --git a/news/3196.vendor.rst b/news/3196.vendor.rst new file mode 100644 index 0000000000..19351e2e73 --- /dev/null +++ b/news/3196.vendor.rst @@ -0,0 +1 @@ +Updated ``requirementslib`` to aid in resolution of local and remote archives. diff --git a/pipenv/__init__.py b/pipenv/__init__.py index ba4dd9c3e0..4d137e7f6c 100644 --- a/pipenv/__init__.py +++ b/pipenv/__init__.py @@ -28,10 +28,12 @@ if sys.stdout.isatty() and sys.stderr.isatty(): import io import atexit - sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8') - atexit.register(sys.stdout.close) - sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf8') - atexit.register(sys.stdout.close) + stdout_wrapper = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8') + atexit.register(stdout_wrapper.close) + stderr_wrapper = io.TextIOWrapper(sys.stderr.buffer, encoding='utf8') + atexit.register(stderr_wrapper.close) + sys.stdout = stdout_wrapper + sys.stderr = stderr_wrapper os.environ["PIP_DISABLE_PIP_VERSION_CHECK"] = fs_str("1") diff --git a/pipenv/resolver.py b/pipenv/resolver.py index 9ef46878c9..e87f324350 100644 --- a/pipenv/resolver.py +++ b/pipenv/resolver.py @@ -99,8 +99,13 @@ def main(): import io import six if six.PY3: - sys.stdout = io.TextIOWrapper(sys.stdout.buffer,encoding='utf8') - sys.stderr = io.TextIOWrapper(sys.stderr.buffer,encoding='utf8') + import atexit + stdout_wrapper = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8') + atexit.register(stdout_wrapper.close) + stderr_wrapper = io.TextIOWrapper(sys.stderr.buffer, encoding='utf8') + atexit.register(stderr_wrapper.close) + sys.stdout = stdout_wrapper + sys.stderr = 
stderr_wrapper else: from pipenv._compat import force_encoding force_encoding() @@ -111,7 +116,7 @@ def main(): # sys.argv = remaining parsed = handle_parsed_args(parsed) _main(parsed.pre, parsed.clear, parsed.verbose, parsed.system, - parsed.requirements_dir, parsed.packages) + parsed.requirements_dir, parsed.packages) if __name__ == "__main__": From 0caf7a013280d173354260e62ab17f3843ecd85d Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 18:05:25 -0500 Subject: [PATCH 07/23] Fix configparser import Signed-off-by: Dan Ryan --- pipenv/vendor/requirementslib/models/setup_info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index f2a1ee7929..481f0494c5 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -1,5 +1,4 @@ # -*- coding=utf-8 -*- -import configparser import contextlib import os @@ -14,6 +13,7 @@ import distutils from appdirs import user_cache_dir +from six.moves import configparser from six.moves.urllib.parse import unquote from vistir.compat import Path from vistir.contextmanagers import cd From 650cc32fe676619a3478b7712e48b511a6f2ac4b Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 18:49:44 -0500 Subject: [PATCH 08/23] Fix resource errors Signed-off-by: Dan Ryan --- pipenv/environment.py | 2 +- pipenv/vendor/requirementslib/__init__.py | 5 ++--- pipenv/vendor/requirementslib/models/cache.py | 2 ++ pipenv/vendor/requirementslib/models/dependencies.py | 3 +++ 4 files changed, 8 insertions(+), 4 deletions(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index db0e22aa40..5e86737491 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -25,7 +25,7 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No super(Environment, self).__init__() self._modules = {'pkg_resources': pkg_resources, 
'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET - self.is_venv = not os.path.samefile(os.path.abspath(prefix), sys.prefix) + self.is_venv = not os.samefile(os.path.abspath(prefix), sys.prefix) if not sources: sources = [] self.sources = sources diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index 881e9ac9d9..05fd19438c 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -3,12 +3,11 @@ import logging import warnings - -warnings.filterwarnings("ignore", category=ResourceWarning) - +from vistir.compat import ResourceWarning logger = logging.getLogger(__name__) logger.addHandler(logging.NullHandler()) +warnings.filterwarnings("ignore", category=ResourceWarning) from .models.requirements import Requirement from .models.lockfile import Lockfile diff --git a/pipenv/vendor/requirementslib/models/cache.py b/pipenv/vendor/requirementslib/models/cache.py index 0b8c47b19d..f1639ea228 100644 --- a/pipenv/vendor/requirementslib/models/cache.py +++ b/pipenv/vendor/requirementslib/models/cache.py @@ -1,6 +1,7 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import, print_function, unicode_literals +import atexit import copy import hashlib import json @@ -197,6 +198,7 @@ def __init__(self, *args, **kwargs): if not session: import requests session = requests.session() + atexit.register(session.close) cache_dir = kwargs.pop('cache_dir', CACHE_DIR) self.session = session kwargs.setdefault('directory', os.path.join(cache_dir, 'hash-cache')) diff --git a/pipenv/vendor/requirementslib/models/dependencies.py b/pipenv/vendor/requirementslib/models/dependencies.py index 48e84d0fc3..f87fd585e9 100644 --- a/pipenv/vendor/requirementslib/models/dependencies.py +++ b/pipenv/vendor/requirementslib/models/dependencies.py @@ -1,5 +1,6 @@ # -*- coding=utf-8 -*- +import atexit import contextlib import copy import functools @@ -361,6 
+362,7 @@ def get_dependencies_from_json(ireq): return session = requests.session() + atexit.register(session.close) version = str(ireq.req.specifier).lstrip("=") def gen(ireq): @@ -575,6 +577,7 @@ def get_finder(sources=None, pip_command=None, pip_options=None): if not pip_options: pip_options = get_pip_options(sources=sources, pip_command=pip_command) session = pip_command._build_session(pip_options) + atexit.register(session.close) finder = pip_shims.shims.PackageFinder( find_links=[], index_urls=[s.get("url") for s in sources], From 382be38bbabb3b65425148548bd51fddc1fb28e1 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 19:03:55 -0500 Subject: [PATCH 09/23] Fix python 2.7 installations Signed-off-by: Dan Ryan --- pipenv/core.py | 20 ++++++++++++-------- pipenv/environment.py | 2 +- pipenv/vendor/requirementslib/exceptions.py | 2 +- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/pipenv/core.py b/pipenv/core.py index c5891a5726..3dd126bffe 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -1847,7 +1847,7 @@ def do_install( # Install all dependencies, if none was provided. # This basically ensures that we have a pipfile and lockfile, then it locks and # installs from the lockfile - if packages is False and editable_packages is False: + if not packages and not editable_packages: # Update project settings with pre preference. 
if pre: project.update_settings({"allow_prereleases": pre}) @@ -1872,13 +1872,17 @@ def do_install( # make a tuple of (display_name, entry) pkg_list = packages + ["-e {0}".format(pkg) for pkg in editable_packages] if not system and not project.virtualenv_exists: - with create_spinner("Creating virtualenv...") as sp: - try: - do_create_virtualenv(pypi_mirror=pypi_mirror) - except KeyboardInterrupt: - cleanup_virtualenv(bare=(not environments.is_verbose())) - sys.exit(1) - sp.write_err("Ok...") + do_init( + dev=dev, + system=system, + allow_global=system, + concurrent=concurrent, + keep_outdated=keep_outdated, + requirements_dir=requirements_directory, + deploy=deploy, + pypi_mirror=pypi_mirror, + skip_lock=skip_lock, + ) for pkg_line in pkg_list: click.echo( crayons.normal( diff --git a/pipenv/environment.py b/pipenv/environment.py index 5e86737491..db0e22aa40 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -25,7 +25,7 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No super(Environment, self).__init__() self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET - self.is_venv = not os.samefile(os.path.abspath(prefix), sys.prefix) + self.is_venv = not os.path.samefile(os.path.abspath(prefix), sys.prefix) if not sources: sources = [] self.sources = sources diff --git a/pipenv/vendor/requirementslib/exceptions.py b/pipenv/vendor/requirementslib/exceptions.py index 1a73f98e77..23bc5e5024 100644 --- a/pipenv/vendor/requirementslib/exceptions.py +++ b/pipenv/vendor/requirementslib/exceptions.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -from __future__ import absolute_import +from __future__ import absolute_import, print_function import errno import os import six From 489e534c9ae0d912390a2681b0887839d492bbf2 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 23:38:09 -0500 Subject: [PATCH 10/23] Fix various bugs with 
python 2.7 and vendored deps Signed-off-by: Dan Ryan --- pipenv/project.py | 2 +- .../requirementslib/models/setup_info.py | 30 ++++++++++++++++--- pipenv/vendor/vistir/contextmanagers.py | 8 +++-- pipenv/vendor/vistir/spin.py | 2 +- 4 files changed, 34 insertions(+), 8 deletions(-) diff --git a/pipenv/project.py b/pipenv/project.py index fbc18f1970..0eafff8e79 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -350,7 +350,7 @@ def environment(self): if not self._environment: prefix = self.get_location_for_virtualenv() is_venv = prefix == sys.prefix - sources = self.sources.copy() if self.sources else [DEFAULT_SOURCE,] + sources = self.sources if self.sources else [DEFAULT_SOURCE,] self._environment = Environment( prefix=prefix, is_venv=is_venv, sources=sources, pipfile=self.parsed_pipfile ) diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 481f0494c5..247d63f2ef 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -1,6 +1,7 @@ # -*- coding=utf-8 -*- import contextlib import os +import sys import attr import packaging.version @@ -29,6 +30,11 @@ CACHE_DIR = os.environ.get("PIPENV_CACHE_DIR", user_cache_dir("pipenv")) +# The following are necessary for people who like to use "if __name__" conditionals +# in their setup.py scripts +_setup_stop_after = None +_setup_distribution = None + @contextlib.contextmanager def _suppress_distutils_logs(): @@ -116,7 +122,7 @@ def get_metadata(path, pkg_name=None): if egg_dir is not None: import pkg_resources - egg_dir = os.path.abspath(egg_dir) + egg_dir = os.path.abspath(egg_dir.path) base_dir = os.path.dirname(egg_dir) path_metadata = pkg_resources.PathMetadata(base_dir, egg_dir) dist = next( @@ -216,10 +222,26 @@ def run_setup(self): if self.setup_py is not None and self.setup_py.exists(): with cd(self.setup_py.parent), _suppress_distutils_logs(): from setuptools.dist import 
distutils + save_argv = sys.argv.copy() + try: + # This is for you, Hynek + # see https://github.com/hynek/environ_config/blob/69b1c8a/setup.py + global _setup_distribution, _setup_stop_after + _setup_stop_after = "run" + script_name = self.setup_py.as_posix() + g = {"__file__": script_name, "__name__": "__main__"} + sys.argv[0] = script_name + sys.argv[1:] = ["egg_info", "--egg-base", self.base_dir] + with open(script_name, 'rb') as f: + exec(f.read(), g) + finally: + _setup_stop_after = None + sys.argv = save_argv + dist = _setup_distribution + if not dist: + self.get_egg_metadata() + return - dist = distutils.core.run_setup( - self.setup_py.as_posix(), ["egg_info", "--egg-base", self.base_dir] - ) name = dist.get_name() if name: self.name = name diff --git a/pipenv/vendor/vistir/contextmanagers.py b/pipenv/vendor/vistir/contextmanagers.py index 59b97ca0e3..3f19112087 100644 --- a/pipenv/vendor/vistir/contextmanagers.py +++ b/pipenv/vendor/vistir/contextmanagers.py @@ -118,10 +118,11 @@ def spinner(spinner_name=None, start_text=None, handler_map=None, nospin=False): """ from .spin import create_spinner - has_yaspin = False + has_yaspin = None try: import yaspin except ImportError: + has_yaspin = False if not nospin: raise RuntimeError( "Failed to import spinner! Reinstall vistir with command:" @@ -132,6 +133,9 @@ def spinner(spinner_name=None, start_text=None, handler_map=None, nospin=False): else: has_yaspin = True spinner_name = "" + use_yaspin = (has_yaspin is False) or (nospin is True) + if has_yaspin is None or has_yaspin is True and not nospin: + use_yaspin = True if not start_text and nospin is False: start_text = "Running..." 
with create_spinner( @@ -139,7 +143,7 @@ def spinner(spinner_name=None, start_text=None, handler_map=None, nospin=False): text=start_text, handler_map=handler_map, nospin=nospin, - use_yaspin=has_yaspin + use_yaspin=use_yaspin ) as _spinner: yield _spinner diff --git a/pipenv/vendor/vistir/spin.py b/pipenv/vendor/vistir/spin.py index 09ecbacea8..e7311555f7 100644 --- a/pipenv/vendor/vistir/spin.py +++ b/pipenv/vendor/vistir/spin.py @@ -291,7 +291,7 @@ def _clear_line(): def create_spinner(*args, **kwargs): nospin = kwargs.pop("nospin", False) - use_yaspin = kwargs.pop("use_yaspin", nospin) + use_yaspin = kwargs.pop("use_yaspin", not nospin) if nospin or not use_yaspin: return DummySpinner(*args, **kwargs) return VistirSpinner(*args, **kwargs) From 32b1113fbb6bad61f138f1aa1c47d1a686f35baa Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 23:58:52 -0500 Subject: [PATCH 11/23] Support python 2 parsing Signed-off-by: Dan Ryan --- pipenv/vendor/requirementslib/models/setup_info.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 247d63f2ef..561ba15610 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -221,19 +221,22 @@ def parse_setup_cfg(self): def run_setup(self): if self.setup_py is not None and self.setup_py.exists(): with cd(self.setup_py.parent), _suppress_distutils_logs(): - from setuptools.dist import distutils - save_argv = sys.argv.copy() - try: + if sys.version_info < (3, 5): + save_argv = sys.argv[:] + else: + save_argv = sys.argv.copy() # This is for you, Hynek # see https://github.com/hynek/environ_config/blob/69b1c8a/setup.py + try: global _setup_distribution, _setup_stop_after _setup_stop_after = "run" script_name = self.setup_py.as_posix() g = {"__file__": script_name, "__name__": "__main__"} + l = {} sys.argv[0] = script_name 
sys.argv[1:] = ["egg_info", "--egg-base", self.base_dir] with open(script_name, 'rb') as f: - exec(f.read(), g) + exec(f.read(), g, l) finally: _setup_stop_after = None sys.argv = save_argv From 1216ae0c8a898ce81a3261762cf759c1d9fe5c4f Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 00:08:28 -0500 Subject: [PATCH 12/23] Fix environment site import Signed-off-by: Dan Ryan --- pipenv/environment.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index db0e22aa40..c3c33dda24 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -7,6 +7,7 @@ import sys import operator import pkg_resources +import site import six from distutils.sysconfig import get_python_lib @@ -239,7 +240,6 @@ def get_distributions(self): return pkg_resources.find_distributions(self.paths["PYTHONPATH"]) def find_egg(self, egg_dist): - import site site_packages = get_python_lib() search_filename = "{0}.egg-link".format(egg_dist.project_name) try: @@ -476,7 +476,6 @@ def activated(self, include_extras=True, extra_dists=None): os.environ["VIRTUAL_ENV"] = vistir.compat.fs_str(prefix) sys.path = self.sys_path sys.prefix = self.sys_prefix - site = self.safe_import("site") site.addsitedir(self.base_paths["purelib"]) if include_extras: site.addsitedir(parent_path) From 70fc92b08d213209a7f4363c616fe490cef9dc66 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 10:05:21 -0500 Subject: [PATCH 13/23] Fix import errors on setup parsing Signed-off-by: Dan Ryan --- .../requirementslib/models/setup_info.py | 26 ++++++++++++++----- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 561ba15610..6107a24075 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -18,6 +18,7 @@ from six.moves.urllib.parse import unquote from 
vistir.compat import Path from vistir.contextmanagers import cd +from vistir.misc import run from vistir.path import create_tracked_tempdir, ensure_mkdir_p, mkdir_p from .utils import init_requirement, get_pyproject @@ -220,9 +221,15 @@ def parse_setup_cfg(self): def run_setup(self): if self.setup_py is not None and self.setup_py.exists(): - with cd(self.setup_py.parent), _suppress_distutils_logs(): + target_cwd = self.setup_py.parent.as_posix() + with cd(target_cwd), _suppress_distutils_logs(): + from setuptools.dist import distutils + script_name = self.setup_py.as_posix() + args = ["egg_info", "--egg-base", self.base_dir] + g = {"__file__": script_name, "__name__": "__main__"} + local_dict = {} if sys.version_info < (3, 5): - save_argv = sys.argv[:] + save_argv = sys.argv else: save_argv = sys.argv.copy() # This is for you, Hynek @@ -230,13 +237,18 @@ def run_setup(self): try: global _setup_distribution, _setup_stop_after _setup_stop_after = "run" - script_name = self.setup_py.as_posix() - g = {"__file__": script_name, "__name__": "__main__"} - l = {} sys.argv[0] = script_name - sys.argv[1:] = ["egg_info", "--egg-base", self.base_dir] + sys.argv[1:] = args with open(script_name, 'rb') as f: - exec(f.read(), g, l) + if sys.version_info < (3, 5): + exec(f.read(), g, local_dict) + else: + exec(f.read(), g) + # We couldn't import everything needed to run setup + except NameError: + python = os.environ.get('PIP_PYTHON_PATH', sys.executable) + out, _ = run([python, "setup.py"] + args, cwd=target_cwd, block=True, + combine_stderr=False, return_object=False, nospin=True) finally: _setup_stop_after = None sys.argv = save_argv From 013e3d0ec22607769ac749544c1e5f20ade261c2 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 12:04:58 -0500 Subject: [PATCH 14/23] Revendor Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/models/path.py | 1 - pipenv/vendor/pythonfinder/pythonfinder.py | 4 ++-- pipenv/vendor/pythonfinder/utils.py | 2 +- 
pipenv/vendor/requirementslib/models/lockfile.py | 4 ++-- .../vendor/requirementslib/models/requirements.py | 15 +++++---------- pipenv/vendor/requirementslib/models/utils.py | 1 - pipenv/vendor/vendor.txt | 6 +++--- 7 files changed, 13 insertions(+), 20 deletions(-) diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index 33b4ab58cb..3d01e7cf14 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -448,7 +448,6 @@ def get_py_version(self): if self.is_dir: return None if self.is_python: - from .python import PythonVersion try: py_version = PythonVersion.from_path(path=self, name=self.name) except InvalidPythonVersion: diff --git a/pipenv/vendor/pythonfinder/pythonfinder.py b/pipenv/vendor/pythonfinder/pythonfinder.py index 854cc8e7cf..b3bad57042 100644 --- a/pipenv/vendor/pythonfinder/pythonfinder.py +++ b/pipenv/vendor/pythonfinder/pythonfinder.py @@ -65,7 +65,7 @@ def windows_finder(self): def which(self, exe): return self.system_path.which(exe) - @lru_cache(maxsize=128) + @lru_cache(maxsize=1024) def find_python_version( self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None, name=None ): @@ -113,7 +113,7 @@ def find_python_version( major=major, minor=minor, patch=patch, pre=pre, dev=dev, arch=arch, name=name ) - @lru_cache(maxsize=128) + @lru_cache(maxsize=1024) def find_all_python_versions( self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None, name=None ): diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index ca07b42f76..42a63e54f3 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -90,7 +90,7 @@ def looks_like_python(name): return any(fnmatch(name, rule) for rule in MATCH_RULES) -@lru_cache(maxsize=128) +@lru_cache(maxsize=1024) def path_is_python(path): return path_is_executable(path) and looks_like_python(path.name) diff --git 
a/pipenv/vendor/requirementslib/models/lockfile.py b/pipenv/vendor/requirementslib/models/lockfile.py index 6f61f57ebc..9d19edaf7c 100644 --- a/pipenv/vendor/requirementslib/models/lockfile.py +++ b/pipenv/vendor/requirementslib/models/lockfile.py @@ -223,13 +223,13 @@ def load(cls, path, create=True): try: projectfile = cls.load_projectfile(path, create=create) - except JSONDecodeError as e: + except JSONDecodeError: path = os.path.abspath(path) if not os.path.isdir(path): path = os.path.dirname(path) path = Path(os.path.join(path, "Pipfile.lock")) formatted_path = path.as_posix() - backup_path = "%.bak" % formatted_path + backup_path = "%s.bak" % formatted_path LockfileCorruptException.show(formatted_path, backup_path=backup_path) path.rename(backup_path) cls.load(formatted_path, create=True) diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index aafb059b97..d5330b48f4 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -14,7 +14,7 @@ from first import first from packaging.markers import Marker from packaging.requirements import Requirement as PackagingRequirement -from packaging.specifiers import Specifier, SpecifierSet +from packaging.specifiers import Specifier, SpecifierSet, LegacySpecifier, InvalidSpecifier from packaging.utils import canonicalize_name from six.moves.urllib import parse as urllib_parse from six.moves.urllib.parse import unquote @@ -325,9 +325,6 @@ def get_name(self): if setup_name: name = setup_name self._has_hashed_name = False - version = setupinfo_dict.get("version") - if version and not self.version: - self.version = version build_requires = setupinfo_dict.get("build_requires") build_backend = setupinfo_dict.get("build_backend") if build_requires and not self.pyproject_requires: @@ -404,7 +401,6 @@ def create( cls, path=None, uri=None, editable=False, extras=None, link=None, vcs_type=None, 
name=None, req=None, line=None, uri_scheme=None, setup_path=None, relpath=None ): - import pip_shims.shims if relpath and not path: path = relpath if not path and uri and link.scheme == "file": @@ -455,7 +451,6 @@ def create( creation_kwargs["vcs_type"] = vcs_type _line = None if not name: - import pip_shims.shims _line = unquote(link.url_without_fragment) if link.url else uri if editable: ireq = pip_shims.shims.install_req_from_editable(_line) @@ -1050,8 +1045,6 @@ def copy(self): @classmethod def from_line(cls, line): - import pip_shims.shims - if isinstance(line, pip_shims.shims.InstallRequirement): line = format_requirement(line) hashes = None @@ -1200,7 +1193,6 @@ def from_pipfile(cls, name, pipfile): old_name = cls_inst.req.req.name or cls_inst.req.name if not cls_inst.is_named and not cls_inst.editable and not name: if cls_inst.is_vcs: - import pip_shims.shims ireq = pip_shims.shims.install_req_from_req(cls_inst.as_line(include_hashes=False)) info = SetupInfo.from_ireq(ireq) if info is not None: @@ -1276,7 +1268,10 @@ def get_markers(self): return markers def get_specifier(self): - return Specifier(self.specifiers) + try: + return Specifier(self.specifiers) + except InvalidSpecifier: + return LegacySpecifier(self.specifiers) def get_version(self): return pip_shims.shims.parse_version(self.get_specifier().version) diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index 2b47ee9bb0..0fac2aa3f8 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -95,7 +95,6 @@ def build_vcs_link(vcs, uri, name=None, ref=None, subdirectory=None, extras=None if extras: extras = extras_to_string(extras) uri = "{0}{1}".format(uri, extras) - # if subdirectory: if subdirectory: uri = "{0}&subdirectory={1}".format(uri, subdirectory) return create_link(uri) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 45ff0384c0..c106a59c56 100644 --- 
a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -21,20 +21,20 @@ pipdeptree==0.13.0 pipreqs==0.4.9 docopt==0.6.2 yarg==0.1.9 -pythonfinder==1.1.7 +pythonfinder==1.1.8 requests==2.20.0 chardet==3.0.4 idna==2.7 urllib3==1.24 certifi==2018.10.15 -requirementslib==1.2.5 +requirementslib==1.3.0 attrs==18.2.0 distlib==0.2.8 packaging==18.0 pyparsing==2.2.2 pytoml==0.1.19 plette==0.2.2 - tomlkit==0.4.6 + tomlkit==0.5.2 shellingham==1.2.7 six==1.11.0 semver==2.8.1 From 96cbd58d84a04d319c422e3e2fcc96c184e48efb Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 13:36:58 -0500 Subject: [PATCH 15/23] Fix prefix comparison for py2 Signed-off-by: Dan Ryan --- pipenv/environment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index c3c33dda24..b96f2fbe26 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -26,7 +26,7 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No super(Environment, self).__init__() self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET - self.is_venv = not os.path.samefile(os.path.abspath(prefix), sys.prefix) + self.is_venv = not os.path.samefile(os.path.abspath(str(prefix)), sys.prefix) if not sources: sources = [] self.sources = sources From 9eabde0bbfa3dc61d71fc1efbc741047dc417663 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 16:31:53 -0500 Subject: [PATCH 16/23] no samefile for windows python2.7 Signed-off-by: Dan Ryan --- pipenv/environment.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index b96f2fbe26..9315447c30 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -26,7 +26,8 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No super(Environment, self).__init__() self._modules = 
{'pkg_resources': pkg_resources, 'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET - self.is_venv = not os.path.samefile(os.path.abspath(str(prefix)), sys.prefix) + prefix = os.path.normcase(os.path.normpath(os.path.abspath(str(prefix)))) + self.is_venv = not prefix == os.path.normcase(os.path.normpath(sys.prefix)) if not sources: sources = [] self.sources = sources From ef59d1520e9b84bd611587a2fd79680df19d53d9 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 22:27:38 -0500 Subject: [PATCH 17/23] Fix bugs in environment implementation - Fix virtualenv - Update pythonfinder Signed-off-by: Dan Ryan --- pipenv/core.py | 5 +- pipenv/environment.py | 33 +++++----- pipenv/project.py | 5 +- pipenv/vendor/pythonfinder/environment.py | 4 ++ pipenv/vendor/pythonfinder/models/asdf.py | 9 +++ pipenv/vendor/pythonfinder/models/path.py | 69 +++++++++++++++------ pipenv/vendor/pythonfinder/models/pyenv.py | 7 ++- pipenv/vendor/pythonfinder/models/python.py | 2 +- pipenv/vendor/pythonfinder/utils.py | 6 +- 9 files changed, 96 insertions(+), 44 deletions(-) create mode 100644 pipenv/vendor/pythonfinder/models/asdf.py diff --git a/pipenv/core.py b/pipenv/core.py index c2df7b78a7..6576648ff4 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -921,7 +921,8 @@ def do_create_virtualenv(python=None, site_packages=False, pypi_mirror=None): prefix=project.get_location_for_virtualenv(), is_venv=True, sources=sources, - pipfile=project.parsed_pipfile + pipfile=project.parsed_pipfile, + project=project ) project._environment.add_dist("pipenv") # Say where the virtualenv is. 
@@ -1621,7 +1622,7 @@ def do_outdated(pypi_mirror=None): outdated_packages = { canonicalize_name(pkg.project_name): package_info (pkg.project_name, pkg.parsed_version, pkg.latest_version) - for pkg in project.get_outdated_packages() + for pkg in project.environment.get_outdated_packages() } for result in installed_packages: dep = Requirement.from_line(str(result.as_requirement())) diff --git a/pipenv/environment.py b/pipenv/environment.py index 9315447c30..8548c38f52 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -22,7 +22,8 @@ class Environment(object): - def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=None, sources=None): + def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=None, + sources=None, project=None): super(Environment, self).__init__() self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET @@ -30,10 +31,17 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No self.is_venv = not prefix == os.path.normcase(os.path.normpath(sys.prefix)) if not sources: sources = [] + self.project = project + if project and not sources: + sources = project.sources self.sources = sources + if project and not pipfile: + pipfile = project.pipfile + self.pipfile = pipfile self.extra_dists = [] prefix = prefix if prefix else sys.prefix self.prefix = vistir.compat.Path(prefix) + self.sys_paths = get_paths() def safe_import(self, name): """Helper utility for reimporting previously imported modules while inside the env""" @@ -73,7 +81,7 @@ def resolve_dist(cls, dist, working_set): deps.add(dist) try: reqs = dist.requires() - except AttributeError: + except (AttributeError, OSError): # The METADATA file can't be found return deps for req in reqs: dist = working_set.find(req) @@ -187,12 +195,6 @@ def sys_path(self): path = json.loads(path.strip()) return path - @cached_property - def 
system_paths(self): - paths = {} - paths = get_paths() - return paths - @cached_property def sys_prefix(self): """The prefix run inside the context of the environment @@ -271,7 +273,8 @@ def get_installed_packages(self): packages = [pkg for pkg in workingset if self.dist_is_in_project(pkg)] return packages - def get_finder(self): + @contextlib.contextmanager + def get_finder(self, pre=False): from .vendor.pip_shims import Command, cmdoptions, index_group, PackageFinder from .environments import PIPENV_CACHE_DIR index_urls = [source.get("url") for source in self.sources] @@ -286,10 +289,10 @@ class PipCommand(Command): cmd_opts = pip_command.cmd_opts pip_command.parser.insert_option_group(0, index_opts) pip_command.parser.insert_option_group(0, cmd_opts) - pip_args = self._modules["pipenv"].utils.prepare_pip_source_args(self.sources, []) + pip_args = self._modules["pipenv"].utils.prepare_pip_source_args(self.sources) pip_options, _ = pip_command.parser.parse_args(pip_args) pip_options.cache_dir = PIPENV_CACHE_DIR - pip_options.pre = self.pipfile.get("pre", False) + pip_options.pre = self.pipfile.get("pre", pre) with pip_command._build_session(pip_options) as session: finder = PackageFinder( find_links=pip_options.find_links, @@ -300,7 +303,7 @@ class PipCommand(Command): ) yield finder - def get_package_info(self): + def get_package_info(self, pre=False): dependency_links = [] packages = self.get_installed_packages() # This code is borrowed from pip's current implementation @@ -314,7 +317,7 @@ def get_package_info(self): for dist in packages: typ = 'unknown' all_candidates = finder.find_all_candidates(dist.key) - if not finder.pip_options.pre: + if not self.pipfile.get("pre", finder.allow_all_prereleases): # Remove prereleases all_candidates = [ candidate for candidate in all_candidates @@ -334,9 +337,9 @@ def get_package_info(self): dist.latest_filetype = typ yield dist - def get_outdated_packages(self): + def get_outdated_packages(self, pre=False): return [ - pkg 
for pkg in self.get_package_info() + pkg for pkg in self.get_package_info(pre=pre) if pkg.latest_version._version > pkg.parsed_version._version ] diff --git a/pipenv/project.py b/pipenv/project.py index 0eafff8e79..7857b25a82 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -352,13 +352,14 @@ def environment(self): is_venv = prefix == sys.prefix sources = self.sources if self.sources else [DEFAULT_SOURCE,] self._environment = Environment( - prefix=prefix, is_venv=is_venv, sources=sources, pipfile=self.parsed_pipfile + prefix=prefix, is_venv=is_venv, sources=sources, pipfile=self.parsed_pipfile, + project=self ) self._environment.add_dist("pipenv") return self._environment def get_outdated_packages(self): - return self.environment.get_outdated_packages() + return self.environment.get_outdated_packages(pre=self.pipfile.get("pre", False)) @classmethod def _sanitize(cls, name): diff --git a/pipenv/vendor/pythonfinder/environment.py b/pipenv/vendor/pythonfinder/environment.py index 27a5b3fc99..ec4a760fac 100644 --- a/pipenv/vendor/pythonfinder/environment.py +++ b/pipenv/vendor/pythonfinder/environment.py @@ -7,9 +7,13 @@ PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool( os.environ.get("PYENV_ROOT") ) +ASDF_INSTALLED = bool(os.environ.get("ASDF_DATA_DIR")) PYENV_ROOT = os.path.expanduser( os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv")) ) +ASDF_DATA_DIR = os.path.expanduser( + os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf")) +) IS_64BIT_OS = None SYSTEM_ARCH = platform.architecture()[0] diff --git a/pipenv/vendor/pythonfinder/models/asdf.py b/pipenv/vendor/pythonfinder/models/asdf.py new file mode 100644 index 0000000000..3ba6e4faba --- /dev/null +++ b/pipenv/vendor/pythonfinder/models/asdf.py @@ -0,0 +1,9 @@ +# -*- coding=utf-8 -*- +import attr + +from .pyenv import PyenvFinder + + +@attr.s +class AsdfFinder(PyenvFinder): + version_root = attr.ib(default="installs/python/*") diff --git 
a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index 3d01e7cf14..9c96e5f82f 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -17,7 +17,7 @@ from vistir.compat import Path, fs_str from .mixins import BasePath -from ..environment import PYENV_INSTALLED, PYENV_ROOT +from ..environment import PYENV_INSTALLED, PYENV_ROOT, ASDF_INSTALLED, ASDF_DATA_DIR from ..exceptions import InvalidPythonVersion from ..utils import ( ensure_path, @@ -40,6 +40,7 @@ class SystemPath(object): python_version_dict = attr.ib(default=attr.Factory(defaultdict)) only_python = attr.ib(default=False) pyenv_finder = attr.ib(default=None, validator=optional_instance_of("PyenvPath")) + asdf_finder = attr.ib(default=None) system = attr.ib(default=False) _version_dict = attr.ib(default=attr.Factory(defaultdict)) ignore_unsupported = attr.ib(default=False) @@ -105,6 +106,8 @@ def __attrs_post_init__(self): self._setup_windows() if PYENV_INSTALLED: self._setup_pyenv() + if ASDF_INSTALLED: + self._setup_asdf() venv = os.environ.get("VIRTUAL_ENV") if os.name == "nt": bin_dir = "Scripts" @@ -124,32 +127,62 @@ def __attrs_post_init__(self): path=syspath_bin, is_root=True, only_python=False ) - def _setup_pyenv(self): - from .pyenv import PyenvFinder - - last_pyenv = next( - (p for p in reversed(self.path_order) if PYENV_ROOT.lower() in p.lower()), + def _get_last_instance(self, path): + last_instance = next(iter( + (p for p in reversed(self.path_order) if path.lower() in p.lower())), None, ) try: - pyenv_index = self.path_order.index(last_pyenv) + path_index = self.path_order.index(last_instance) except ValueError: return + return path_index + + def _slice_in_paths(self, start_idx, paths): + before_path = self.path_order[: start_idx + 1] + after_path = self.path_order[start_idx + 2 :] + self.path_order = ( + before_path + [p.as_posix() for p in paths] + after_path + ) + + def _remove_path(self, path): + 
path_copy = reversed(self.path_order[:]) + new_order = [] + target = os.path.normcase(os.path.normpath(os.path.abspath(path))) + path_map = { + os.path.normcase(os.path.normpath(os.path.abspath(pth))): pth + for pth in self.paths.keys() + } + if target in path_map: + del self.paths[path_map.get(target)] + for current_path in path_copy: + normalized = os.path.normcase(os.path.normpath(os.path.abspath(current_path))) + if normalized != target: + new_order.append(normalized) + new_order = reversed(new_order) + self.path_order = new_order + + def _setup_asdf(self): + from .asdf import AsdfFinder + asdf_index = self._get_last_instance(ASDF_DATA_DIR) + self.asdf_finder = AsdfFinder.create(root=ASDF_DATA_DIR, ignore_unsupported=True) + root_paths = [p for p in self.asdf_finder.roots] + self._slice_in_paths(asdf_index, root_paths) + self.paths.update(self.asdf_finder.roots) + self._register_finder("asdf", self.asdf_finder) + + def _setup_pyenv(self): + from .pyenv import PyenvFinder + + pyenv_index = self._get_last_instance(PYENV_ROOT) self.pyenv_finder = PyenvFinder.create( root=PYENV_ROOT, ignore_unsupported=self.ignore_unsupported ) root_paths = [p for p in self.pyenv_finder.roots] - before_path = self.path_order[: pyenv_index + 1] - after_path = self.path_order[pyenv_index + 2 :] - self.path_order = ( - before_path + [p.as_posix() for p in root_paths] + after_path - ) - pyenv_shim_path = os.path.join(PYENV_ROOT, "shims") - if pyenv_shim_path in self.path_order: - self.path_order.remove(pyenv_shim_path) + self._slice_in_paths(pyenv_index, root_paths) + self.paths.update(self.pyenv_finder.roots) - if pyenv_shim_path in self.paths: - del self.paths[pyenv_shim_path] + self._remove_path(os.path.join(PYENV_ROOT, "shims")) self._register_finder("pyenv", self.pyenv_finder) def _setup_windows(self): @@ -396,7 +429,7 @@ def create( ) -@attr.s +@attr.s(slots=True) class PathEntry(BasePath): path = attr.ib(default=None, validator=optional_instance_of(Path)) _children = 
attr.ib(default=attr.Factory(dict)) diff --git a/pipenv/vendor/pythonfinder/models/pyenv.py b/pipenv/vendor/pythonfinder/models/pyenv.py index ac7f8588ac..cf85f57a98 100644 --- a/pipenv/vendor/pythonfinder/models/pyenv.py +++ b/pipenv/vendor/pythonfinder/models/pyenv.py @@ -26,7 +26,7 @@ logger = logging.getLogger(__name__) -@attr.s +@attr.s(slots=True) class PyenvFinder(BaseFinder, BasePath): root = attr.ib(default=None, validator=optional_instance_of(Path)) #: ignore_unsupported should come before versions, because its value is used @@ -34,6 +34,7 @@ class PyenvFinder(BaseFinder, BasePath): ignore_unsupported = attr.ib(default=True) paths = attr.ib(default=attr.Factory(list)) roots = attr.ib(default=attr.Factory(defaultdict)) + version_root = attr.ib(default="versions/*") versions = attr.ib() pythons = attr.ib() @@ -50,7 +51,7 @@ def get_version_order(self): version_order_lines = version_order_file.read_text(encoding="utf-8").splitlines() version_paths = [ - p for p in self.root.glob("versions/*") + p for p in self.root.glob(self.version_root) if not (p.parent.name == "envs" or p.name == "envs") ] versions = {v.name: v for v in version_paths} @@ -74,7 +75,7 @@ def version_from_bin_dir(cls, base_dir, name=None): @versions.default def get_versions(self): versions = defaultdict() - bin_ = sysconfig._INSTALL_SCHEMES['posix_prefix']["scripts"] + bin_ = "{base}/bin" for p in self.get_version_order(): bin_dir = Path(bin_.format(base=p.as_posix())) version_path = None diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py index 24d520b6d2..583dc6b38a 100644 --- a/pipenv/vendor/pythonfinder/models/python.py +++ b/pipenv/vendor/pythonfinder/models/python.py @@ -21,7 +21,7 @@ ) -@attr.s +@attr.s(slots=True) class PythonVersion(object): major = attr.ib(default=0) minor = attr.ib(default=None) diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index 42a63e54f3..881cdb2eec 100644 --- 
a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -54,7 +54,7 @@ def get_python_version(path): version_cmd = [path, "-c", "import sys; print(sys.version.split()[0])"] try: c = vistir.misc.run(version_cmd, block=True, nospin=True, return_object=True, - combine_stderr=False) + combine_stderr=False) except OSError: raise InvalidPythonVersion("%s is not a valid python path" % path) if not c.out: @@ -92,7 +92,7 @@ def looks_like_python(name): @lru_cache(maxsize=1024) def path_is_python(path): - return path_is_executable(path) and looks_like_python(path.name) + return path_is_known_executable(path) and looks_like_python(path.name) @lru_cache(maxsize=1024) @@ -117,7 +117,7 @@ def _filter_none(k, v): return False -@lru_cache(maxsize=128) +@lru_cache(maxsize=1024) def filter_pythons(path): """Return all valid pythons in a given path""" if not isinstance(path, vistir.compat.Path): From cb601b0e5b67ec76d47b9a256e328da93bdb7574 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 23:22:48 -0500 Subject: [PATCH 18/23] Fix syntax Signed-off-by: Dan Ryan --- pipenv/environment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index 8548c38f52..8e96f2c8fd 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -36,7 +36,7 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No sources = project.sources self.sources = sources if project and not pipfile: - pipfile = project.pipfile + pipfile = project.parsed_pipfile self.pipfile = pipfile self.extra_dists = [] prefix = prefix if prefix else sys.prefix From 13c9e62029184b8b30de3afdc7b51b1cae9da062 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 00:39:20 -0500 Subject: [PATCH 19/23] Update pythonfinder Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/models/path.py | 21 +++++++++++++++++---- pipenv/vendor/pythonfinder/utils.py | 12 ++++++------ 2 files changed, 23 
insertions(+), 10 deletions(-) diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index 9c96e5f82f..523f117a63 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -26,6 +26,7 @@ optional_instance_of, path_is_known_executable, unnest, + normalize_path ) from .python import PythonVersion @@ -128,9 +129,10 @@ def __attrs_post_init__(self): ) def _get_last_instance(self, path): - last_instance = next(iter( - (p for p in reversed(self.path_order) if path.lower() in p.lower())), - None, + paths = [normalize_path(p) for p in reversed(self.path_order)] + normalized_target = normalize_path(path) + last_instance = next( + iter(p for p in paths if normalized_target in p), None ) try: path_index = self.path_order.index(last_instance) @@ -165,6 +167,10 @@ def _remove_path(self, path): def _setup_asdf(self): from .asdf import AsdfFinder asdf_index = self._get_last_instance(ASDF_DATA_DIR) + if not asdf_index: + # we are in a virtualenv without global pyenv on the path, so we should + # not write pyenv to the path here + return self.asdf_finder = AsdfFinder.create(root=ASDF_DATA_DIR, ignore_unsupported=True) root_paths = [p for p in self.asdf_finder.roots] self._slice_in_paths(asdf_index, root_paths) @@ -174,10 +180,14 @@ def _setup_asdf(self): def _setup_pyenv(self): from .pyenv import PyenvFinder - pyenv_index = self._get_last_instance(PYENV_ROOT) self.pyenv_finder = PyenvFinder.create( root=PYENV_ROOT, ignore_unsupported=self.ignore_unsupported ) + pyenv_index = self._get_last_instance(PYENV_ROOT) + if not pyenv_index: + # we are in a virtualenv without global pyenv on the path, so we should + # not write pyenv to the path here + return root_paths = [p for p in self.pyenv_finder.roots] self._slice_in_paths(pyenv_index, root_paths) @@ -485,6 +495,9 @@ def get_py_version(self): py_version = PythonVersion.from_path(path=self, name=self.name) except InvalidPythonVersion: py_version 
= None + except Exception: + if not IGNORE_UNSUPPORTED: + raise return py_version return diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index 881cdb2eec..b8714f52b6 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -2,13 +2,9 @@ from __future__ import absolute_import, print_function import itertools -import locale import os -import subprocess -import sys from fnmatch import fnmatch -from itertools import chain import attr import six @@ -54,7 +50,7 @@ def get_python_version(path): version_cmd = [path, "-c", "import sys; print(sys.version.split()[0])"] try: c = vistir.misc.run(version_cmd, block=True, nospin=True, return_object=True, - combine_stderr=False) + combine_stderr=False) except OSError: raise InvalidPythonVersion("%s is not a valid python path" % path) if not c.out: @@ -92,7 +88,7 @@ def looks_like_python(name): @lru_cache(maxsize=1024) def path_is_python(path): - return path_is_known_executable(path) and looks_like_python(path.name) + return path_is_executable(path) and looks_like_python(path.name) @lru_cache(maxsize=1024) @@ -117,6 +113,10 @@ def _filter_none(k, v): return False +def normalize_path(path): + return os.path.normpath(os.path.normcase(os.path.abspath(path))) + + @lru_cache(maxsize=1024) def filter_pythons(path): """Return all valid pythons in a given path""" From d73879b5e57773973fdeb362d97bef7f46762046 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 01:01:49 -0500 Subject: [PATCH 20/23] Update requirementslib - Fix ref parsing - Fixes #3214 Signed-off-by: Dan Ryan --- pipenv/vendor/requirementslib/__init__.py | 2 +- pipenv/vendor/requirementslib/models/requirements.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index 05fd19438c..f6c985d303 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ 
b/pipenv/vendor/requirementslib/__init__.py @@ -1,5 +1,5 @@ # -*- coding=utf-8 -*- -__version__ = '1.2.6' +__version__ = '1.2.7' import logging import warnings diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index d5330b48f4..d034a12d95 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -891,8 +891,8 @@ def from_line(cls, line, editable=None, extras=None): name = link.egg_fragment subdirectory = link.subdirectory_fragment ref = None - if "@" in link.show_url and "@" in uri: - uri, ref = uri.rsplit("@", 1) + if "@" in link.path and "@" in uri: + uri, _, ref = uri.rpartition("@") if relpath and "@" in relpath: relpath, ref = relpath.rsplit("@", 1) return cls( From e328ae24dfb61e5d207f1a64f85b9ea949092ed1 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 01:23:19 -0500 Subject: [PATCH 21/23] Fix feedback Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/models/path.py | 6 +++--- pipenv/vendor/pythonfinder/utils.py | 2 +- pipenv/vendor/vendor.txt | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index 523f117a63..df755fa63b 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -150,15 +150,15 @@ def _slice_in_paths(self, start_idx, paths): def _remove_path(self, path): path_copy = reversed(self.path_order[:]) new_order = [] - target = os.path.normcase(os.path.normpath(os.path.abspath(path))) + target = normalize_path(path) path_map = { - os.path.normcase(os.path.normpath(os.path.abspath(pth))): pth + normalize_path(pth): pth for pth in self.paths.keys() } if target in path_map: del self.paths[path_map.get(target)] for current_path in path_copy: - normalized = os.path.normcase(os.path.normpath(os.path.abspath(current_path))) + normalized = 
normalize_path(current_path) if normalized != target: new_order.append(normalized) new_order = reversed(new_order) diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index b8714f52b6..fb932b10e9 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -114,7 +114,7 @@ def _filter_none(k, v): def normalize_path(path): - return os.path.normpath(os.path.normcase(os.path.abspath(path))) + return os.path.normpath(os.path.normcase(os.path.abspath(str(path)))) @lru_cache(maxsize=1024) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index c106a59c56..623178531c 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -34,7 +34,7 @@ requirementslib==1.3.0 pyparsing==2.2.2 pytoml==0.1.19 plette==0.2.2 - tomlkit==0.5.2 + tomlkit==0.4.6 shellingham==1.2.7 six==1.11.0 semver==2.8.1 From 310e0b293bf5febd035430d2a2f80e5c3158c5c8 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 10:17:19 -0500 Subject: [PATCH 22/23] Fix pythonfinder Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/models/path.py | 43 +++- pipenv/vendor/pythonfinder/models/pyenv.py | 2 - pipenv/vendor/pythonfinder/models/python.py | 221 +++++++++++++++++++- pipenv/vendor/pythonfinder/utils.py | 35 +++- 4 files changed, 284 insertions(+), 17 deletions(-) diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index df755fa63b..d3cdd9d168 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -26,7 +26,9 @@ optional_instance_of, path_is_known_executable, unnest, - normalize_path + normalize_path, + parse_pyenv_version_order, + parse_asdf_version_order ) from .python import PythonVersion @@ -165,23 +167,26 @@ def _remove_path(self, path): self.path_order = new_order def _setup_asdf(self): - from .asdf import AsdfFinder + from .python import PythonFinder asdf_index = self._get_last_instance(ASDF_DATA_DIR) 
if not asdf_index: # we are in a virtualenv without global pyenv on the path, so we should # not write pyenv to the path here return - self.asdf_finder = AsdfFinder.create(root=ASDF_DATA_DIR, ignore_unsupported=True) + self.asdf_finder = PythonFinder.create( + root=ASDF_DATA_DIR, ignore_unsupported=True, + sort_function=parse_asdf_version_order, version_glob_path="installs/python/*") root_paths = [p for p in self.asdf_finder.roots] self._slice_in_paths(asdf_index, root_paths) self.paths.update(self.asdf_finder.roots) self._register_finder("asdf", self.asdf_finder) def _setup_pyenv(self): - from .pyenv import PyenvFinder + from .python import PythonFinder - self.pyenv_finder = PyenvFinder.create( - root=PYENV_ROOT, ignore_unsupported=self.ignore_unsupported + self.pyenv_finder = PythonFinder.create( + root=PYENV_ROOT, sort_function=parse_pyenv_version_order, + version_glob_path="versions/*", ignore_unsupported=self.ignore_unsupported ) pyenv_index = self._get_last_instance(PYENV_ROOT) if not pyenv_index: @@ -585,3 +590,29 @@ def is_python(self): return self.is_executable and ( looks_like_python(self.path.name) ) + + +@attr.s +class VersionPath(SystemPath): + base = attr.ib(default=None, validator=optional_instance_of(Path)) + name = attr.ib(default=None) + + @classmethod + def create(cls, path, only_python=True, pythons=None, name=None): + """Accepts a path to a base python version directory. 
+ + Generates the version listings for it""" + from .path import PathEntry + path = ensure_path(path) + path_entries = defaultdict(PathEntry) + bin_ = "{base}/bin" + if path.as_posix().endswith(Path(bin_).name): + path = path.parent + bin_dir = ensure_path(bin_.format(base=path.as_posix())) + if not name: + name = path.name + current_entry = PathEntry.create( + bin_dir, is_root=True, only_python=True, pythons=pythons, name=name + ) + path_entries[bin_dir.as_posix()] = current_entry + return cls(name=name, base=bin_dir, paths=path_entries) diff --git a/pipenv/vendor/pythonfinder/models/pyenv.py b/pipenv/vendor/pythonfinder/models/pyenv.py index cf85f57a98..6f2d6422d5 100644 --- a/pipenv/vendor/pythonfinder/models/pyenv.py +++ b/pipenv/vendor/pythonfinder/models/pyenv.py @@ -14,8 +14,6 @@ from ..utils import ( ensure_path, optional_instance_of, - get_python_version, - filter_pythons, unnest, ) from .mixins import BaseFinder, BasePath diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py index 583dc6b38a..7feee84e85 100644 --- a/pipenv/vendor/pythonfinder/models/python.py +++ b/pipenv/vendor/pythonfinder/models/python.py @@ -3,23 +3,238 @@ import copy import platform +import operator +import logging from collections import defaultdict import attr -from packaging.version import Version, LegacyVersion +from packaging.version import Version from packaging.version import parse as parse_version +from vistir.compat import Path -from ..environment import SYSTEM_ARCH +from ..environment import SYSTEM_ARCH, PYENV_ROOT, ASDF_DATA_DIR +from .mixins import BaseFinder, BasePath from ..utils import ( _filter_none, ensure_path, get_python_version, optional_instance_of, - ensure_path, + unnest, + is_in_path, + parse_pyenv_version_order, + parse_asdf_version_order, ) +logger = logging.getLogger(__name__) + + +@attr.s(slots=True) +class PythonFinder(BaseFinder, BasePath): + root = attr.ib(default=None, 
validator=optional_instance_of(Path)) + #: ignore_unsupported should come before versions, because its value is used + #: in versions's default initializer. + ignore_unsupported = attr.ib(default=True) + #: The function to use to sort version order when returning an ordered version set + sort_function = attr.ib(default=None) + paths = attr.ib(default=attr.Factory(list)) + roots = attr.ib(default=attr.Factory(defaultdict)) + #: Glob path for python versions off of the root directory + version_glob_path = attr.ib(default="versions/*") + versions = attr.ib() + pythons = attr.ib() + + @property + def expanded_paths(self): + return ( + path for path in unnest(p for p in self.versions.values()) + if path is not None + ) + + @property + def is_pyenv(self): + return is_in_path(str(self.root), PYENV_ROOT) + + @property + def is_asdf(self): + return is_in_path(str(self.root), ASDF_DATA_DIR) + + def get_version_order(self): + version_paths = [ + p for p in self.root.glob(self.version_glob_path) + if not (p.parent.name == "envs" or p.name == "envs") + ] + versions = {v.name: v for v in version_paths} + # Default to an empty order so the loop below cannot hit a NameError when + # the root is neither a pyenv nor an asdf installation; the parse helpers + # may return None, and their files may list entries (e.g. "system") that + # are not present under the versions glob, hence the guards. + version_order = [] + if self.is_pyenv: + version_order = [versions[v] for v in (parse_pyenv_version_order() or []) if v in versions] + elif self.is_asdf: + version_order = [versions[v] for v in (parse_asdf_version_order() or []) if v in versions] + for version in version_order: + version_paths.remove(version) + if version_order: + version_order += version_paths + else: + version_order = version_paths + return version_order + + @classmethod + def version_from_bin_dir(cls, base_dir, name=None): + from .path import PathEntry + py_version = None + version_path = PathEntry.create( + path=base_dir.absolute().as_posix(), + only_python=True, + name=base_dir.parent.name, + ) + py_version = next(iter(version_path.find_all_python_versions()), None) + return py_version + + @versions.default + def get_versions(self): + from .path import PathEntry + versions = defaultdict() + bin_ = "{base}/bin" + for p in self.get_version_order(): + bin_dir =
Path(bin_.format(base=p.as_posix())) + version_path = None + if bin_dir.exists(): + version_path = PathEntry.create( + path=bin_dir.absolute().as_posix(), + only_python=False, + name=p.name, + is_root=True, + ) + version = None + try: + version = PythonVersion.parse(p.name) + except ValueError: + entry = next(iter(version_path.find_all_python_versions()), None) + if not entry: + if self.ignore_unsupported: + continue + raise + else: + version = entry.py_version.as_dict() + except Exception: + if not self.ignore_unsupported: + raise + logger.warning( + "Unsupported Python version %r, ignoring...", p.name, exc_info=True + ) + continue + if not version: + continue + version_tuple = ( + version.get("major"), + version.get("minor"), + version.get("patch"), + version.get("is_prerelease"), + version.get("is_devrelease"), + version.get("is_debug"), + ) + self.roots[p] = version_path + versions[version_tuple] = version_path + self.paths.append(version_path) + return versions + + @pythons.default + def get_pythons(self): + pythons = defaultdict() + for p in self.paths: + pythons.update(p.pythons) + return pythons + + @classmethod + def create(cls, root, sort_function=None, version_glob_path=None, ignore_unsupported=True): + root = ensure_path(root) + if not version_glob_path: + version_glob_path = "versions/*" + return cls(root=root, ignore_unsupported=ignore_unsupported, + sort_function=sort_function, version_glob_path=version_glob_path) + + def find_all_python_versions( + self, + major=None, + minor=None, + patch=None, + pre=None, + dev=None, + arch=None, + name=None, + ): + """Search for a specific python version on the path. Return all copies + + :param major: Major python version to search for. 
+ :type major: int + :param int minor: Minor python version to search for, defaults to None + :param int patch: Patch python version to search for, defaults to None + :param bool pre: Search for prereleases (default None) - prioritize releases if None + :param bool dev: Search for devreleases (default None) - prioritize releases if None + :param str arch: Architecture to include, e.g. '64bit', defaults to None + :param str name: The name of a python version, e.g. ``anaconda3-5.3.0`` + :return: A list of :class:`~pythonfinder.models.PathEntry` instances matching the version requested. + :rtype: List[:class:`~pythonfinder.models.PathEntry`] + """ + + version_matcher = operator.methodcaller( + "matches", + major=major, + minor=minor, + patch=patch, + pre=pre, + dev=dev, + arch=arch, + name=name, + ) + py = operator.attrgetter("as_python") + pythons = ( + py_ver for py_ver in (py(p) for p in self.pythons.values() if p is not None) + if py_ver is not None + ) + # pythons = filter(None, [p.as_python for p in self.pythons.values()]) + matching_versions = filter(lambda py: version_matcher(py), pythons) + version_sort = operator.attrgetter("version_sort") + return sorted(matching_versions, key=version_sort, reverse=True) + + def find_python_version( + self, + major=None, + minor=None, + patch=None, + pre=None, + dev=None, + arch=None, + name=None, + ): + """Search or self for the specified Python version and return the first match. + + :param major: Major version number. + :type major: int + :param int minor: Minor python version to search for, defaults to None + :param int patch: Patch python version to search for, defaults to None + :param bool pre: Search for prereleases (default None) - prioritize releases if None + :param bool dev: Search for devreleases (default None) - prioritize releases if None + :param str arch: Architecture to include, e.g. '64bit', defaults to None + :param str name: The name of a python version, e.g. 
``anaconda3-5.3.0`` + :returns: A :class:`~pythonfinder.models.PathEntry` instance matching the version requested. + """ + + version_matcher = operator.methodcaller( + "matches", + major=major, + minor=minor, + patch=patch, + pre=pre, + dev=dev, + arch=arch, + name=name, + ) + pythons = filter(None, [p.as_python for p in self.pythons.values()]) + matching_versions = filter(lambda py: version_matcher(py), pythons) + version_sort = operator.attrgetter("version_sort") + return next(iter(c for c in sorted(matching_versions, key=version_sort, reverse=True)), None) + @attr.s(slots=True) class PythonVersion(object): diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index fb932b10e9..9c71e38075 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -7,10 +7,12 @@ from fnmatch import fnmatch import attr +import io import six import vistir +from .environment import PYENV_INSTALLED, PYENV_ROOT, ASDF_INSTALLED, ASDF_DATA_DIR from .exceptions import InvalidPythonVersion try: @@ -127,12 +129,6 @@ def filter_pythons(path): return filter(lambda x: path_is_python(x), path.iterdir()) -# def unnest(item): -# if isinstance(next((i for i in item), None), (list, tuple)): -# return chain(*filter(None, item)) -# return chain(filter(None, item)) - - def unnest(item): if isinstance(item, Iterable) and not isinstance(item, six.string_types): item, target = itertools.tee(item, 2) @@ -145,3 +141,30 @@ def unnest(item): yield sub else: yield el + + +def parse_pyenv_version_order(filename="version"): + version_order_file = normalize_path(os.path.join(PYENV_ROOT, filename)) + if os.path.exists(version_order_file) and os.path.isfile(version_order_file): + with io.open(version_order_file, encoding="utf-8") as fh: + contents = fh.read() + version_order = [v for v in contents.splitlines()] + return version_order + + +def parse_asdf_version_order(filename=".tool-versions"): + version_order_file = 
normalize_path(os.path.join(os.path.expanduser("~"), filename)) + if os.path.exists(version_order_file) and os.path.isfile(version_order_file): + with io.open(version_order_file, encoding="utf-8") as fh: + contents = fh.read() + python_section = next(iter( + line for line in contents.splitlines() if line.startswith("python") + ), None) + if python_section: + python_key, _, versions = python_section.partition(" ") + if versions: + return versions.split() + + +def is_in_path(path, parent): + return normalize_path(str(path)).startswith(normalize_path(str(parent))) From 6b3c9a7eb79564daad03ba32aee318a6e8ce8195 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 10:18:15 -0500 Subject: [PATCH 23/23] Remove accidentally committed test script Signed-off-by: Dan Ryan --- pipenv/test_script.py | 32 -------------------------------- 1 file changed, 32 deletions(-) delete mode 100644 pipenv/test_script.py diff --git a/pipenv/test_script.py b/pipenv/test_script.py deleted file mode 100644 index d599ded637..0000000000 --- a/pipenv/test_script.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding=utf-8 -*- - -import os -import sys - - -def _patch_path(): - import site - pipenv_libdir = os.path.dirname(os.path.abspath(__file__)) - pipenv_site_dir = os.path.dirname(pipenv_libdir) - site.addsitedir(pipenv_site_dir) - for _dir in ("vendor", "patched"): - sys.path.insert(0, os.path.join(pipenv_libdir, _dir)) - - -def test_install(): - from pipenv.vendor.vistir.contextmanagers import cd - from pipenv.vendor.click.testing import CliRunner - runner = CliRunner() - with cd("/tmp/test"): - from pipenv.core import do_lock - locked = do_lock(system=False, clear=False, pre=False, keep_outdated=False, - write=True, pypi_mirror=None) - # result = runner.invoke(cli, ["lock", "--verbose"]) - # print(result.output) - # print(result.exit_code) - print(locked) - - -if __name__ == "__main__": - _patch_path() - test_install()