From 80a6dcbedde656eb708290b42798b3d3351694e2 Mon Sep 17 00:00:00 2001 From: devxpy Date: Sat, 27 Oct 2018 23:55:34 +0530 Subject: [PATCH 01/71] add PIPENV_SKIP_LOCK envvar, and suitable doc --- pipenv/cli/options.py | 1 + pipenv/environments.py | 10 ++++++++++ 2 files changed, 11 insertions(+) diff --git a/pipenv/cli/options.py b/pipenv/cli/options.py index 99fc5344bf..940bbf77c0 100644 --- a/pipenv/cli/options.py +++ b/pipenv/cli/options.py @@ -127,6 +127,7 @@ def callback(ctx, param, value): return value return option("--skip-lock", is_flag=True, default=False, expose_value=False, help=u"Ignore locking mechanisms when installing—use the Pipfile, instead.", + envvar="PIPENV_SKIP_LOCK", callback=callback, type=click_booltype)(f) diff --git a/pipenv/environments.py b/pipenv/environments.py index 4bfd937d88..45d5f720ec 100644 --- a/pipenv/environments.py +++ b/pipenv/environments.py @@ -197,6 +197,16 @@ if interactive. """ +PIPENV_SKIP_LOCK = False +"""If set, Pipenv won't lock dependencies automatically. + +This might be desirable if a project has large number of dependencies, +because locking is an inherently slow operation. + +Default is to lock dependencies and update ``Pipfile.lock`` on each run. + +NOTE: This only affects the ``install`` and ``uninstall`` commands. +""" # Internal, support running in a different Python from sys.executable. PIPENV_PYTHON = os.environ.get("PIPENV_PYTHON") From 023e7861c3ed1369fba84ff9ba9b70424251c2b3 Mon Sep 17 00:00:00 2001 From: devxpy Date: Sun, 28 Oct 2018 00:36:45 +0530 Subject: [PATCH 02/71] fix PIPENV_SKIP_LOCK for `pipenv unisntall` command. 
--- pipenv/cli/command.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/cli/command.py b/pipenv/cli/command.py index 1ce9fee944..d2f288d305 100644 --- a/pipenv/cli/command.py +++ b/pipenv/cli/command.py @@ -284,7 +284,7 @@ def uninstall( three=state.three, python=state.python, system=state.system, - lock=lock, + lock=not state.installstate.skip_lock, all_dev=all_dev, all=all, keep_outdated=state.installstate.keep_outdated, From 2b195dd789250b57547fe58f168a3f39e57c98d4 Mon Sep 17 00:00:00 2001 From: devxpy Date: Thu, 8 Nov 2018 06:00:09 +0530 Subject: [PATCH 03/71] fix errors from merge --- pipenv/cli/options.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pipenv/cli/options.py b/pipenv/cli/options.py index fa20fc9403..5c4a991f9a 100644 --- a/pipenv/cli/options.py +++ b/pipenv/cli/options.py @@ -126,7 +126,6 @@ def callback(ctx, param, value): state.installstate.skip_lock = value return value return option("--skip-lock", is_flag=True, default=False, expose_value=False, - help=u"Ignore locking mechanisms when installing—use the Pipfile, instead.", help=u"Skip locking mechanisms and use the Pipfile instead during operation.", envvar="PIPENV_SKIP_LOCK", callback=callback, type=click.types.BOOL)(f) From 3eba19d359ee5144f0d21c643b8617fe3fafbe92 Mon Sep 17 00:00:00 2001 From: frostming Date: Thu, 8 Nov 2018 10:22:26 +0800 Subject: [PATCH 04/71] Improve toml parsing --- pipenv/project.py | 70 ++++++++++--------------------- tests/integration/test_project.py | 18 ++++++++ tests/unit/test_vendor.py | 6 --- 3 files changed, 40 insertions(+), 54 deletions(-) diff --git a/pipenv/project.py b/pipenv/project.py index 26b4cf0ce6..4b90f4934d 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -8,7 +8,6 @@ import base64 import fnmatch import hashlib -import contoml from first import first from cached_property import cached_property import operator @@ -578,60 +577,31 @@ def clear_pipfile_cache(self): _pipfile_cache.clear() @staticmethod - def 
dump_dict(dictionary, write_to, inline=False): - """ - Perform a nested recursive translation of a dictionary structure to a toml object. - - :param dictionary: A base dictionary to translate - :param write_to: The root node which will be mutated by the operation - :param inline: Whether to create inline tables for dictionaries, defaults to False - :return: A new toml hierarchical document - """ - - - def gen_table(inline=False): - if inline: - return tomlkit.inline_table() - return tomlkit.table() - - for key, value in dictionary.items(): - if isinstance(value, dict): - table = gen_table(inline=inline) - for sub_key, sub_value in value.items(): - if isinstance(sub_value, dict): - table[sub_key] = Project.dump_dict( - sub_value, gen_table(inline), inline=inline - ) - else: - table[sub_key] = sub_value - write_to[key] = table - else: - write_to[key] = Project.dump_dict(value, gen_table(inline), inline=inline) + def convert_outline_table(parsed): + """Converts all outline to inline tables""" + if hasattr(parsed, "_body"): # Duck-type that implies tomlkit.api.Container. + empty_inline_table = tomlkit.inline_table else: - write_to[key] = value - return write_to + empty_inline_table = toml.TomlDecoder().get_empty_inline_table + for section in ("packages", "dev-packages"): + table_data = parsed.get(section, {}) + for package, value in table_data.items(): + if hasattr(value, "keys"): + table = empty_inline_table() + table.update(value) + table_data[package] = table + return parsed def _parse_pipfile(self, contents): - # If any outline tables are present... try: data = tomlkit.parse(contents) - # Convert all outline tables to inline tables. 
- for section in ("packages", "dev-packages"): - table_data = data.get(section, tomlkit.table()) - for package, value in table_data.items(): - if isinstance(value, dict): - table = tomlkit.inline_table() - table.update(value) - table_data[package] = table - else: - table_data[package] = value - data[section] = table_data - return data except Exception: # We lose comments here, but it's for the best.) # Fallback to toml parser, for large files. - toml_decoder = toml.decoder.TomlDecoder() - return toml.loads(contents, decoder=toml_decoder) + data = toml.loads(contents) + if "[packages." in contents or "[dev-packages." in contents: + data = self.convert_outline_table(data) + return data def _read_pyproject(self): pyproject = self.path_to("pyproject.toml") @@ -886,7 +856,11 @@ def write_toml(self, data, path=None): if path is None: path = self.pipfile_location try: - formatted_data = tomlkit.dumps(data).rstrip() + if hasattr(data, "_body"): + formatted_data = tomlkit.dumps(data).rstrip() + else: + encoder = toml.encoder.TomlPreserveInlineDictEncoder() + formatted_data = toml.dumps(data, encoder=encoder) except Exception: document = tomlkit.document() for section in ("packages", "dev-packages"): diff --git a/tests/integration/test_project.py b/tests/integration/test_project.py index 5deccc84fa..1e00bbb70b 100644 --- a/tests/integration/test_project.py +++ b/tests/integration/test_project.py @@ -143,3 +143,21 @@ def test_many_indexes(PipenvInstance, pypi): f.write(contents) c = p.pipenv('install') assert c.return_code == 0 + + +@pytest.mark.install +@pytest.mark.project +def test_rewrite_outline_table(PipenvInstance, pypi): + with PipenvInstance(pypi=pypi, chdir=True) as p: + with open(p.pipfile_path, 'w') as f: + contents = """ +[packages.requests] +version = "*" + """.strip() + f.write(contents) + c = p.pipenv('install click') + assert c.return_code == 0 + with open(p.pipfile_path) as f: + contents = f.read() + assert "[packages.requests]" not in contents + assert 
'requests = { version = "*" }' in contents diff --git a/tests/unit/test_vendor.py b/tests/unit/test_vendor.py index 1894b6fff5..704b37fb7f 100644 --- a/tests/unit/test_vendor.py +++ b/tests/unit/test_vendor.py @@ -106,9 +106,3 @@ def test_inject_environment_variables(self): def test_token_date(dt, content): token = create_primitive_token(dt) assert token == tokens.Token(tokens.TYPE_DATE, content) - - -def test_dump_nonascii_string(): - content = 'name = "Stažené"\n' - toml_content = contoml.dumps(contoml.loads(content)) - assert toml_content == content From e147dc3ade4040e2b59c170c752f080839845264 Mon Sep 17 00:00:00 2001 From: frostming Date: Thu, 8 Nov 2018 10:57:20 +0800 Subject: [PATCH 05/71] Drops prettytoml/contoml form vendors --- news/3191.vendor.rst | 1 + pipenv/patched/contoml/LICENSE | 22 - pipenv/patched/contoml/__init__.py | 48 --- pipenv/patched/contoml/_version.py | 1 - pipenv/patched/contoml/file/__init__.py | 3 - pipenv/patched/contoml/file/array.py | 40 -- pipenv/patched/contoml/file/cascadedict.py | 56 --- pipenv/patched/contoml/file/file.py | 293 -------------- pipenv/patched/contoml/file/freshtable.py | 45 --- pipenv/patched/contoml/file/peekableit.py | 30 -- pipenv/patched/contoml/file/raw.py | 16 - pipenv/patched/contoml/file/structurer.py | 116 ------ .../patched/contoml/file/test_cascadedict.py | 25 -- pipenv/patched/contoml/file/test_entries.py | 20 - .../patched/contoml/file/test_peekableit.py | 12 - .../patched/contoml/file/test_structurer.py | 41 -- pipenv/patched/contoml/file/toplevels.py | 142 ------- pipenv/patched/patched.txt | 1 - pipenv/patched/prettytoml/LICENSE | 22 - pipenv/patched/prettytoml/__init__.py | 25 -- pipenv/patched/prettytoml/_version.py | 1 - .../patched/prettytoml/elements/__init__.py | 13 - .../prettytoml/elements/abstracttable.py | 92 ----- pipenv/patched/prettytoml/elements/array.py | 136 ------- pipenv/patched/prettytoml/elements/atomic.py | 52 --- pipenv/patched/prettytoml/elements/common.py | 101 ----- 
pipenv/patched/prettytoml/elements/errors.py | 13 - pipenv/patched/prettytoml/elements/factory.py | 152 ------- .../prettytoml/elements/inlinetable.py | 78 ---- .../patched/prettytoml/elements/metadata.py | 80 ---- pipenv/patched/prettytoml/elements/table.py | 122 ------ .../prettytoml/elements/tableheader.py | 95 ----- .../patched/prettytoml/elements/test_array.py | 67 ---- .../prettytoml/elements/test_atomic.py | 9 - .../prettytoml/elements/test_common.py | 89 ----- .../prettytoml/elements/test_factory.py | 22 - .../prettytoml/elements/test_inlinetable.py | 52 --- .../prettytoml/elements/test_metadata.py | 25 -- .../patched/prettytoml/elements/test_table.py | 59 --- .../prettytoml/elements/test_tableheader.py | 12 - .../prettytoml/elements/test_traversal.py | 18 - .../prettytoml/elements/traversal/__init__.py | 175 -------- .../elements/traversal/predicates.py | 48 --- pipenv/patched/prettytoml/errors.py | 32 -- pipenv/patched/prettytoml/lexer/__init__.py | 123 ------ pipenv/patched/prettytoml/lexer/test_lexer.py | 153 ------- pipenv/patched/prettytoml/parser/__init__.py | 34 -- .../prettytoml/parser/elementsanitizer.py | 58 --- pipenv/patched/prettytoml/parser/errors.py | 17 - pipenv/patched/prettytoml/parser/parser.py | 376 ------------------ pipenv/patched/prettytoml/parser/recdesc.py | 114 ------ .../patched/prettytoml/parser/test_parser.py | 156 -------- .../patched/prettytoml/parser/tokenstream.py | 39 -- .../patched/prettytoml/prettifier/__init__.py | 39 -- .../prettytoml/prettifier/commentspace.py | 35 -- .../patched/prettytoml/prettifier/common.py | 54 --- .../prettifier/deindentanonymoustable.py | 43 -- .../prettytoml/prettifier/linelength.py | 62 --- .../prettytoml/prettifier/tableassignment.py | 40 -- .../prettytoml/prettifier/tableentrysort.py | 38 -- .../prettytoml/prettifier/tableindent.py | 49 --- .../patched/prettytoml/prettifier/tablesep.py | 31 -- .../prettifier/test_commentspace.py | 28 -- .../prettifier/test_deindentanonymoustable.py | 22 - 
.../prettytoml/prettifier/test_linelength.py | 39 -- .../prettifier/test_tableassignment.py | 29 -- .../prettifier/test_tableentrysort.py | 45 --- .../prettytoml/prettifier/test_tableindent.py | 25 -- .../prettytoml/prettifier/test_tablesep.py | 34 -- pipenv/patched/prettytoml/test_prettifier.py | 12 - pipenv/patched/prettytoml/test_util.py | 22 - pipenv/patched/prettytoml/tokens/__init__.py | 136 ------- pipenv/patched/prettytoml/tokens/errors.py | 13 - pipenv/patched/prettytoml/tokens/py2toml.py | 154 ------- .../patched/prettytoml/tokens/test_py2toml.py | 69 ---- .../patched/prettytoml/tokens/test_toml2py.py | 86 ---- pipenv/patched/prettytoml/tokens/toml2py.py | 130 ------ pipenv/patched/prettytoml/util.py | 141 ------- pipenv/project.py | 8 +- tasks/vendoring/patches/patched/contoml.patch | 28 -- .../patched/prettytoml-newlinefix.patch | 13 - .../patches/patched/prettytoml-python37.patch | 32 -- .../patched/prettytoml-table-iter.patch | 29 -- .../patches/patched/prettytoml-unicode.patch | 132 ------ .../patches/patched/prettytoml.patch | 78 ---- tests/integration/test_project.py | 2 +- tests/unit/test_vendor.py | 70 +--- 87 files changed, 5 insertions(+), 5335 deletions(-) create mode 100644 news/3191.vendor.rst delete mode 100644 pipenv/patched/contoml/LICENSE delete mode 100644 pipenv/patched/contoml/__init__.py delete mode 100644 pipenv/patched/contoml/_version.py delete mode 100644 pipenv/patched/contoml/file/__init__.py delete mode 100644 pipenv/patched/contoml/file/array.py delete mode 100644 pipenv/patched/contoml/file/cascadedict.py delete mode 100644 pipenv/patched/contoml/file/file.py delete mode 100644 pipenv/patched/contoml/file/freshtable.py delete mode 100644 pipenv/patched/contoml/file/peekableit.py delete mode 100644 pipenv/patched/contoml/file/raw.py delete mode 100644 pipenv/patched/contoml/file/structurer.py delete mode 100644 pipenv/patched/contoml/file/test_cascadedict.py delete mode 100644 pipenv/patched/contoml/file/test_entries.py 
delete mode 100644 pipenv/patched/contoml/file/test_peekableit.py delete mode 100644 pipenv/patched/contoml/file/test_structurer.py delete mode 100644 pipenv/patched/contoml/file/toplevels.py delete mode 100644 pipenv/patched/prettytoml/LICENSE delete mode 100644 pipenv/patched/prettytoml/__init__.py delete mode 100644 pipenv/patched/prettytoml/_version.py delete mode 100644 pipenv/patched/prettytoml/elements/__init__.py delete mode 100644 pipenv/patched/prettytoml/elements/abstracttable.py delete mode 100644 pipenv/patched/prettytoml/elements/array.py delete mode 100644 pipenv/patched/prettytoml/elements/atomic.py delete mode 100644 pipenv/patched/prettytoml/elements/common.py delete mode 100644 pipenv/patched/prettytoml/elements/errors.py delete mode 100644 pipenv/patched/prettytoml/elements/factory.py delete mode 100644 pipenv/patched/prettytoml/elements/inlinetable.py delete mode 100644 pipenv/patched/prettytoml/elements/metadata.py delete mode 100644 pipenv/patched/prettytoml/elements/table.py delete mode 100644 pipenv/patched/prettytoml/elements/tableheader.py delete mode 100644 pipenv/patched/prettytoml/elements/test_array.py delete mode 100644 pipenv/patched/prettytoml/elements/test_atomic.py delete mode 100644 pipenv/patched/prettytoml/elements/test_common.py delete mode 100644 pipenv/patched/prettytoml/elements/test_factory.py delete mode 100644 pipenv/patched/prettytoml/elements/test_inlinetable.py delete mode 100644 pipenv/patched/prettytoml/elements/test_metadata.py delete mode 100644 pipenv/patched/prettytoml/elements/test_table.py delete mode 100644 pipenv/patched/prettytoml/elements/test_tableheader.py delete mode 100644 pipenv/patched/prettytoml/elements/test_traversal.py delete mode 100644 pipenv/patched/prettytoml/elements/traversal/__init__.py delete mode 100644 pipenv/patched/prettytoml/elements/traversal/predicates.py delete mode 100644 pipenv/patched/prettytoml/errors.py delete mode 100644 pipenv/patched/prettytoml/lexer/__init__.py delete 
mode 100644 pipenv/patched/prettytoml/lexer/test_lexer.py delete mode 100644 pipenv/patched/prettytoml/parser/__init__.py delete mode 100644 pipenv/patched/prettytoml/parser/elementsanitizer.py delete mode 100644 pipenv/patched/prettytoml/parser/errors.py delete mode 100644 pipenv/patched/prettytoml/parser/parser.py delete mode 100644 pipenv/patched/prettytoml/parser/recdesc.py delete mode 100644 pipenv/patched/prettytoml/parser/test_parser.py delete mode 100644 pipenv/patched/prettytoml/parser/tokenstream.py delete mode 100644 pipenv/patched/prettytoml/prettifier/__init__.py delete mode 100644 pipenv/patched/prettytoml/prettifier/commentspace.py delete mode 100644 pipenv/patched/prettytoml/prettifier/common.py delete mode 100644 pipenv/patched/prettytoml/prettifier/deindentanonymoustable.py delete mode 100644 pipenv/patched/prettytoml/prettifier/linelength.py delete mode 100644 pipenv/patched/prettytoml/prettifier/tableassignment.py delete mode 100644 pipenv/patched/prettytoml/prettifier/tableentrysort.py delete mode 100644 pipenv/patched/prettytoml/prettifier/tableindent.py delete mode 100644 pipenv/patched/prettytoml/prettifier/tablesep.py delete mode 100644 pipenv/patched/prettytoml/prettifier/test_commentspace.py delete mode 100644 pipenv/patched/prettytoml/prettifier/test_deindentanonymoustable.py delete mode 100644 pipenv/patched/prettytoml/prettifier/test_linelength.py delete mode 100644 pipenv/patched/prettytoml/prettifier/test_tableassignment.py delete mode 100644 pipenv/patched/prettytoml/prettifier/test_tableentrysort.py delete mode 100644 pipenv/patched/prettytoml/prettifier/test_tableindent.py delete mode 100644 pipenv/patched/prettytoml/prettifier/test_tablesep.py delete mode 100644 pipenv/patched/prettytoml/test_prettifier.py delete mode 100644 pipenv/patched/prettytoml/test_util.py delete mode 100644 pipenv/patched/prettytoml/tokens/__init__.py delete mode 100644 pipenv/patched/prettytoml/tokens/errors.py delete mode 100644 
pipenv/patched/prettytoml/tokens/py2toml.py delete mode 100644 pipenv/patched/prettytoml/tokens/test_py2toml.py delete mode 100644 pipenv/patched/prettytoml/tokens/test_toml2py.py delete mode 100644 pipenv/patched/prettytoml/tokens/toml2py.py delete mode 100644 pipenv/patched/prettytoml/util.py delete mode 100644 tasks/vendoring/patches/patched/contoml.patch delete mode 100644 tasks/vendoring/patches/patched/prettytoml-newlinefix.patch delete mode 100644 tasks/vendoring/patches/patched/prettytoml-python37.patch delete mode 100644 tasks/vendoring/patches/patched/prettytoml-table-iter.patch delete mode 100644 tasks/vendoring/patches/patched/prettytoml-unicode.patch delete mode 100644 tasks/vendoring/patches/patched/prettytoml.patch diff --git a/news/3191.vendor.rst b/news/3191.vendor.rst new file mode 100644 index 0000000000..3806b68fbc --- /dev/null +++ b/news/3191.vendor.rst @@ -0,0 +1 @@ +Switch to ``tomlkit`` for parsing and writing. Drop ``prettytoml`` and ``contoml`` from vendors. diff --git a/pipenv/patched/contoml/LICENSE b/pipenv/patched/contoml/LICENSE deleted file mode 100644 index 116fa4e558..0000000000 --- a/pipenv/patched/contoml/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Jumpscale - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/pipenv/patched/contoml/__init__.py b/pipenv/patched/contoml/__init__.py deleted file mode 100644 index 9dba5e206c..0000000000 --- a/pipenv/patched/contoml/__init__.py +++ /dev/null @@ -1,48 +0,0 @@ -from ._version import VERSION - -__version__ = VERSION - - -def loads(text): - """ - Parses TOML text into a dict-like object and returns it. - """ - from prettytoml.parser import parse_tokens - from prettytoml.lexer import tokenize as lexer - from .file import TOMLFile - - tokens = tuple(lexer(text, is_top_level=True)) - elements = parse_tokens(tokens) - return TOMLFile(elements) - - -def load(file_path): - """ - Parses a TOML file into a dict-like object and returns it. - """ - return loads(open(file_path).read()) - - -def dumps(value): - """ - Dumps a data structure to TOML source code. - - The given value must be either a dict of dict values, a dict, or a TOML file constructed by this module. - """ - - from contoml.file.file import TOMLFile - - if not isinstance(value, TOMLFile): - raise RuntimeError("Can only dump a TOMLFile instance loaded by load() or loads()") - - return value.dumps() - - -def dump(obj, file_path, prettify=False): - """ - Dumps a data structure to the filesystem as TOML. - - The given value must be either a dict of dict values, a dict, or a TOML file constructed by this module. 
- """ - with open(file_path, 'w') as fp: - fp.write(dumps(obj)) diff --git a/pipenv/patched/contoml/_version.py b/pipenv/patched/contoml/_version.py deleted file mode 100644 index e0f154708e..0000000000 --- a/pipenv/patched/contoml/_version.py +++ /dev/null @@ -1 +0,0 @@ -VERSION = 'master' diff --git a/pipenv/patched/contoml/file/__init__.py b/pipenv/patched/contoml/file/__init__.py deleted file mode 100644 index 1aba5121c2..0000000000 --- a/pipenv/patched/contoml/file/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ - - -from .file import TOMLFile diff --git a/pipenv/patched/contoml/file/array.py b/pipenv/patched/contoml/file/array.py deleted file mode 100644 index 40e5acb3ec..0000000000 --- a/pipenv/patched/contoml/file/array.py +++ /dev/null @@ -1,40 +0,0 @@ -from prettytoml.elements.table import TableElement -from prettytoml.errors import InvalidValueError -from contoml.file.freshtable import FreshTable -from prettytoml import util - - -class ArrayOfTables(list): - - def __init__(self, toml_file, name, iterable=None): - if iterable: - list.__init__(self, iterable) - self._name = name - self._toml_file = toml_file - - def append(self, value): - if isinstance(value, dict): - table = FreshTable(parent=self, name=self._name, is_array=True) - table._append_to_parent() - index = len(self._toml_file[self._name]) - 1 - for key_seq, value in util.flatten_nested(value).items(): - # self._toml_file._setitem_with_key_seq((self._name, index) + key_seq, value) - self._toml_file._array_setitem_with_key_seq(self._name, index, key_seq, value) - # for k, v in value.items(): - # table[k] = v - else: - raise InvalidValueError('Can only append a dict to an array of tables') - - def __getitem__(self, item): - try: - return list.__getitem__(self, item) - except IndexError: - if item == len(self): - return FreshTable(parent=self, name=self._name, is_array=True) - else: - raise - - def append_fresh_table(self, fresh_table): - list.append(self, fresh_table) - if self._toml_file: - 
self._toml_file.append_fresh_table(fresh_table) diff --git a/pipenv/patched/contoml/file/cascadedict.py b/pipenv/patched/contoml/file/cascadedict.py deleted file mode 100644 index 4e97c0600d..0000000000 --- a/pipenv/patched/contoml/file/cascadedict.py +++ /dev/null @@ -1,56 +0,0 @@ -import operator -from functools import reduce -from contoml.file import raw - - -class CascadeDict: - """ - A dict-like object made up of one or more other dict-like objects where querying for an item cascade-gets - it from all the internal dicts in order of their listing, and setting an item sets it on the first dict listed. - """ - - def __init__(self, *internal_dicts): - assert internal_dicts, 'internal_dicts cannot be empty' - self._internal_dicts = tuple(internal_dicts) - - def cascaded_with(self, one_more_dict): - """ - Returns another instance with one more dict cascaded at the end. - """ - return CascadeDict(self._internal_dicts, one_more_dict,) - - def __getitem__(self, item): - for d in self._internal_dicts: - try: - return d[item] - except KeyError: - pass - raise KeyError - - def __setitem__(self, key, value): - self._internal_dicts[0][key] = value - - def keys(self): - return set(reduce(operator.or_, (set(d.keys()) for d in self._internal_dicts))) - - def items(self): - all_items = reduce(operator.add, (list(d.items()) for d in reversed(self._internal_dicts))) - unique_items = {k: v for k, v in all_items}.items() - return tuple(unique_items) - - def __contains__(self, item): - for d in self._internal_dicts: - if item in d: - return True - return False - - @property - def neutralized(self): - return {k: raw.to_raw(v) for k, v in self.items()} - - @property - def primitive_value(self): - return self.neutralized - - def __repr__(self): - return repr(self.primitive_value) diff --git a/pipenv/patched/contoml/file/file.py b/pipenv/patched/contoml/file/file.py deleted file mode 100644 index 99ce1483e6..0000000000 --- a/pipenv/patched/contoml/file/file.py +++ /dev/null @@ -1,293 
+0,0 @@ -from prettytoml.errors import NoArrayFoundError, DuplicateKeysError, DuplicateTablesError -from contoml.file import structurer, toplevels, raw -from contoml.file.array import ArrayOfTables -from contoml.file.freshtable import FreshTable -import prettytoml.elements.factory as element_factory -import prettytoml.util as util - - -class TOMLFile: - """ - A TOMLFile object that tries its best to prserve formatting and order of mappings of the input source. - - Raises InvalidTOMLFileError on invalid input elements. - - Raises DuplicateKeysError, DuplicateTableError when appropriate. - """ - - def __init__(self, _elements): - self._elements = [] - self._navigable = {} - self.append_elements(_elements) - - def __getitem__(self, item): - try: - value = self._navigable[item] - if isinstance(value, (list, tuple)): - return ArrayOfTables(toml_file=self, name=item, iterable=value) - else: - return value - except KeyError: - return FreshTable(parent=self, name=item, is_array=False) - - def get(self, item, default=None): - """This was not here for who knows why.""" - - if item not in self: - return default - else: - return self.__getitem__(item) - - def __contains__(self, item): - return item in self.keys() - - def _setitem_with_key_seq(self, key_seq, value): - """ - Sets a the value in the TOML file located by the given key sequence. - - Example: - self._setitem(('key1', 'key2', 'key3'), 'text_value') - is equivalent to doing - self['key1']['key2']['key3'] = 'text_value' - """ - table = self - key_so_far = tuple() - for key in key_seq[:-1]: - key_so_far += (key,) - self._make_sure_table_exists(key_so_far) - table = table[key] - table[key_seq[-1]] = value - - def _array_setitem_with_key_seq(self, array_name, index, key_seq, value): - """ - Sets a the array value in the TOML file located by the given key sequence. 
- - Example: - self._array_setitem(array_name, index, ('key1', 'key2', 'key3'), 'text_value') - is equivalent to doing - self.array(array_name)[index]['key1']['key2']['key3'] = 'text_value' - """ - table = self.array(array_name)[index] - key_so_far = tuple() - for key in key_seq[:-1]: - key_so_far += (key,) - new_table = self._array_make_sure_table_exists(array_name, index, key_so_far) - if new_table is not None: - table = new_table - else: - table = table[key] - table[key_seq[-1]] = value - - def _make_sure_table_exists(self, name_seq): - """ - Makes sure the table with the full name comprising of name_seq exists. - """ - t = self - for key in name_seq[:-1]: - t = t[key] - name = name_seq[-1] - if name not in t: - self.append_elements([element_factory.create_table_header_element(name_seq), - element_factory.create_table({})]) - - def _array_make_sure_table_exists(self, array_name, index, name_seq): - """ - Makes sure the table with the full name comprising of name_seq exists. - """ - t = self[array_name][index] - for key in name_seq[:-1]: - t = t[key] - name = name_seq[-1] - if name not in t: - new_table = element_factory.create_table({}) - self.append_elements([element_factory.create_table_header_element((array_name,) + name_seq), new_table]) - return new_table - - def __delitem__(self, key): - table_element_index = self._elements.index(self._navigable[key]) - self._elements[table_element_index] = element_factory.create_table({}) - self._on_element_change() - - def __setitem__(self, key, value): - - # Setting an array-of-tables - if key and isinstance(value, (tuple, list)) and value and all(isinstance(v, dict) for v in value): - for table in value: - self.array(key).append(table) - - # Or setting a whole single table - elif isinstance(value, dict): - - if key and key in self: - del self[key] - - for key_seq, child_value in util.flatten_nested({key: value}).items(): - self._setitem_with_key_seq(key_seq, child_value) - - # if key in self._navigable: - # del 
self[key] - # index = self._elements.index(self._navigable[key]) - # self._elements = self._elements[:index] + [element_factory.create_table(value)] + self._elements[index+1:] - # else: - # if key: - # self._elements.append(element_factory.create_table_header_element(key)) - # self._elements.append(element_factory.create_table(value)) - - - # Or updating the anonymous section table - else: - # It's mea - self[''][key] = value - - self._on_element_change() - - def _detect_toplevels(self): - """ - Returns a sequence of TopLevel instances for the current state of this table. - """ - return tuple(e for e in toplevels.identify(self.elements) if isinstance(e, toplevels.Table)) - - def _update_table_fallbacks(self, table_toplevels): - """ - Updates the fallbacks on all the table elements to make relative table access possible. - - Raises DuplicateKeysError if appropriate. - """ - - if len(self.elements) <= 1: - return - - def parent_of(toplevel): - # Returns an TopLevel parent of the given entry, or None. - for parent_toplevel in table_toplevels: - if toplevel.name.sub_names[:-1] == parent_toplevel.name.sub_names: - return parent_toplevel - - for entry in table_toplevels: - if entry.name.is_qualified: - parent = parent_of(entry) - if parent: - child_name = entry.name.without_prefix(parent.name) - parent.table_element.set_fallback({child_name.sub_names[0]: entry.table_element}) - - def _recreate_navigable(self): - if self._elements: - self._navigable = structurer.structure(toplevels.identify(self._elements)) - - def array(self, name): - """ - Returns the array of tables with the given name. 
- """ - if name in self._navigable: - if isinstance(self._navigable[name], (list, tuple)): - return self[name] - else: - raise NoArrayFoundError - else: - return ArrayOfTables(toml_file=self, name=name) - - def _on_element_change(self): - self._recreate_navigable() - - table_toplevels = self._detect_toplevels() - self._update_table_fallbacks(table_toplevels) - - def append_elements(self, elements): - """ - Appends more elements to the contained internal elements. - """ - self._elements = self._elements + list(elements) - self._on_element_change() - - def prepend_elements(self, elements): - """ - Prepends more elements to the contained internal elements. - """ - self._elements = list(elements) + self._elements - self._on_element_change() - - def dumps(self): - """ - Returns the TOML file serialized back to str. - """ - return ''.join(element.serialized() for element in self._elements) - - def dump(self, file_path): - with open(file_path, mode='w') as fp: - fp.write(self.dumps()) - - def keys(self): - return set(self._navigable.keys()) | {''} - - def values(self): - return self._navigable.values() - - def items(self): - items = self._navigable.items() - - def has_anonymous_entry(): - return any(key == '' for (key, _) in items) - - if has_anonymous_entry(): - return items - else: - return list(items) + [('', self[''])] - - @property - def primitive(self): - """ - Returns a primitive object representation for this container (which is a dict). - - WARNING: The returned container does not contain any markup or formatting metadata. - """ - raw_container = raw.to_raw(self._navigable) - - # Collapsing the anonymous table onto the top-level container is present - if '' in raw_container: - raw_container.update(raw_container['']) - del raw_container[''] - - return raw_container - - def append_fresh_table(self, fresh_table): - """ - Gets called by FreshTable instances when they get written to. 
- """ - if fresh_table.name: - elements = [] - if fresh_table.is_array: - elements += [element_factory.create_array_of_tables_header_element(fresh_table.name)] - else: - elements += [element_factory.create_table_header_element(fresh_table.name)] - - elements += [fresh_table, element_factory.create_newline_element()] - self.append_elements(elements) - - else: - # It's an anonymous table - self.prepend_elements([fresh_table, element_factory.create_newline_element()]) - - @property - def elements(self): - return self._elements - - def __str__(self): - - is_empty = (not self['']) and (not tuple(k for k in self.keys() if k)) - - def key_name(key): - return '[ANONYMOUS]' if not key else key - - def pair(key, value): - return '%s = %s' % (key_name(key), str(value)) - - content_text = '' if is_empty else \ - '\n\t' + ',\n\t'.join(pair(k, v) for (k, v) in self.items() if v) + '\n' - - return "TOMLFile{%s}" % content_text - - def __repr__(self): - return str(self) - - - diff --git a/pipenv/patched/contoml/file/freshtable.py b/pipenv/patched/contoml/file/freshtable.py deleted file mode 100644 index f28e2f74d2..0000000000 --- a/pipenv/patched/contoml/file/freshtable.py +++ /dev/null @@ -1,45 +0,0 @@ -from prettytoml.elements.table import TableElement - - -class FreshTable(TableElement): - """ - A fresh TableElement that appended itself to each of parents when it first gets written to at most once. 
- - parents is a sequence of objects providing an append_fresh_table(TableElement) method - """ - - def __init__(self, parent, name, is_array=False): - TableElement.__init__(self, sub_elements=[]) - - self._parent = parent - self._name = name - self._is_array = is_array - - # As long as this flag is false, setitem() operations will append the table header and this table - # to the toml_file's elements - self.__appended = False - - @property - def name(self): - return self._name - - @property - def is_array(self): - return self._is_array - - def _append_to_parent(self): - """ - Causes this ephemeral table to be persisted on the TOMLFile. - """ - - if self.__appended: - return - - if self._parent is not None: - self._parent.append_fresh_table(self) - - self.__appended = True - - def __setitem__(self, key, value): - TableElement.__setitem__(self, key, value) - self._append_to_parent() diff --git a/pipenv/patched/contoml/file/peekableit.py b/pipenv/patched/contoml/file/peekableit.py deleted file mode 100644 index b5658a71c5..0000000000 --- a/pipenv/patched/contoml/file/peekableit.py +++ /dev/null @@ -1,30 +0,0 @@ -import itertools - - -class PeekableIterator: - - # Returned by peek() when the iterator is exhausted. Truthiness is False. - Nothing = tuple() - - def __init__(self, iter): - self._iter = iter - - def __next__(self): - return next(self._iter) - - def next(self): - return self.__next__() - - def __iter__(self): - return self - - def peek(self): - """ - Returns PeekableIterator.Nothing when the iterator is exhausted. 
- """ - try: - v = next(self._iter) - self._iter = itertools.chain((v,), self._iter) - return v - except StopIteration: - return PeekableIterator.Nothing diff --git a/pipenv/patched/contoml/file/raw.py b/pipenv/patched/contoml/file/raw.py deleted file mode 100644 index 8cffdb6e67..0000000000 --- a/pipenv/patched/contoml/file/raw.py +++ /dev/null @@ -1,16 +0,0 @@ -from prettytoml.elements.abstracttable import AbstractTable - - -def to_raw(x): - from contoml.file.cascadedict import CascadeDict - - if isinstance(x, AbstractTable): - return x.primitive_value - elif isinstance(x, CascadeDict): - return x.neutralized - elif isinstance(x, (list, tuple)): - return [to_raw(y) for y in x] - elif isinstance(x, dict): - return {k: to_raw(v) for (k, v) in x.items()} - else: - return x diff --git a/pipenv/patched/contoml/file/structurer.py b/pipenv/patched/contoml/file/structurer.py deleted file mode 100644 index 72d002cde2..0000000000 --- a/pipenv/patched/contoml/file/structurer.py +++ /dev/null @@ -1,116 +0,0 @@ -from contoml.file import toplevels -from contoml.file.cascadedict import CascadeDict - - -class NamedDict(dict): - """ - A dict that can use Name instances as keys. - """ - - def __init__(self, other_dict=None): - dict.__init__(self) - if other_dict: - for k, v in other_dict.items(): - self[k] = v - - def __setitem__(self, key, value): - """ - key can be an Name instance. - - When key is a path in the form of an Name instance, all the parents and grandparents of the value are - created along the way as instances of NamedDict. If the parent of the value exists, it is replaced with a - CascadeDict() that cascades the old parent value with a new NamedDict that contains the given child name - and value. 
- """ - if isinstance(key, toplevels.Name): - - if len(key.sub_names) == 1: - name = key.sub_names[0] - if name in self: - self[name] = CascadeDict(self[name], value) - else: - self[name] = value - - elif len(key.sub_names) > 1: - name = key.sub_names[0] - rest_of_key = key.drop(1) - if name in self: - named_dict = NamedDict() - named_dict[rest_of_key] = value - self[name] = CascadeDict(self[name], named_dict) - else: - self[name] = NamedDict() - self[name][rest_of_key] = value - else: - return dict.__setitem__(self, key, value) - - def __contains__(self, item): - try: - _ = self[item] - return True - except KeyError: - return False - - def append(self, key, value): - """ - Makes sure the value pointed to by key exists and is a list and appends the given value to it. - """ - if key in self: - self[key].append(value) - else: - self[key] = [value] - - def __getitem__(self, item): - - if isinstance(item, toplevels.Name): - d = self - for name in item.sub_names: - d = d[name] - return d - else: - return dict.__getitem__(self, item) - - -def structure(table_toplevels): - """ - Accepts an ordered sequence of TopLevel instances and returns a navigable object structure representation of the - TOML file. 
- """ - - table_toplevels = tuple(table_toplevels) - obj = NamedDict() - - last_array_of_tables = None # The Name of the last array-of-tables header - - for toplevel in table_toplevels: - - if isinstance(toplevel, toplevels.AnonymousTable): - obj[''] = toplevel.table_element - - elif isinstance(toplevel, toplevels.Table): - if last_array_of_tables and toplevel.name.is_prefixed_with(last_array_of_tables): - seq = obj[last_array_of_tables] - unprefixed_name = toplevel.name.without_prefix(last_array_of_tables) - - seq[-1] = CascadeDict(seq[-1], NamedDict({unprefixed_name: toplevel.table_element})) - else: - obj[toplevel.name] = toplevel.table_element - else: # It's an ArrayOfTables - - if last_array_of_tables and toplevel.name != last_array_of_tables and \ - toplevel.name.is_prefixed_with(last_array_of_tables): - - seq = obj[last_array_of_tables] - unprefixed_name = toplevel.name.without_prefix(last_array_of_tables) - - if unprefixed_name in seq[-1]: - seq[-1][unprefixed_name].append(toplevel.table_element) - else: - cascaded_with = NamedDict({unprefixed_name: [toplevel.table_element]}) - seq[-1] = CascadeDict(seq[-1], cascaded_with) - - else: - obj.append(toplevel.name, toplevel.table_element) - last_array_of_tables = toplevel.name - - return obj diff --git a/pipenv/patched/contoml/file/test_cascadedict.py b/pipenv/patched/contoml/file/test_cascadedict.py deleted file mode 100644 index d692711e7d..0000000000 --- a/pipenv/patched/contoml/file/test_cascadedict.py +++ /dev/null @@ -1,25 +0,0 @@ -from contoml.file.cascadedict import CascadeDict - - -def test_cascadedict(): - - d1 = {'a': 1, 'b': 2, 'c': 3} - d2 = {'b': 12, 'e': 4, 'f': 5} - - cascade = CascadeDict(d1, d2) - - # Test querying - assert cascade['a'] == 1 - assert cascade['b'] == 2 - assert cascade['c'] == 3 - assert cascade['e'] == 4 - assert cascade.keys() == {'a', 'b', 'c', 'e', 'f'} - assert set(cascade.items()) == {('a', 1), ('b', 2), ('c', 3), ('e', 4), ('f', 5)} - - # Test mutating - cascade['a'] = 11 
- cascade['f'] = 'fff' - cascade['super'] = 'man' - assert d1['a'] == 11 - assert d1['super'] == 'man' - assert d1['f'] == 'fff' diff --git a/pipenv/patched/contoml/file/test_entries.py b/pipenv/patched/contoml/file/test_entries.py deleted file mode 100644 index 25584e8298..0000000000 --- a/pipenv/patched/contoml/file/test_entries.py +++ /dev/null @@ -1,20 +0,0 @@ -from prettytoml import parser, lexer -from contoml.file import toplevels - - -def test_entry_extraction(): - text = open('sample.toml').read() - elements = parser.parse_tokens(lexer.tokenize(text)) - - e = tuple(toplevels.identify(elements)) - - assert len(e) == 13 - assert isinstance(e[0], toplevels.AnonymousTable) - - -def test_entry_names(): - name_a = toplevels.Name(('super', 'sub1')) - name_b = toplevels.Name(('super', 'sub1', 'sub2', 'sub3')) - - assert name_b.is_prefixed_with(name_a) - assert name_b.without_prefix(name_a).sub_names == ('sub2', 'sub3') diff --git a/pipenv/patched/contoml/file/test_peekableit.py b/pipenv/patched/contoml/file/test_peekableit.py deleted file mode 100644 index 5c053a384c..0000000000 --- a/pipenv/patched/contoml/file/test_peekableit.py +++ /dev/null @@ -1,12 +0,0 @@ -from contoml.file.peekableit import PeekableIterator - - -def test_peekable_iterator(): - - peekable = PeekableIterator(i for i in (1, 2, 3, 4)) - - assert peekable.peek() == 1 - assert peekable.peek() == 1 - assert peekable.peek() == 1 - - assert [next(peekable), next(peekable), next(peekable), next(peekable)] == [1, 2, 3, 4] diff --git a/pipenv/patched/contoml/file/test_structurer.py b/pipenv/patched/contoml/file/test_structurer.py deleted file mode 100644 index b3ea4b4ef4..0000000000 --- a/pipenv/patched/contoml/file/test_structurer.py +++ /dev/null @@ -1,41 +0,0 @@ -from prettytoml import lexer, parser -from contoml.file import toplevels -from prettytoml.parser import elementsanitizer -from contoml.file.structurer import NamedDict, structure -from prettytoml.parser.tokenstream import TokenStream - - 
-def test_NamedDict(): - - d = NamedDict() - - d[toplevels.Name(('super', 'sub1', 'sub2'))] = {'sub3': 12} - d[toplevels.Name(('super', 'sub1', 'sub2'))]['sub4'] = 42 - - assert d[toplevels.Name(('super', 'sub1', 'sub2', 'sub3'))] == 12 - assert d[toplevels.Name(('super', 'sub1', 'sub2', 'sub4'))] == 42 - - -def test_structure(): - tokens = lexer.tokenize(open('sample.toml').read()) - elements = elementsanitizer.sanitize(parser.parse_tokens(tokens)) - entries_ = tuple(toplevels.identify(elements)) - - s = structure(entries_) - - assert s['']['title'] == 'TOML Example' - assert s['owner']['name'] == 'Tom Preston-Werner' - assert s['database']['ports'][1] == 8001 - assert s['servers']['alpha']['dc'] == 'eqdc10' - assert s['clients']['data'][1][0] == 1 - assert s['clients']['key3'] == 'The quick brown fox jumps over the lazy dog.' - - assert s['fruit'][0]['name'] == 'apple' - assert s['fruit'][0]['physical']['color'] == 'red' - assert s['fruit'][0]['physical']['shape'] == 'round' - assert s['fruit'][0]['variety'][0]['name'] == 'red delicious' - assert s['fruit'][0]['variety'][1]['name'] == 'granny smith' - - assert s['fruit'][1]['name'] == 'banana' - assert s['fruit'][1]['variety'][0]['name'] == 'plantain' - assert s['fruit'][1]['variety'][0]['points'][2]['y'] == 4 diff --git a/pipenv/patched/contoml/file/toplevels.py b/pipenv/patched/contoml/file/toplevels.py deleted file mode 100644 index 640380726d..0000000000 --- a/pipenv/patched/contoml/file/toplevels.py +++ /dev/null @@ -1,142 +0,0 @@ -""" - Top-level entries in a TOML file. -""" - -from prettytoml import elements -from prettytoml.elements import TableElement, TableHeaderElement -from .peekableit import PeekableIterator - - -class TopLevel: - """ - A abstract top-level entry. 
- """ - - def __init__(self, names, table_element): - self._table_element = table_element - self._names = Name(names) - - @property - def table_element(self): - return self._table_element - - @property - def name(self): - """ - The distinct name of a table entry as an Name instance. - """ - return self._names - - -class Name: - - def __init__(self, names): - self._names = names - - @property - def sub_names(self): - return self._names - - def drop(self, n=0): - """ - Returns the name after dropping the first n entries of it. - """ - return Name(names=self._names[n:]) - - def is_prefixed_with(self, names): - if isinstance(names, Name): - return self.is_prefixed_with(names.sub_names) - - for i, name in enumerate(names): - if self._names[i] != name: - return False - return True - - def without_prefix(self, names): - if isinstance(names, Name): - return self.without_prefix(names.sub_names) - - for i, name in enumerate(names): - if name != self._names[i]: - return Name(self._names[i:]) - return Name(names=self.sub_names[len(names):]) - - @property - def is_qualified(self): - return len(self._names) > 1 - - def __str__(self): - return '.'.join(self.sub_names) - - def __hash__(self): - return hash(str(self)) - - def __eq__(self, other): - return str(self) == str(other) - - def __ne__(self, other): - return not self.__eq__(other) - - -class AnonymousTable(TopLevel): - - def __init__(self, table_element): - TopLevel.__init__(self, ('',), table_element) - - -class Table(TopLevel): - - def __init__(self, names, table_element): - TopLevel.__init__(self, names=names, table_element=table_element) - - -class ArrayOfTables(TopLevel): - - def __init__(self, names, table_element): - TopLevel.__init__(self, names=names, table_element=table_element) - - -def _validate_file_elements(file_elements): - pass - - -def identify(file_elements): - """ - Outputs an ordered sequence of instances of TopLevel types. 
- - Elements start with an optional TableElement, followed by zero or more pairs of (TableHeaderElement, TableElement). - """ - - if not file_elements: - return - - _validate_file_elements(file_elements) - - # An iterator over enumerate(the non-metadata) elements - iterator = PeekableIterator((element_i, element) for (element_i, element) in enumerate(file_elements) - if element.type != elements.TYPE_METADATA) - - try: - _, first_element = iterator.peek() - if isinstance(first_element, TableElement): - iterator.next() - yield AnonymousTable(first_element) - except KeyError: - pass - except StopIteration: - return - - for element_i, element in iterator: - - if not isinstance(element, TableHeaderElement): - continue - - # If TableHeader of a regular table, return Table following it - if not element.is_array_of_tables: - table_element_i, table_element = next(iterator) - yield Table(names=element.names, table_element=table_element) - - # If TableHeader of an array of tables, do your thing - else: - table_element_i, table_element = next(iterator) - yield ArrayOfTables(names=element.names, table_element=table_element) diff --git a/pipenv/patched/patched.txt b/pipenv/patched/patched.txt index f73ed5a6e2..4f3ee409d7 100644 --- a/pipenv/patched/patched.txt +++ b/pipenv/patched/patched.txt @@ -3,5 +3,4 @@ git+https://github.com/jumpscale7/python-consistent-toml.git#egg=contoml crayons==0.1.2 pipfile==0.0.2 pip-tools==3.1.0 -prettytoml==0.3 pip==18.1 diff --git a/pipenv/patched/prettytoml/LICENSE b/pipenv/patched/prettytoml/LICENSE deleted file mode 100644 index 116fa4e558..0000000000 --- a/pipenv/patched/prettytoml/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Jumpscale - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, 
publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/pipenv/patched/prettytoml/__init__.py b/pipenv/patched/prettytoml/__init__.py deleted file mode 100644 index d731074832..0000000000 --- a/pipenv/patched/prettytoml/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -from ._version import VERSION - -__version__ = VERSION - - -def prettify(toml_text): - """ - Prettifies and returns the TOML file content provided. - """ - from .parser import parse_tokens - from .lexer import tokenize - from .prettifier import prettify as element_prettify - - tokens = tokenize(toml_text, is_top_level=True) - elements = parse_tokens(tokens) - prettified = element_prettify(elements) - return ''.join(pretty_element.serialized() for pretty_element in prettified) - - -def prettify_from_file(file_path): - """ - Reads, prettifies and returns the TOML file specified by the file_path. 
- """ - with open(file_path, 'r') as fp: - return prettify(fp.read()) diff --git a/pipenv/patched/prettytoml/_version.py b/pipenv/patched/prettytoml/_version.py deleted file mode 100644 index e0f154708e..0000000000 --- a/pipenv/patched/prettytoml/_version.py +++ /dev/null @@ -1 +0,0 @@ -VERSION = 'master' diff --git a/pipenv/patched/prettytoml/elements/__init__.py b/pipenv/patched/prettytoml/elements/__init__.py deleted file mode 100644 index ece2112396..0000000000 --- a/pipenv/patched/prettytoml/elements/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ - -""" - TOML file elements (a higher abstraction layer than individual lexical tokens). -""" - -from .traversal import TraversalMixin -from .errors import InvalidElementError -from .table import TableElement -from .tableheader import TableHeaderElement -from .common import TYPE_METADATA, TYPE_ATOMIC, TYPE_CONTAINER, TYPE_MARKUP - -from . import traversal -from . import factory diff --git a/pipenv/patched/prettytoml/elements/abstracttable.py b/pipenv/patched/prettytoml/elements/abstracttable.py deleted file mode 100644 index 6a2c174aec..0000000000 --- a/pipenv/patched/prettytoml/elements/abstracttable.py +++ /dev/null @@ -1,92 +0,0 @@ -try: - from collections.abc import Mapping -except ImportError: - from collections import Mapping - -from prettytoml.elements.common import ContainerElement -from prettytoml.elements import traversal - - -class AbstractTable(ContainerElement, traversal.TraversalMixin, Mapping): - """ - Common code for handling tables as key-value pairs with metadata elements sprinkled all over. - - Assumes input sub_elements are correct. - """ - - def __init__(self, sub_elements): - ContainerElement.__init__(self, sub_elements) - self._fallback = None - - def _enumerate_items(self): - """ - Returns ((key_index, key_element), (value_index, value_element)) for all the element key-value pairs. 
- """ - non_metadata = self._enumerate_non_metadata_sub_elements() - while True: - try: - yield next(non_metadata), next(non_metadata) - except StopIteration: - return - - def items(self): - for (key_i, key), (value_i, value) in self._enumerate_items(): - yield key.value, value.value - if self._fallback: - for key, value in self._fallback.items(): - yield key, value - - def keys(self): - return tuple(key for (key, _) in self.items()) - - def values(self): - return tuple(value for (_, value) in self.items()) - - def __len__(self): - return len(tuple(self._enumerate_items())) - - def __iter__(self): - return (key for key, _ in self.items()) - - def __contains__(self, item): - return item in self.keys() - - def _find_key_and_value(self, key): - """ - Returns (key_i, value_i) corresponding to the given key value. - - Raises KeyError if no matching key found. - """ - for (key_i, key_element), (value_i, value_element) in self._enumerate_items(): - if key_element.value == key: - return key_i, value_i - raise KeyError - - def __getitem__(self, item): - for key, value in self.items(): - if key == item: - return value - raise KeyError - - def get(self, key, default=None): - try: - return self[key] - except KeyError: - return default - - def set_fallback(self, fallback): - """ - Sets a fallback dict-like instance to be used to look up values after they are not found - in this instance. - """ - self._fallback = fallback - - @property - def primitive_value(self): - """ - Returns a primitive Python value without any formatting or markup metadata. 
- """ - return { - key: - value.primitive_value if hasattr(value, 'primitive_value') else value for key, value in self.items() - } diff --git a/pipenv/patched/prettytoml/elements/array.py b/pipenv/patched/prettytoml/elements/array.py deleted file mode 100644 index 36c648b972..0000000000 --- a/pipenv/patched/prettytoml/elements/array.py +++ /dev/null @@ -1,136 +0,0 @@ -from prettytoml.elements import common, factory, traversal -from prettytoml.elements.common import Element, ContainerElement -from prettytoml.elements.factory import create_element -from prettytoml.elements.metadata import NewlineElement -from prettytoml.elements.errors import InvalidElementError - - -class ArrayElement(ContainerElement, traversal.TraversalMixin): - """ - A sequence-like container element containing other atomic elements or other containers. - - Implements list-like interface. - - Assumes input sub_elements are correct for an array element. - - Raises an InvalidElementError if contains heterogeneous values. - """ - - def __init__(self, sub_elements): - common.ContainerElement.__init__(self, sub_elements) - self._check_homogeneity() - - def _check_homogeneity(self): - if len(set(type(v) for v in self.primitive_value)) > 1: - raise InvalidElementError('Array should be homogeneous') - - def __len__(self): - return len(tuple(self._enumerate_non_metadata_sub_elements())) - - def __getitem__(self, i): - """ - Returns the ith entry, which can be a primitive value, a seq-lie, or a dict-like object. 
- """ - return self._find_value(i)[1].value - - def __setitem__(self, i, value): - value_i, _ = self._find_value(i) - new_element = value if isinstance(value, Element) else factory.create_element(value) - self._sub_elements = self.sub_elements[:value_i] + [new_element] + self.sub_elements[value_i+1:] - - @property - def value(self): - return self # self is a sequence-like value - - @property - def primitive_value(self): - """ - Returns a primitive Python value without any formatting or markup metadata. - """ - return list( - self[i].primitive_value if hasattr(self[i], 'primitive_value') - else self[i] - for i in range(len(self))) - - def __str__(self): - return "Array{}".format(self.primitive_value) - - def append(self, v): - new_entry = [create_element(v)] - - if self: # If not empty, we need a comma and whitespace prefix! - new_entry = [ - factory.create_operator_element(','), - factory.create_whitespace_element(), - ] + new_entry - - insertion_index = self._find_closing_square_bracket() - self._sub_elements = self._sub_elements[:insertion_index] + new_entry + \ - self._sub_elements[insertion_index:] - - def _find_value(self, i): - """ - Returns (value_index, value) of ith value in this sequence. - - Raises IndexError if not found. - """ - return tuple(self._enumerate_non_metadata_sub_elements())[i] - - def __delitem__(self, i): - value_i, value = self._find_value(i) - - begin, end = value_i, value_i+1 - - # Rules: - # 1. begin should be index to the preceding comma to the value - # 2. end should be index to the following comma, or the closing bracket - # 3. 
If no preceding comma found but following comma found then end should be the index of the following value - - preceding_comma = self._find_preceding_comma(value_i) - found_preceding_comma = preceding_comma >= 0 - if found_preceding_comma: - begin = preceding_comma - - following_comma = self._find_following_comma(value_i) - if following_comma >= 0: - if not found_preceding_comma: - end = self._find_following_non_metadata(following_comma) - else: - end = following_comma - else: - end = self._find_following_closing_square_bracket() - - self._sub_elements = self.sub_elements[:begin] + self._sub_elements[end:] - - @property - def is_multiline(self): - return any(isinstance(e, (NewlineElement)) for e in self.elements) - - def turn_into_multiline(self): - """ - Turns this array into a multi-line array with each element lying on its own line. - """ - if self.is_multiline: - return - - i = self._find_following_comma(-1) - - def next_entry_i(): - return self._find_following_non_metadata(i) - - def next_newline_i(): - return self._find_following_newline(i) - - def next_closing_bracket_i(): - return self._find_following_closing_square_bracket(i) - - def next_comma_i(): - return self._find_following_comma(i) - - while i < len(self.elements)-1: - if next_newline_i() < next_entry_i(): - self.elements.insert(i+1, factory.create_newline_element()) - if float('-inf') < next_comma_i() < next_closing_bracket_i(): - i = next_comma_i() - else: - i = next_closing_bracket_i() diff --git a/pipenv/patched/prettytoml/elements/atomic.py b/pipenv/patched/prettytoml/elements/atomic.py deleted file mode 100644 index 571810d9d8..0000000000 --- a/pipenv/patched/prettytoml/elements/atomic.py +++ /dev/null @@ -1,52 +0,0 @@ -from ..tokens import py2toml, toml2py -from . 
import common -from prettytoml.util import is_dict_like, is_sequence_like -from .errors import InvalidElementError - - -class AtomicElement(common.TokenElement): - """ - An element containing a sequence of tokens representing a single atomic value that can be updated in place. - - Raises: - InvalidElementError: when passed an invalid sequence of tokens. - """ - - def __init__(self, _tokens): - common.TokenElement.__init__(self, _tokens, common.TYPE_ATOMIC) - - def _validate_tokens(self, _tokens): - if len([token for token in _tokens if not token.type.is_metadata]) != 1: - raise InvalidElementError('Tokens making up an AtomicElement must contain only one non-metadata token') - - def serialized(self): - return ''.join(token.source_substring for token in self.tokens) - - def _value_token_index(self): - """ - Finds the token where the value is stored. - """ - # TODO: memoize this value - for i, token in enumerate(self.tokens): - if not token.type.is_metadata: - return i - raise RuntimeError('could not find a value token') - - @property - def value(self): - """ - Returns a Python value contained in this atomic element. - """ - return toml2py.deserialize(self._tokens[self._value_token_index()]) - - @property - def primitive_value(self): - return self.value - - def set(self, value): - """ - Sets the contained value to the given one. 
- """ - assert (not is_sequence_like(value)) and (not is_dict_like(value)), 'the value must be an atomic primitive' - token_index = self._value_token_index() - self._tokens[token_index] = py2toml.create_primitive_token(value) diff --git a/pipenv/patched/prettytoml/elements/common.py b/pipenv/patched/prettytoml/elements/common.py deleted file mode 100644 index 7508047ae9..0000000000 --- a/pipenv/patched/prettytoml/elements/common.py +++ /dev/null @@ -1,101 +0,0 @@ -from abc import abstractmethod - -TYPE_METADATA = 'element-metadata' -TYPE_ATOMIC = 'element-atomic' -TYPE_CONTAINER = 'element-container' -TYPE_MARKUP = 'element-markup' - -class Element: - """ - An Element: - - is one or more Token instances, or one or more other Element instances. Not both. - - knows how to serialize its value back to valid TOML code. - - A non-metadata Element is an Element that: - - knows how to deserialize its content into usable Python primitive, seq-like, or dict-like value. - - knows how to update its content from a Python primitive, seq-like, or dict-like value - while maintaining its formatting. - """ - - def __init__(self, _type): - self._type = _type - - @property - def type(self): - return self._type - - @abstractmethod - def serialized(self): - """ - TOML serialization of this element as str. 
- """ - raise NotImplementedError - - -class TokenElement(Element): - """ - An Element made up of tokens - """ - - def __init__(self, _tokens, _type): - Element.__init__(self, _type) - self._validate_tokens(_tokens) - self._tokens = list(_tokens) - - @property - def tokens(self): - return self._tokens - - @property - def first_token(self): - return self._tokens[0] - - @abstractmethod - def _validate_tokens(self, _tokens): - raise NotImplementedError - - def serialized(self): - return ''.join(token.source_substring for token in self._tokens) - - def __repr__(self): - return repr(self.tokens) - - @property - def primitive_value(self): - """ - Returns a primitive Python value without any formatting or markup metadata. - """ - raise NotImplementedError - - -class ContainerElement(Element): - """ - An Element containing exclusively other elements. - """ - - def __init__(self, sub_elements): - Element.__init__(self, TYPE_CONTAINER) - self._sub_elements = list(sub_elements) - - @property - def sub_elements(self): - return self._sub_elements - - @property - def elements(self): - return self.sub_elements - - def serialized(self): - return ''.join(element.serialized() for element in self.sub_elements) - - def __repr__(self): - return repr(self.primitive_value) - - @property - def primitive_value(self): - """ - Returns a primitive Python value without any formatting or markup metadata. - """ - raise NotImplementedError - - diff --git a/pipenv/patched/prettytoml/elements/errors.py b/pipenv/patched/prettytoml/elements/errors.py deleted file mode 100644 index 0fcf2e998d..0000000000 --- a/pipenv/patched/prettytoml/elements/errors.py +++ /dev/null @@ -1,13 +0,0 @@ - -class InvalidElementError(Exception): - """ - Raised by Element factories when the given sequence of tokens or sub-elements are invalid for the - specific type of Element being created. 
- """ - - def __init__(self, message): - self.message = message - - def __repr__(self): - return "InvalidElementError: {}".format(self.message) - diff --git a/pipenv/patched/prettytoml/elements/factory.py b/pipenv/patched/prettytoml/elements/factory.py deleted file mode 100644 index 177738dbd0..0000000000 --- a/pipenv/patched/prettytoml/elements/factory.py +++ /dev/null @@ -1,152 +0,0 @@ -import datetime -import functools -import six -from prettytoml import tokens -from prettytoml.tokens import py2toml -from prettytoml.elements.atomic import AtomicElement -from prettytoml.elements.metadata import PunctuationElement, WhitespaceElement, NewlineElement -from prettytoml.elements.tableheader import TableHeaderElement -from prettytoml.util import join_with, is_sequence_like - - -def create_element(value, multiline_strings_allowed=True): - """ - Creates and returns the appropriate elements.Element instance from the given Python primitive, sequence-like, - or dict-like value. - """ - from prettytoml.elements.array import ArrayElement - - if isinstance(value, (int, float, bool, datetime.datetime, datetime.date) + six.string_types) or value is None: - primitive_token = py2toml.create_primitive_token(value, multiline_strings_allowed=multiline_strings_allowed) - return AtomicElement((primitive_token,)) - - elif isinstance(value, (list, tuple)): - preamble = [create_operator_element('[')] - postable = [create_operator_element(']')] - stuffing_elements = [create_element(v) for v in value] - spaced_stuffing = join_with(stuffing_elements, - separator=[create_operator_element(','), create_whitespace_element()]) - - return ArrayElement(preamble + spaced_stuffing + postable) - - elif isinstance(value, dict): - return create_inline_table(value, multiline_table=False, multiline_strings_allowed=multiline_strings_allowed) - - else: - raise RuntimeError('Value type unaccounted for: {} of type {}'.format(value, type(value))) - - -def create_inline_table(from_dict, multiline_table=False, 
multiline_strings_allowed=True): - """ - Creates an InlineTable element from the given dict instance. - """ - - from prettytoml.elements.inlinetable import InlineTableElement - - preamble = [create_operator_element('{')] - postable = [create_operator_element('}')] - - stuffing_elements = ( - ( - create_string_element(k, bare_allowed=True), - create_whitespace_element(), - create_operator_element('='), - create_whitespace_element(), - create_element(v, multiline_strings_allowed=False) - ) for (k, v) in from_dict.items()) - - pair_separator = [create_operator_element(','), - create_newline_element() if multiline_table else create_whitespace_element()] - spaced_elements = join_with(stuffing_elements, separator=pair_separator) - - return InlineTableElement(preamble + spaced_elements + postable) - - -def create_string_element(value, bare_allowed=False): - """ - Creates and returns an AtomicElement wrapping a string value. - """ - return AtomicElement((py2toml.create_string_token(value, bare_allowed),)) - - -def create_operator_element(operator): - """ - Creates a PunctuationElement instance containing an operator token of the specified type. The operator - should be a TOML source str. - """ - operator_type_map = { - ',': tokens.TYPE_OP_COMMA, - '=': tokens.TYPE_OP_ASSIGNMENT, - '[': tokens.TYPE_OP_SQUARE_LEFT_BRACKET, - ']': tokens.TYPE_OP_SQUARE_RIGHT_BRACKET, - '[[': tokens.TYPE_OP_DOUBLE_SQUARE_LEFT_BRACKET, - ']]': tokens.TYPE_OP_DOUBLE_SQUARE_RIGHT_BRACKET, - '{': tokens.TYPE_OP_CURLY_LEFT_BRACKET, - '}': tokens.TYPE_OP_CURLY_RIGHT_BRACKET, - } - - ts = (tokens.Token(operator_type_map[operator], operator),) - return PunctuationElement(ts) - - -def create_newline_element(): - """ - Creates and returns a single NewlineElement. - """ - ts = (tokens.Token(tokens.TYPE_NEWLINE, '\n'),) - return NewlineElement(ts) - - -def create_whitespace_element(length=1, char=' '): - """ - Creates and returns a WhitespaceElement containing spaces. 
- """ - ts = (tokens.Token(tokens.TYPE_WHITESPACE, char),) * length - return WhitespaceElement(ts) - - -def create_table_header_element(names): - - name_elements = [] - - if isinstance(names, six.string_types): - name_elements = [py2toml.create_string_token(names, bare_string_allowed=True)] - else: - for (i, name) in enumerate(names): - name_elements.append(py2toml.create_string_token(name, bare_string_allowed=True)) - if i < (len(names)-1): - name_elements.append(py2toml.operator_token(tokens.TYPE_OPT_DOT)) - - return TableHeaderElement( - [py2toml.operator_token(tokens.TYPE_OP_SQUARE_LEFT_BRACKET)] + name_elements + - [py2toml.operator_token(tokens.TYPE_OP_SQUARE_RIGHT_BRACKET), py2toml.operator_token(tokens.TYPE_NEWLINE)], - ) - - -def create_array_of_tables_header_element(name): - return TableHeaderElement(( - py2toml.operator_token(tokens.TYPE_OP_DOUBLE_SQUARE_LEFT_BRACKET), - py2toml.create_string_token(name, bare_string_allowed=True), - py2toml.operator_token(tokens.TYPE_OP_DOUBLE_SQUARE_RIGHT_BRACKET), - py2toml.operator_token(tokens.TYPE_NEWLINE), - )) - - -def create_table(dict_value): - """ - Creates a TableElement out of a dict instance. 
- """ - from prettytoml.elements.table import TableElement - - if not isinstance(dict_value, dict): - raise ValueError('input must be a dict instance.') - - table_element = TableElement([create_newline_element()]) - for k, v in dict_value.items(): - table_element[k] = create_element(v) - - return table_element - - -def create_multiline_string(text, maximum_line_length): - return AtomicElement(_tokens=[py2toml.create_multiline_string(text, maximum_line_length)]) diff --git a/pipenv/patched/prettytoml/elements/inlinetable.py b/pipenv/patched/prettytoml/elements/inlinetable.py deleted file mode 100644 index 7b985fc150..0000000000 --- a/pipenv/patched/prettytoml/elements/inlinetable.py +++ /dev/null @@ -1,78 +0,0 @@ -from prettytoml.elements import factory, abstracttable -from prettytoml.elements.common import Element - - -class InlineTableElement(abstracttable.AbstractTable): - """ - An Element containing key-value pairs, representing an inline table. - - Implements dict-like interface. - - Assumes input sub_elements are correct for an inline table element. - """ - - def __init__(self, sub_elements): - abstracttable.AbstractTable.__init__(self, sub_elements) - - def __setitem__(self, key, value): - - new_element = value if isinstance(value, Element) else factory.create_element(value) - - try: - - key_i, value_i = self._find_key_and_value(key) - # Found, then replace the value element with a new one - self._sub_elements = self.sub_elements[:value_i] + [new_element] + self.sub_elements[value_i+1:] - - except KeyError: # Key does not exist, adding anew! 
- - new_entry = [ - factory.create_string_element(key, bare_allowed=True), - factory.create_whitespace_element(), - factory.create_operator_element('='), - factory.create_whitespace_element(), - new_element, - ] - - if self: # If not empty - new_entry = [ - factory.create_operator_element(','), - factory.create_whitespace_element(), - ] + new_entry - - insertion_index = self._find_closing_curly_bracket() - self._sub_elements = self.sub_elements[:insertion_index] + new_entry + self.sub_elements[insertion_index:] - - def __delitem__(self, key): - - key_i, value_i = self._find_key_and_value(key) - - begin, end = key_i, value_i+1 - - # Rules: - # 1. begin should be index to the preceding comma to the key - # 2. end should be index to the following comma, or the closing bracket - # 3. If no preceding comma found but following comma found then end should be the index of the following key - - preceding_comma = self._find_preceding_comma(begin) - found_preceding_comma = preceding_comma >= 0 - if found_preceding_comma: - begin = preceding_comma - - following_comma = self._find_following_comma(value_i) - if following_comma >= 0: - if not found_preceding_comma: - end = self._find_following_non_metadata(following_comma) - else: - end = following_comma - else: - end = self._find_closing_curly_bracket() - - self._sub_elements = self.sub_elements[:begin] + self.sub_elements[end:] - - def multiline_equivalent(self): - return factory.create_inline_table(self.primitive_value, multiline_table=True, multiline_strings_allowed=True) - - @property - def value(self): - return self # self is a dict-like value that is perfectly usable diff --git a/pipenv/patched/prettytoml/elements/metadata.py b/pipenv/patched/prettytoml/elements/metadata.py deleted file mode 100644 index d5ee1061b1..0000000000 --- a/pipenv/patched/prettytoml/elements/metadata.py +++ /dev/null @@ -1,80 +0,0 @@ -from prettytoml import tokens -from prettytoml.elements import common -from .errors import InvalidElementError - - 
-class WhitespaceElement(common.TokenElement): - """ - An element that contains tokens of whitespace - """ - - def __init__(self, _tokens): - common.TokenElement.__init__(self, _tokens, common.TYPE_METADATA) - - def _validate_tokens(self, _tokens): - for token in _tokens: - if token.type != tokens.TYPE_WHITESPACE: - raise InvalidElementError('Tokens making up a WhitespaceElement must all be whitespace') - - @property - def length(self): - """ - The whitespace length of this element - """ - return len(self.tokens) - - -class NewlineElement(common.TokenElement): - """ - An element containing newline tokens - - Raises: - InvalidElementError: when passed an invalid sequence of tokens. - """ - - def __init__(self, _tokens): - common.TokenElement.__init__(self, _tokens, common.TYPE_METADATA) - - def _validate_tokens(self, _tokens): - for token in _tokens: - if token.type != tokens.TYPE_NEWLINE: - raise InvalidElementError('Tokens making a NewlineElement must all be newlines') - - -class CommentElement(common.TokenElement): - """ - An element containing a single comment token followed by a newline. - - Raises: - InvalidElementError: when passed an invalid sequence of tokens. - """ - - def __init__(self, _tokens): - common.TokenElement.__init__(self, _tokens, common.TYPE_METADATA) - - def _validate_tokens(self, _tokens): - if len(_tokens) != 2 or _tokens[0].type != tokens.TYPE_COMMENT or _tokens[1].type != tokens.TYPE_NEWLINE: - raise InvalidElementError('CommentElement needs one comment token followed by one newline token') - - -class PunctuationElement(common.TokenElement): - """ - An element containing a single punctuation token. - - Raises: - InvalidElementError: when passed an invalid sequence of tokens. - """ - - def __init__(self, _tokens): - common.TokenElement.__init__(self, _tokens, common.TYPE_METADATA) - - @property - def token(self): - """ - Returns the token contained in this Element. 
- """ - return self.tokens[0] - - def _validate_tokens(self, _tokens): - if not _tokens or not tokens.is_operator(_tokens[0]): - raise InvalidElementError('PunctuationElement must be made of only a single operator token') diff --git a/pipenv/patched/prettytoml/elements/table.py b/pipenv/patched/prettytoml/elements/table.py deleted file mode 100644 index cdc3ed4c51..0000000000 --- a/pipenv/patched/prettytoml/elements/table.py +++ /dev/null @@ -1,122 +0,0 @@ -from prettytoml.elements import abstracttable, factory -from prettytoml.elements.errors import InvalidElementError -from prettytoml.elements.common import Element -from prettytoml.elements.metadata import CommentElement, NewlineElement, WhitespaceElement -from . import common - - -class TableElement(abstracttable.AbstractTable): - """ - An Element containing an unnamed top-level table. - - Implements dict-like interface. - - Assumes input sub_elements are correct. - - Raises InvalidElementError on duplicate keys. - """ - - def __init__(self, sub_elements): - abstracttable.AbstractTable.__init__(self, sub_elements) - - self._check_for_duplicate_keys() - - def _check_for_duplicate_keys(self): - if len(set(self.keys())) < len(self.keys()): - raise InvalidElementError('Duplicate keys found') - - def __setitem__(self, key, value): - if key in self: - self._update(key, value) - else: - self._insert(key, value) - - def _update(self, key, value): - _, value_i = self._find_key_and_value(key) - self._sub_elements[value_i] = value if isinstance(value, Element) else factory.create_element(value) - - def _find_insertion_index(self): - """ - Returns the self.sub_elements index in which new entries should be inserted. 
- """ - - non_metadata_elements = tuple(self._enumerate_non_metadata_sub_elements()) - - if not non_metadata_elements: - return 0 - - last_entry_i = non_metadata_elements[-1][0] - following_newline_i = self._find_following_line_terminator(last_entry_i) - - return following_newline_i + 1 - - def _detect_indentation_size(self): - """ - Detects the level of indentation used in this table. - """ - - def lines(): - # Returns a sequence of sequences of elements belonging to each line - start = 0 - for i, element in enumerate(self.elements): - if isinstance(element, (CommentElement, NewlineElement)): - yield self.elements[start:i+1] - start = i+1 - - def indentation(line): - # Counts the number of whitespace tokens at the beginning of this line - try: - first_non_whitespace_i = next(i for (i, e) in enumerate(line) if not isinstance(e, WhitespaceElement)) - return sum(space.length for space in line[:first_non_whitespace_i]) - except StopIteration: - return 0 - - def is_empty_line(line): - return all(e.type == common.TYPE_METADATA for e in line) - - try: - return min(indentation(line) for line in lines() if len(line) > 1 and not is_empty_line(line)) - except ValueError: # Raised by ValueError when no matching lines found - return 0 - - def _insert(self, key, value): - - value_element = value if isinstance(value, Element) else factory.create_element(value) - - indentation_size = self._detect_indentation_size() - indentation = [factory.create_whitespace_element(self._detect_indentation_size())] if indentation_size else [] - - inserted_elements = indentation + [ - factory.create_string_element(key, bare_allowed=True), - factory.create_whitespace_element(), - factory.create_operator_element('='), - factory.create_whitespace_element(), - value_element, - factory.create_newline_element(), - ] - - insertion_index = self._find_insertion_index() - - self._sub_elements = \ - self.sub_elements[:insertion_index] + inserted_elements + self.sub_elements[insertion_index:] - - def 
__delitem__(self, key): - begin, _ = self._find_key_and_value(key) - preceding_newline = self._find_preceding_newline(begin) - if preceding_newline >= 0: - begin = preceding_newline - end = self._find_following_line_terminator(begin) - if end < 0: - end = len(tuple(self._sub_elements)) - self._sub_elements = self.sub_elements[:begin] + self.sub_elements[end:] - - def pop(self, key): - v = self[key] - del self[key] - return v - - def value(self): - return self - - def __str__(self): - return str(self.primitive_value) diff --git a/pipenv/patched/prettytoml/elements/tableheader.py b/pipenv/patched/prettytoml/elements/tableheader.py deleted file mode 100644 index eacd88b90f..0000000000 --- a/pipenv/patched/prettytoml/elements/tableheader.py +++ /dev/null @@ -1,95 +0,0 @@ -from prettytoml import tokens -from prettytoml.tokens import toml2py -from prettytoml.elements import common -from prettytoml.elements.common import Element, TokenElement -from prettytoml.elements.errors import InvalidElementError - -_opening_bracket_types = (tokens.TYPE_OP_SQUARE_LEFT_BRACKET, tokens.TYPE_OP_DOUBLE_SQUARE_LEFT_BRACKET) -_closing_bracket_types = (tokens.TYPE_OP_SQUARE_RIGHT_BRACKET, tokens.TYPE_OP_DOUBLE_SQUARE_RIGHT_BRACKET) -_name_types = ( - tokens.TYPE_BARE_STRING, - tokens.TYPE_LITERAL_STRING, - tokens.TYPE_STRING, -) - - -class TableHeaderElement(TokenElement): - """ - An element containing opening and closing single and double square brackets, strings and dots and ending with - a newline. - - Raises InvalidElementError. 
- """ - - def __init__(self, _tokens): - TokenElement.__init__(self, _tokens, common.TYPE_MARKUP) - self._names = tuple(toml2py.deserialize(token) for token in self._tokens if token.type in _name_types) - - @property - def is_array_of_tables(self): - opening_bracket = next(token for i, token in enumerate(self._tokens) if token.type in _opening_bracket_types) - return opening_bracket.type == tokens.TYPE_OP_DOUBLE_SQUARE_LEFT_BRACKET - - @property - def names(self): - """ - Returns a sequence of string names making up this table header name. - """ - return self._names - - def has_name_prefix(self, names): - """ - Returns True if the header names is prefixed by the given sequence of names. - """ - for i, name in enumerate(names): - if self.names[i] != name: - return False - return True - - def serialized(self): - return ''.join(token.source_substring for token in self._tokens) - - def is_named(self, names): - """ - Returns True if the given name sequence matches the full name of this header. 
- """ - return tuple(names) == self.names - - def _validate_tokens(self, _tokens): - - opening_bracket_i = next((i for i, token in enumerate(_tokens) - if token.type in _opening_bracket_types), float('-inf')) - - if opening_bracket_i < 0: - raise InvalidElementError('Expected an opening bracket') - - _tokens = _tokens[opening_bracket_i+1:] - first_name_i = next((i for i, token in enumerate(_tokens) if token.type in _name_types), float('-inf')) - if first_name_i < 0: - raise InvalidElementError('Expected a table header name') - - _tokens = _tokens[first_name_i+1:] - - while True: - - next_dot_i = next((i for i, token in enumerate(_tokens) if token.type == tokens.TYPE_OPT_DOT), - float('-inf')) - if next_dot_i < 0: - break - - _tokens = _tokens[next_dot_i+1:] - - next_name_i = next((i for i, token in enumerate(_tokens) if token.type in _name_types), float('-inf')) - if next_name_i < 0: - raise InvalidElementError('Expected a name after the dot') - - _tokens = _tokens[next_name_i+1:] - - closing_bracket_i = next((i for i, token in enumerate(_tokens) if token.type in _closing_bracket_types), - float('-inf')) - - if closing_bracket_i < 0: - raise InvalidElementError('Expected a closing bracket') - - if _tokens[-1].type != tokens.TYPE_NEWLINE: - raise InvalidElementError('Must end with a newline') diff --git a/pipenv/patched/prettytoml/elements/test_array.py b/pipenv/patched/prettytoml/elements/test_array.py deleted file mode 100644 index 3ccc98b525..0000000000 --- a/pipenv/patched/prettytoml/elements/test_array.py +++ /dev/null @@ -1,67 +0,0 @@ -import pytest -from prettytoml import lexer -from prettytoml.elements.array import ArrayElement -from prettytoml.elements.atomic import AtomicElement -from prettytoml.elements.metadata import PunctuationElement, WhitespaceElement, NewlineElement - - -def test_array_element(): - tokens = tuple(lexer.tokenize('[4, 8, 42, \n 23, 15]')) - assert len(tokens) == 17 - sub_elements = ( - PunctuationElement(tokens[:1]), - - 
AtomicElement(tokens[1:2]), - PunctuationElement(tokens[2:3]), - WhitespaceElement(tokens[3:4]), - - AtomicElement(tokens[4:5]), - PunctuationElement(tokens[5:6]), - WhitespaceElement(tokens[6:7]), - - AtomicElement(tokens[7:8]), - PunctuationElement(tokens[8:9]), - WhitespaceElement(tokens[9:10]), - NewlineElement(tokens[10:11]), - WhitespaceElement(tokens[11:12]), - - AtomicElement(tokens[12:13]), - PunctuationElement(tokens[13:14]), - - WhitespaceElement(tokens[14:15]), - AtomicElement(tokens[15:16]), - PunctuationElement(tokens[16:17]) - ) - - array_element = ArrayElement(sub_elements) - - # Test length - assert len(array_element) == 5 - - # Test getting a value - assert array_element[0] == 4 - assert array_element[1] == 8 - assert array_element[2] == 42 - assert array_element[3] == 23 - assert array_element[-1] == 15 - - # Test assignment with a negative index - array_element[-1] = 12 - - # Test persistence of formatting - assert '[4, 8, 42, \n 23, 12]' == array_element.serialized() - - # Test raises IndexError on invalid index - with pytest.raises(IndexError) as _: - print(array_element[5]) - - # Test appending a new value - array_element.append(77) - assert '[4, 8, 42, \n 23, 12, 77]' == array_element.serialized() - - # Test deleting a value - del array_element[3] - assert '[4, 8, 42, 12, 77]' == array_element.serialized() - - # Test primitive_value - assert [4, 8, 42, 12, 77] == array_element.primitive_value diff --git a/pipenv/patched/prettytoml/elements/test_atomic.py b/pipenv/patched/prettytoml/elements/test_atomic.py deleted file mode 100644 index 940ddd2757..0000000000 --- a/pipenv/patched/prettytoml/elements/test_atomic.py +++ /dev/null @@ -1,9 +0,0 @@ -from prettytoml import lexer -from prettytoml.elements.atomic import AtomicElement - - -def test_atomic_element(): - element = AtomicElement(tuple(lexer.tokenize(' \t 42 '))) - assert element.value == 42 - element.set(23) - assert element.serialized() == ' \t 23 ' diff --git 
a/pipenv/patched/prettytoml/elements/test_common.py b/pipenv/patched/prettytoml/elements/test_common.py deleted file mode 100644 index 9f5dd4c8f9..0000000000 --- a/pipenv/patched/prettytoml/elements/test_common.py +++ /dev/null @@ -1,89 +0,0 @@ -from prettytoml import tokens, lexer -from prettytoml.elements import traversal -from prettytoml.elements.atomic import AtomicElement -from prettytoml.elements.metadata import NewlineElement, PunctuationElement, WhitespaceElement, CommentElement -from prettytoml.elements.table import TableElement -from prettytoml.elements.tableheader import TableHeaderElement - -atomic_token_types = ( - tokens.TYPE_INTEGER, - tokens.TYPE_FLOAT, - tokens.TYPE_BARE_STRING, - tokens.TYPE_STRING, - tokens.TYPE_LITERAL_STRING, - tokens.TYPE_MULTILINE_STRING, - tokens.TYPE_MULTILINE_LITERAL_STRING, -) - -punctuation_token_types = ( - tokens.TYPE_OPT_DOT, - tokens.TYPE_OP_CURLY_LEFT_BRACKET, - tokens.TYPE_OP_SQUARE_LEFT_BRACKET, - tokens.TYPE_OP_DOUBLE_SQUARE_LEFT_BRACKET, - tokens.TYPE_OP_SQUARE_RIGHT_BRACKET, - tokens.TYPE_OP_CURLY_RIGHT_BRACKET, - tokens.TYPE_OP_DOUBLE_SQUARE_RIGHT_BRACKET, - tokens.TYPE_OP_ASSIGNMENT, -) - -def primitive_token_to_primitive_element(token): - if token.type == tokens.TYPE_NEWLINE: - return NewlineElement((token,)) - elif token.type in atomic_token_types: - return AtomicElement((token,)) - elif token.type == tokens.TYPE_NEWLINE: - return NewlineElement((token,)) - elif token.type in punctuation_token_types: - return PunctuationElement((token,)) - elif token.type == tokens.TYPE_WHITESPACE: - return WhitespaceElement((token,)) - elif token.type == tokens.TYPE_COMMENT: - return CommentElement((token,)) - else: - raise RuntimeError("{} has no mapped primitive element".format(token)) - - -def primitive_tokens_to_primitive_elements(tokens): - return list(map(primitive_token_to_primitive_element, tokens)) - - -def dummy_file_elements(): - tokens_ = tuple(lexer.tokenize(""" -name = fawzy -another_name=another_fawzy - 
-[details] -id= 42 -section =fourth - -[[person]] -personname= lefawzy -dest=north - -[[person]] -dest=south -personname=lafawzy - -[details.extended] -number = 313 -type =complex""")) - - elements = \ - [TableElement(primitive_tokens_to_primitive_elements(tokens_[:12]))] + \ - [TableHeaderElement(tokens_[12:16])] + \ - [TableElement(primitive_tokens_to_primitive_elements(tokens_[16:25]))] + \ - [TableHeaderElement(tokens_[25:31])] + \ - [TableElement(primitive_tokens_to_primitive_elements(tokens_[31:39]))] + \ - [TableHeaderElement(tokens_[39:45])] + \ - [TableElement(primitive_tokens_to_primitive_elements(tokens_[45:53]))] + \ - [TableHeaderElement(tokens_[53:60])] + \ - [TableElement(primitive_tokens_to_primitive_elements(tokens_[60:]))] - - return elements - - -class DummyFile(traversal.TraversalMixin): - - @property - def elements(self): - return dummy_file_elements() diff --git a/pipenv/patched/prettytoml/elements/test_factory.py b/pipenv/patched/prettytoml/elements/test_factory.py deleted file mode 100644 index 08a4288301..0000000000 --- a/pipenv/patched/prettytoml/elements/test_factory.py +++ /dev/null @@ -1,22 +0,0 @@ -from collections import OrderedDict -from prettytoml.elements import factory -from prettytoml.elements.array import ArrayElement -from prettytoml.elements.atomic import AtomicElement -from prettytoml.elements.inlinetable import InlineTableElement - - -def test_creating_elements(): - - atomic = factory.create_element(42) - assert isinstance(atomic, AtomicElement) - assert atomic.value == 42 - - seq = factory.create_element(['a', 'p', 'p', 'l', 'e']) - assert isinstance(seq, ArrayElement) - assert seq.serialized() == '["a", "p", "p", "l", "e"]' - assert ''.join(seq.primitive_value) == 'apple' - - mapping = factory.create_element(OrderedDict((('one', 1), ('two', 2)))) - assert isinstance(mapping, InlineTableElement) - assert mapping.serialized() == '{one = 1, two = 2}' - diff --git a/pipenv/patched/prettytoml/elements/test_inlinetable.py 
b/pipenv/patched/prettytoml/elements/test_inlinetable.py deleted file mode 100644 index 3c663873ed..0000000000 --- a/pipenv/patched/prettytoml/elements/test_inlinetable.py +++ /dev/null @@ -1,52 +0,0 @@ -from prettytoml import lexer -from prettytoml.elements.atomic import AtomicElement -from prettytoml.elements.inlinetable import InlineTableElement -from prettytoml.elements.metadata import PunctuationElement, WhitespaceElement - - -def test_inline_table(): - tokens = tuple(lexer.tokenize('{ name= "first", id=42}')) - - elements = ( - PunctuationElement(tokens[:1]), - WhitespaceElement(tokens[1:2]), - AtomicElement(tokens[2:3]), - PunctuationElement(tokens[3:4]), - WhitespaceElement(tokens[4:5]), - AtomicElement(tokens[5:6]), - PunctuationElement(tokens[6:7]), - WhitespaceElement(tokens[7:8]), - AtomicElement(tokens[8:9]), - PunctuationElement(tokens[9:10]), - AtomicElement(tokens[10:11]), - PunctuationElement(tokens[11:12]) - ) - - table = InlineTableElement(elements) - - assert table['name'] == 'first' - assert table['id'] == 42 - - table['name'] = 'fawzy' - table['nickname'] = 'nickfawzy' - - assert set(table.items()) == {('name', 'fawzy'), ('id', 42), ('nickname', 'nickfawzy')} - - assert table.serialized() == '{ name= "fawzy", id=42, nickname = "nickfawzy"}' - - del table['name'] - - assert table.serialized() == '{ id=42, nickname = "nickfawzy"}' - - del table['nickname'] - - assert table.serialized() == '{ id=42}' - - del table['id'] - - assert table.serialized() == '{ }' - - table['item1'] = 11 - table['item2'] = 22 - - assert table.serialized() == '{ item1 = 11, item2 = 22}' diff --git a/pipenv/patched/prettytoml/elements/test_metadata.py b/pipenv/patched/prettytoml/elements/test_metadata.py deleted file mode 100644 index 6e49fedd72..0000000000 --- a/pipenv/patched/prettytoml/elements/test_metadata.py +++ /dev/null @@ -1,25 +0,0 @@ -from prettytoml import lexer -from prettytoml.elements.metadata import WhitespaceElement, NewlineElement, CommentElement, 
PunctuationElement - - -def test_whitespace_element(): - element = WhitespaceElement(tuple(lexer.tokenize(' \t '))) - assert element.serialized() == ' \t ' - - -def test_newline_element(): - element = NewlineElement(tuple(lexer.tokenize('\n\n\n'))) - assert element.serialized() == '\n\n\n' - - -def test_comment_element(): - element = CommentElement(tuple(lexer.tokenize('# This is my insightful remark\n'))) - assert element.serialized() == '# This is my insightful remark\n' - - -def test_punctuation_element(): - PunctuationElement(tuple(lexer.tokenize('['))) - PunctuationElement(tuple(lexer.tokenize('[['))) - PunctuationElement(tuple(lexer.tokenize('.'))) - PunctuationElement(tuple(lexer.tokenize(']'))) - PunctuationElement(tuple(lexer.tokenize(']]'))) diff --git a/pipenv/patched/prettytoml/elements/test_table.py b/pipenv/patched/prettytoml/elements/test_table.py deleted file mode 100644 index ef0dba8132..0000000000 --- a/pipenv/patched/prettytoml/elements/test_table.py +++ /dev/null @@ -1,59 +0,0 @@ -from prettytoml import lexer -from prettytoml.elements.atomic import AtomicElement -from prettytoml.elements.metadata import WhitespaceElement, PunctuationElement, NewlineElement, CommentElement -from prettytoml.elements.table import TableElement - - -def test_table(): - - initial_toml = """name = "first" -id=42 # My id - - -""" - - tokens = tuple(lexer.tokenize(initial_toml)) - - elements = ( - AtomicElement(tokens[:1]), - WhitespaceElement(tokens[1:2]), - PunctuationElement(tokens[2:3]), - WhitespaceElement(tokens[3:4]), - AtomicElement(tokens[4:5]), - NewlineElement(tokens[5:6]), - - AtomicElement(tokens[6:7]), - PunctuationElement(tokens[7:8]), - AtomicElement(tokens[8:9]), - WhitespaceElement(tokens[9:10]), - CommentElement(tokens[10:12]), - - NewlineElement(tokens[12:13]), - NewlineElement(tokens[13:14]), - ) - - table = TableElement(elements) - - assert set(table.items()) == {('name', 'first'), ('id', 42)} - - assert table['name'] == 'first' - assert table['id'] 
== 42 - - table['relation'] = 'another' - - assert set(table.items()) == {('name', 'first'), ('id', 42), ('relation', 'another')} - - table['name'] = 'fawzy' - - assert set(table.items()) == {('name', 'fawzy'), ('id', 42), ('relation', 'another')} - - expected_toml = """name = "fawzy" -id=42 # My id -relation = "another" - - -""" - - assert table.serialized() == expected_toml - - diff --git a/pipenv/patched/prettytoml/elements/test_tableheader.py b/pipenv/patched/prettytoml/elements/test_tableheader.py deleted file mode 100644 index 67b1ded49c..0000000000 --- a/pipenv/patched/prettytoml/elements/test_tableheader.py +++ /dev/null @@ -1,12 +0,0 @@ -from prettytoml import lexer -from prettytoml.elements.tableheader import TableHeaderElement - - -def test_tableheader(): - tokens = tuple(lexer.tokenize('\n\t [[personal. information.details]] \n')) - element = TableHeaderElement(tokens) - - assert element.is_array_of_tables - assert ('personal', 'information', 'details') == element.names - - assert element.has_name_prefix(('personal', 'information')) diff --git a/pipenv/patched/prettytoml/elements/test_traversal.py b/pipenv/patched/prettytoml/elements/test_traversal.py deleted file mode 100644 index 2f00257165..0000000000 --- a/pipenv/patched/prettytoml/elements/test_traversal.py +++ /dev/null @@ -1,18 +0,0 @@ -from prettytoml.elements.test_common import DummyFile - - -def test_traversal(): - dummy_file = DummyFile() - - assert dummy_file._find_following_table_header(-1) == 1 - assert dummy_file._find_following_table_header(1) == 3 - assert dummy_file._find_following_table_header(3) == 5 - assert dummy_file._find_following_table_header(5) == 7 - assert dummy_file._find_following_table_header(7) < 0 - - assert dummy_file._find_preceding_table(30) == 8 - assert dummy_file._find_preceding_table(8) == 6 - assert dummy_file._find_preceding_table(6) == 4 - assert dummy_file._find_preceding_table(4) == 2 - assert dummy_file._find_preceding_table(2) == 0 - assert 
dummy_file._find_preceding_table(0) < 0 diff --git a/pipenv/patched/prettytoml/elements/traversal/__init__.py b/pipenv/patched/prettytoml/elements/traversal/__init__.py deleted file mode 100644 index c93506e2ed..0000000000 --- a/pipenv/patched/prettytoml/elements/traversal/__init__.py +++ /dev/null @@ -1,175 +0,0 @@ -from prettytoml import tokens -from prettytoml.elements import common -from prettytoml.elements.metadata import PunctuationElement, NewlineElement -from prettytoml.elements.traversal import predicates - - -class TraversalMixin: - """ - A mix-in that provides convenient sub-element traversal to any class with - an `elements` member that is a sequence of Element instances - """ - - def __find_following_element(self, index, predicate): - """ - Finds and returns the index of element in self.elements that evaluates the given predicate to True - and whose index is higher than the given index, or returns -Infinity on failure. - """ - return find_following(self.elements, predicate, index) - - def __find_preceding_element(self, index, predicate): - """ - Finds and returns the index of the element in self.elements that evaluates the given predicate to True - and whose index is lower than the given index. - """ - i = find_previous(self.elements, predicate, index) - if i == float('inf'): - return float('-inf') - return i - - def __must_find_following_element(self, predicate): - """ - Finds and returns the index to the element in self.elements that evaluatest the predicate to True, or raises - an error. - """ - i = self.__find_following_element(-1, predicate) - if i < 0: - raise RuntimeError('Could not find non-optional element') - return i - - def _enumerate_non_metadata_sub_elements(self): - """ - Returns a sequence of of (index, sub_element) of the non-metadata sub-elements. 
- """ - return ((i, element) for i, element in enumerate(self.elements) if element.type != common.TYPE_METADATA) - - def _find_preceding_comma(self, index): - """ - Returns the index of the preceding comma element to the given index, or -Infinity. - """ - return self.__find_preceding_element(index, predicates.op_comma) - - def _find_following_comma(self, index): - """ - Returns the index of the following comma element after the given index, or -Infinity. - """ - def predicate(element): - return isinstance(element, PunctuationElement) and element.token.type == tokens.TYPE_OP_COMMA - return self.__find_following_element(index, predicate) - - def _find_following_newline(self, index): - """ - Returns the index of the following newline element after the given index, or -Infinity. - """ - return self.__find_following_element(index, lambda e: isinstance(e, NewlineElement)) - - def _find_following_comment(self, index): - """ - Returns the index of the following comment element after the given index, or -Infinity. - """ - return self.__find_following_element(index, predicates.comment) - - def _find_following_line_terminator(self, index): - """ - Returns the index of the following comment or newline element after the given index, or -Infinity. - """ - following_comment = self._find_following_comment(index) - following_newline = self._find_following_newline(index) - - if following_comment == float('-inf'): - return following_newline - if following_newline == float('-inf'): - return following_comment - - if following_newline < following_comment: - return following_newline - else: - return following_comment - - def _find_preceding_newline(self, index): - """ - Returns the index of the preceding newline element to the given index, or -Infinity. - """ - return self.__find_preceding_element(index, predicates.newline) - - def _find_following_non_metadata(self, index): - """ - Returns the index to the following non-metadata element after the given index, or -Infinity. 
- """ - return self.__find_following_element(index, predicates.non_metadata) - - def _find_closing_square_bracket(self): - """ - Returns the index to the closing square bracket, or raises an Error. - """ - - return self.__must_find_following_element(predicates.closing_square_bracket) - - def _find_following_opening_square_bracket(self, index): - """ - Returns the index to the opening square bracket, or -Infinity. - """ - return self.__find_following_element(index, predicates.opening_square_bracket) - - def _find_following_closing_square_bracket(self, index): - """ - Returns the index to the closing square bracket, or -Infinity. - """ - return self.__find_following_element(index, predicates.closing_square_bracket) - - def _find_following_table(self, index): - """ - Returns the index to the next TableElement after the specified index, or -Infinity. - """ - return self.__find_following_element(index, predicates.table) - - def _find_preceding_table(self, index): - """ - Returns the index to the preceding TableElement to the specified index, or -Infinity. - """ - return self.__find_preceding_element(index,predicates.table) - - def _find_closing_curly_bracket(self): - """ - Returns the index to the closing curly bracket, or raises an Error. - """ - def predicate(element): - return isinstance(element, PunctuationElement) and element.token.type == tokens.TYPE_OP_CURLY_RIGHT_BRACKET - return self.__must_find_following_element(predicate) - - def _find_following_table_header(self, index): - """ - Returns the index to the table header after the given element index, or -Infinity. - """ - return self.__find_following_element(index, predicates.table_header) - - -def find_following(element_seq, predicate, index=None): - """ - Finds and returns the index of the next element fulfilling the specified predicate after the specified - index, or -Infinity. - - Starts searching linearly from the start_from index. 
- """ - - if isinstance(index, (int, float)) and index < 0: - index = None - - for i, element in tuple(enumerate(element_seq))[index+1 if index is not None else index:]: - if predicate(element): - return i - return float('-inf') - - -def find_previous(element_seq, predicate, index=None): - """ - Finds and returns the index of the previous element fulfilling the specified predicate preceding to the specified - index, or Infinity. - """ - if isinstance(index, (int, float)) and index >= len(element_seq): - index = None - - for i, element in reversed(tuple(enumerate(element_seq))[:index]): - if predicate(element): - return i - return float('inf') diff --git a/pipenv/patched/prettytoml/elements/traversal/predicates.py b/pipenv/patched/prettytoml/elements/traversal/predicates.py deleted file mode 100644 index f18616bfda..0000000000 --- a/pipenv/patched/prettytoml/elements/traversal/predicates.py +++ /dev/null @@ -1,48 +0,0 @@ - -""" - The following predicates can be used in the traversal functions directly. -""" - -from ..atomic import AtomicElement -from ..metadata import PunctuationElement, CommentElement, NewlineElement, WhitespaceElement -from prettytoml import tokens -from .. 
import common - - -atomic = lambda e: isinstance(e, AtomicElement) - - -op_assignment = lambda e: isinstance(e, PunctuationElement) and e.token.type == tokens.TYPE_OP_ASSIGNMENT - - -op_comma = lambda e: isinstance(e, PunctuationElement) and e.token.type == tokens.TYPE_OP_COMMA - - -comment = lambda e: isinstance(e, CommentElement) - - -newline = lambda e: isinstance(e, NewlineElement) - - -non_metadata = lambda e: e.type != common.TYPE_METADATA - - -closing_square_bracket = \ - lambda e: isinstance(e, PunctuationElement) and e.token.type == tokens.TYPE_OP_SQUARE_RIGHT_BRACKET - - -opening_square_bracket = \ - lambda e: isinstance(e, PunctuationElement) and e.token.type == tokens.TYPE_OP_SQUARE_LEFT_BRACKET - - -def table(e): - from ..table import TableElement - return isinstance(e, TableElement) - - -def table_header(e): - from prettytoml.elements.tableheader import TableHeaderElement - return isinstance(e, TableHeaderElement) - - -whitespace = lambda e: isinstance(e, WhitespaceElement) diff --git a/pipenv/patched/prettytoml/errors.py b/pipenv/patched/prettytoml/errors.py deleted file mode 100644 index 23e69eb583..0000000000 --- a/pipenv/patched/prettytoml/errors.py +++ /dev/null @@ -1,32 +0,0 @@ - - -class TOMLError(Exception): - """ - All errors raised by this module are descendants of this type. - """ - - -class InvalidTOMLFileError(TOMLError): - pass - - -class NoArrayFoundError(TOMLError): - """ - An array of tables was requested but none exist by the given name. - """ - - -class InvalidValueError(TOMLError): - pass - - -class DuplicateKeysError(TOMLError): - """ - Duplicate keys detected in the parsed file. - """ - - -class DuplicateTablesError(TOMLError): - """ - Duplicate tables detected in the parsed file. 
- """ diff --git a/pipenv/patched/prettytoml/lexer/__init__.py b/pipenv/patched/prettytoml/lexer/__init__.py deleted file mode 100644 index da32963a14..0000000000 --- a/pipenv/patched/prettytoml/lexer/__init__.py +++ /dev/null @@ -1,123 +0,0 @@ - -""" -A regular expression based Lexer/tokenizer for TOML. -""" - -from collections import namedtuple -import re -from prettytoml import tokens -from prettytoml.errors import TOMLError - -TokenSpec = namedtuple('TokenSpec', ('type', 're')) - -# Specs of all the valid tokens -_LEXICAL_SPECS = ( - TokenSpec(tokens.TYPE_COMMENT, re.compile(r'^(#.*)\n')), - TokenSpec(tokens.TYPE_STRING, re.compile(r'^("(([^"]|\\")+?[^\\]|([^"]|\\")|)")')), # Single line only - TokenSpec(tokens.TYPE_MULTILINE_STRING, re.compile(r'^(""".*?""")', re.DOTALL)), - TokenSpec(tokens.TYPE_LITERAL_STRING, re.compile(r"^('.*?')")), - TokenSpec(tokens.TYPE_MULTILINE_LITERAL_STRING, re.compile(r"^('''.*?''')", re.DOTALL)), - TokenSpec(tokens.TYPE_BARE_STRING, re.compile(r'^([A-Za-z0-9_-]+)')), - TokenSpec(tokens.TYPE_DATE, re.compile( - r'^([0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]*)?)?(([zZ])|((\+|-)[0-9]{2}:[0-9]{2}))?)')), - TokenSpec(tokens.TYPE_WHITESPACE, re.compile(r'^( |\t)', re.DOTALL)), - TokenSpec(tokens.TYPE_INTEGER, re.compile(r'^(((\+|-)[0-9_]+)|([0-9][0-9_]*))')), - TokenSpec(tokens.TYPE_FLOAT, - re.compile(r'^((((\+|-)[0-9_]+)|([1-9][0-9_]*))(\.[0-9_]+)?([eE](\+|-)?[0-9_]+)?)')), - TokenSpec(tokens.TYPE_BOOLEAN, re.compile(r'^(true|false)')), - TokenSpec(tokens.TYPE_OP_SQUARE_LEFT_BRACKET, re.compile(r'^(\[)')), - TokenSpec(tokens.TYPE_OP_SQUARE_RIGHT_BRACKET, re.compile(r'^(\])')), - TokenSpec(tokens.TYPE_OP_CURLY_LEFT_BRACKET, re.compile(r'^(\{)')), - TokenSpec(tokens.TYPE_OP_CURLY_RIGHT_BRACKET, re.compile(r'^(\})')), - TokenSpec(tokens.TYPE_OP_ASSIGNMENT, re.compile(r'^(=)')), - TokenSpec(tokens.TYPE_OP_COMMA, re.compile(r'^(,)')), - TokenSpec(tokens.TYPE_OP_DOUBLE_SQUARE_LEFT_BRACKET, re.compile(r'^(\[\[)')), - 
TokenSpec(tokens.TYPE_OP_DOUBLE_SQUARE_RIGHT_BRACKET, re.compile(r'^(\]\])')), - TokenSpec(tokens.TYPE_OPT_DOT, re.compile(r'^(\.)')), - TokenSpec(tokens.TYPE_NEWLINE, re.compile('^(\n|\r\n)')), -) - - -def _next_token_candidates(source): - matches = [] - for token_spec in _LEXICAL_SPECS: - match = token_spec.re.search(source) - if match: - matches.append(tokens.Token(token_spec.type, match.group(1))) - return matches - - -def _choose_from_next_token_candidates(candidates): - - if len(candidates) == 1: - return candidates[0] - elif len(candidates) > 1: - # Return the maximal-munch with ties broken by natural order of token type. - maximal_munch_length = max(len(token.source_substring) for token in candidates) - maximal_munches = [token for token in candidates if len(token.source_substring) == maximal_munch_length] - return sorted(maximal_munches)[0] # Return the first in sorting by priority - - -def _munch_a_token(source): - """ - Munches a single Token instance if it could recognize one at the beginning of the - given source text, or None if no token type could be recognized. - """ - candidates = _next_token_candidates(source) - return _choose_from_next_token_candidates(candidates) - - -class LexerError(TOMLError): - - def __init__(self, message): - self._message = message - - def __repr__(self): - return self._message - - def __str__(self): - return self._message - - -def tokenize(source, is_top_level=False): - """ - Tokenizes the input TOML source into a stream of tokens. - - If is_top_level is set to True, will make sure that the input source has a trailing newline character - before it is tokenized. - - Raises a LexerError when it fails recognize another token while not at the end of the source. - """ - - # Newlines are going to be normalized to UNIX newlines. 
- source = source.replace('\r\n', '\n') - - if is_top_level and source and source[-1] != '\n': - source += '\n' - - next_row = 1 - next_col = 1 - next_index = 0 - - while next_index < len(source): - - new_token = _munch_a_token(source[next_index:]) - - if not new_token: - raise LexerError("failed to read the next token at ({}, {}): {}".format( - next_row, next_col, source[next_index:])) - - # Set the col and row on the new token - new_token = tokens.Token(new_token.type, new_token.source_substring, next_col, next_row) - - # Advance the index, row and col count - next_index += len(new_token.source_substring) - for c in new_token.source_substring: - if c == '\n': - next_row += 1 - next_col = 1 - else: - next_col += 1 - - yield new_token - diff --git a/pipenv/patched/prettytoml/lexer/test_lexer.py b/pipenv/patched/prettytoml/lexer/test_lexer.py deleted file mode 100644 index df10b46dfd..0000000000 --- a/pipenv/patched/prettytoml/lexer/test_lexer.py +++ /dev/null @@ -1,153 +0,0 @@ -# -*- coding: utf-8 -*- - -from prettytoml.lexer import _munch_a_token -from prettytoml.lexer import * - -# A mapping from token types to a sequence of pairs of (source_text, expected_matched_text) -valid_tokens = { - tokens.TYPE_COMMENT: ( - ( - '# My very insightful comment about the state of the universe\n# And now for something completely different!', - '# My very insightful comment about the state of the universe', - ), - ), - tokens.TYPE_STRING: ( - ('"a valid hug3 text" "some other string" = 42', '"a valid hug3 text"'), - ( - r'"I\'m a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF." "some other string" = 42', - r'"I\'m a string. \"You can quote me\". 
Name\tJos\u00E9\nLocation\tSF."' - ), - ('"ʎǝʞ" key', '"ʎǝʞ"'), - ('""', '""'), - ('"t"', '"t"'), - ), - tokens.TYPE_MULTILINE_STRING: ( - ('"""\nRoses are red\nViolets are blue""" """other text"""', '"""\nRoses are red\nViolets are blue"""'), - ), - tokens.TYPE_LITERAL_STRING: ( - (r"'This is \ \n a \\ literal string' 'another \ literal string'", r"'This is \ \n a \\ literal string'"), - ), - tokens.TYPE_MULTILINE_LITERAL_STRING: ( - ( - "'''\nThe first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n''' '''some other\n\n\t string'''", - "'''\nThe first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n'''" - ), - ), - tokens.TYPE_DATE: ( - ('1979-05-27 5345', '1979-05-27'), - ('1979-05-27T07:32:00Z something', '1979-05-27T07:32:00Z'), - ('1979-05-27T00:32:00-07:00 ommm', '1979-05-27T00:32:00-07:00'), - ('1979-05-27T00:32:00.999999-07:00 2346', '1979-05-27T00:32:00.999999-07:00'), - ), - tokens.TYPE_WHITESPACE: ( - (' \t\n \r some_text', ' '), - ), - tokens.TYPE_INTEGER: ( - ('+99 "number"', "+99"), - ('42 fwfwef', "42"), - ('-17 fh34g34g', "-17"), - ('5_349_221 apples', "5_349_221"), - ('-1_2_3_4_5 steps', '-1_2_3_4_5') - ), - tokens.TYPE_FLOAT: ( - ('1.0 fwef', '1.0'), - ('3.1415 g4g', '3.1415'), - ('-0.01 433re', '-0.01'), - ('5e+2_2 ersdvf', '5e+2_2'), - ('1e6 ewe23', '1e6'), - ('-2E-2.2 3 rf23', '-2E-2'), - ('6.626e-34 +234f', '6.626e-34'), - ('9_224_617.445_991_228_313 f1ewer 23f4h = nonesense', '9_224_617.445_991_228_313'), - ('1e1_000 2346f,ef2!!', '1e1_000'), - ), - tokens.TYPE_BOOLEAN: ( - ('false business = true', 'false'), - ('true true', 'true'), - ), - tokens.TYPE_OP_SQUARE_LEFT_BRACKET: ( - ('[table_name]', '['), - ), - tokens.TYPE_OP_SQUARE_RIGHT_BRACKET: ( - (']\nbusiness = awesome', ']'), - ), - tokens.TYPE_OP_CURLY_LEFT_BRACKET: ( - ('{item_exists = no}', '{'), - ), - tokens.TYPE_OP_CURLY_RIGHT_BRACKET: ( - ('} moving on', '}'), - ), - tokens.TYPE_OP_COMMA: ( - (',item2,item4', ','), - ), - 
tokens.TYPE_OP_ASSIGNMENT: ( - ('== 42', '='), - ), - tokens.TYPE_OP_DOUBLE_SQUARE_LEFT_BRACKET: ( - ('[[array.of.tables]]', '[['), - ), - tokens.TYPE_OP_DOUBLE_SQUARE_RIGHT_BRACKET: ( - (']] item=3', ']]'), - ), - tokens.TYPE_BARE_STRING: ( - ('key another', 'key'), - ('bare_key 2fews', 'bare_key'), - ('bare-key kfcw', 'bare-key'), - ), - tokens.TYPE_OPT_DOT: ( - ('."another key"', '.'), - ('.subname', '.'), - ), - tokens.TYPE_NEWLINE: ( - ('\n\r \n', '\n'), - ) -} - -# A mapping from a token type to a sequence of (source, matched_text) pairs that shouldn't result from consuming the -# source text. -invalid_tokens = { - tokens.TYPE_INTEGER: ( - ('_234_423', ''), - ('0446234234', ''), - ), - tokens.TYPE_STRING: ( - ('"""', '"""'), - ), - tokens.TYPE_BOOLEAN: ( - ('True', 'True'), - ('True', 'true'), - ), - tokens.TYPE_FLOAT: ( - ('', ''), - ) -} - - -def test_valid_tokenizing(): - for token_type in valid_tokens: - for (source, expected_match) in valid_tokens[token_type]: - - token = _munch_a_token(source) - assert token, "Failed to tokenize: {}\nExpected: {}\nOut of: {}\nGot nothing!".format( - token_type, expected_match, source) - - assert token.type == token_type, \ - "Expected type: {}\nOut of: {}\nThat matched: {}\nOf type: {}".format( - token_type, source, token.source_substring, token.type) - assert token.source_substring == expected_match - - -def test_invalid_tokenizing(): - for token_type in invalid_tokens: - for source, expected_match in invalid_tokens[token_type]: - token = _munch_a_token(source) - if token: - assert not (token.type == token_type and token.source_substring == expected_match) - - -def test_token_type_order(): - type_a = tokens.TokenType('a', 5, is_metadata=False) - type_b = tokens.TokenType('b', 0, is_metadata=False) - type_c = tokens.TokenType('c', 3, is_metadata=False) - - assert type_b < type_c < type_a - assert type_a > type_c > type_b diff --git a/pipenv/patched/prettytoml/parser/__init__.py 
b/pipenv/patched/prettytoml/parser/__init__.py deleted file mode 100644 index 4cf600c08e..0000000000 --- a/pipenv/patched/prettytoml/parser/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ - -""" - A parser for TOML tokens into TOML elements. -""" - - -from prettytoml.parser.errors import ParsingError - - -def parse_tokens(tokens): - """ - Parses the given token sequence into a sequence of top-level TOML elements. - - Raises ParserError on invalid TOML input. - """ - from .tokenstream import TokenStream - return _parse_token_stream(TokenStream(tokens)) - - -def _parse_token_stream(token_stream): - """ - Parses the given token_stream into a sequence of top-level TOML elements. - - Raises ParserError on invalid input TOML. - """ - from .parser import toml_file_elements - from .elementsanitizer import sanitize - - elements, pending = toml_file_elements(token_stream) - - if not pending.at_end: - raise ParsingError('Failed to parse line {}'.format(pending.head.row)) - - return sanitize(elements) diff --git a/pipenv/patched/prettytoml/parser/elementsanitizer.py b/pipenv/patched/prettytoml/parser/elementsanitizer.py deleted file mode 100644 index bec4893a98..0000000000 --- a/pipenv/patched/prettytoml/parser/elementsanitizer.py +++ /dev/null @@ -1,58 +0,0 @@ -from prettytoml import elements -from prettytoml.elements.table import TableElement -from prettytoml.elements.tableheader import TableHeaderElement -from prettytoml.errors import InvalidTOMLFileError -from prettytoml.util import PeekableIterator - - -def sanitize(_elements): - """ - Finds TableHeader elements that are not followed by TableBody elements and inserts empty TableElement - right after those. 
- """ - - output = list(_elements) - - def find_next_table_header(after=-1): - return next((i for (i, element) in enumerate(output) - if i > after and isinstance(element, TableHeaderElement)), float('-inf')) - - def find_next_table_body(after=-1): - return next((i for (i, element) in enumerate(output) - if i > after and isinstance(element, TableElement)), float('-inf')) - - next_table_header_i = find_next_table_header() - while next_table_header_i >= 0: - - following_table_header_i = find_next_table_header(next_table_header_i) - following_table_body_i = find_next_table_body(next_table_header_i) - - if (following_table_body_i < 0) or \ - (following_table_header_i >= 0 and (following_table_header_i < following_table_body_i)): - output.insert(next_table_header_i+1, TableElement(tuple())) - - next_table_header_i = find_next_table_header(next_table_header_i) - - return output - - -def validate_sanitized(_elements): - - # Non-metadata elements must start with an optional TableElement, followed by - # zero or more (TableHeaderElement, TableElement) pairs. 
- - if not _elements: - return - - it = PeekableIterator(e for e in _elements if e.type != elements.TYPE_METADATA) - - if isinstance(it.peek(), TableElement): - it.next() - - while it.peek(): - if not isinstance(it.peek(), TableHeaderElement): - raise InvalidTOMLFileError - it.next() - if not isinstance(it.peek(), TableElement): - raise InvalidTOMLFileError - it.next() diff --git a/pipenv/patched/prettytoml/parser/errors.py b/pipenv/patched/prettytoml/parser/errors.py deleted file mode 100644 index eca12f3259..0000000000 --- a/pipenv/patched/prettytoml/parser/errors.py +++ /dev/null @@ -1,17 +0,0 @@ -from prettytoml.errors import TOMLError - - -class ParsingError(TOMLError): - - def __init__(self, message='', token=None): - self.message = message - self.token = token - - def __repr__(self): - if self.message and self.token: - return "{} at row {} and col {}".format(self.message, self.token.row, self.token.col) - else: - return self.message - - def __str__(self): - return repr(self) diff --git a/pipenv/patched/prettytoml/parser/parser.py b/pipenv/patched/prettytoml/parser/parser.py deleted file mode 100644 index e61c0db5a4..0000000000 --- a/pipenv/patched/prettytoml/parser/parser.py +++ /dev/null @@ -1,376 +0,0 @@ - -""" - A Recursive Descent implementation of a lexical parser for TOML. - - Grammar: - -------- - - Newline -> NEWLINE - Comment -> COMMENT Newline - LineTerminator -> Comment | Newline - Space -> WHITESPACE Space | WHITESPACE | EMPTY - TableHeader -> Space [ Space TableHeaderName Space ] Space LineTerminator | - Space [[ Space TableHeaderName Space ]] Space LineTerminator - TableHeaderName -> STRING Space '.' 
Space TableHeaderName | STRING - Atomic -> STRING | INTEGER | FLOAT | DATE | BOOLEAN - - Array -> '[' Space ArrayInternal Space ']' | '[' Space ArrayInternal Space LineTerminator Space ']' - ArrayInternal -> LineTerminator Space ArrayInternal | Value Space ',' Space LineTerminator Space ArrayInternal | - Value Space ',' Space ArrayInternal | LineTerminator | Value | EMPTY - - InlineTable -> '{' Space InlineTableInternal Space '}' - InlineTableKeyValuePair = STRING Space '=' Space Value - InlineTableInternal -> InlineTableKeyValuePair Space ',' Space InlineTableInternal | - InlineTableKeyValuePair | Empty - - Value -> Atomic | InlineTable | Array - KeyValuePair -> Space STRING Space '=' Space Value Space LineTerminator - - TableBody -> KeyValuePair TableBody | EmptyLine TableBody | EmptyLine | KeyValuePair - - EmptyLine -> Space LineTerminator - FileEntry -> TableHeader | TableBody - - TOMLFileElements -> FileEntry TOMLFileElements | FileEntry | EmptyLine | EMPTY -""" - -from prettytoml import tokens -from prettytoml.elements.array import ArrayElement -from prettytoml.elements.atomic import AtomicElement -from prettytoml.elements.inlinetable import InlineTableElement -from prettytoml.elements.metadata import NewlineElement, CommentElement, WhitespaceElement, PunctuationElement -from prettytoml.elements.table import TableElement -from prettytoml.elements.tableheader import TableHeaderElement - -from prettytoml.parser.recdesc import capture_from -from prettytoml.parser.errors import ParsingError -from prettytoml.parser.tokenstream import TokenStream - -""" - Non-terminals are represented as functions which return (RESULT, pending_token_stream), or raise ParsingError. 
-""" - - -def token(token_type): - def factory(ts): - t = ts.head - if t.type != token_type: - raise ParsingError('Expected a token of type {}'.format(token_type)) - return t, ts.tail - return factory - - -def newline_element(token_stream): - """ - Returns NewlineElement, pending_token_stream or raises ParsingError. - """ - captured = capture_from(token_stream).find(token(tokens.TYPE_NEWLINE)) - return NewlineElement(captured.value()), captured.pending_tokens - - -def comment_tokens(ts1): - c1 = capture_from(ts1).find(token(tokens.TYPE_COMMENT)).and_find(token(tokens.TYPE_NEWLINE)) - return c1.value(), c1.pending_tokens - - -def comment_element(token_stream): - """ - Returns CommentElement, pending_token_stream or raises ParsingError. - """ - captured = capture_from(token_stream).find(comment_tokens) - return CommentElement(captured.value()), captured.pending_tokens - - -def line_terminator_tokens(token_stream): - captured = capture_from(token_stream).find(comment_tokens).or_find(token(tokens.TYPE_NEWLINE)) - return captured.value(), captured.pending_tokens - - -def line_terminator_element(token_stream): - captured = capture_from(token_stream).find(comment_element).or_find(newline_element) - return captured.value('Expected a comment or a newline')[0], captured.pending_tokens - - -def zero_or_more_tokens(token_type): - - def factory(token_stream): - def more(ts): - c = capture_from(ts).find(token(token_type)).and_find(zero_or_more_tokens(token_type)) - return c.value(), c.pending_tokens - - def two(ts): - c = capture_from(ts).find(token(tokens.TYPE_WHITESPACE)) - return c.value(), c.pending - - def zero(ts): - return tuple(), ts - - captured = capture_from(token_stream).find(more).or_find(two).or_find(zero) - return captured.value(), captured.pending_tokens - - return factory - - -def space_element(token_stream): - captured = capture_from(token_stream).find(zero_or_more_tokens(tokens.TYPE_WHITESPACE)) - return WhitespaceElement([t for t in captured.value() if t]), 
captured.pending_tokens - - -def string_token(token_stream): - captured = capture_from(token_stream).\ - find(token(tokens.TYPE_BARE_STRING)).\ - or_find(token(tokens.TYPE_STRING)).\ - or_find(token(tokens.TYPE_LITERAL_STRING)).\ - or_find(token(tokens.TYPE_MULTILINE_STRING)).\ - or_find(token(tokens.TYPE_MULTILINE_LITERAL_STRING)) - return captured.value('Expected a string'), captured.pending_tokens - - -def string_element(token_stream): - captured = capture_from(token_stream).find(string_token) - return AtomicElement(captured.value()), captured.pending_tokens - - -def table_header_name_tokens(token_stream): - - def one(ts): - c = capture_from(ts).\ - find(string_token).\ - and_find(zero_or_more_tokens(tokens.TYPE_WHITESPACE)).\ - and_find(token(tokens.TYPE_OPT_DOT)).\ - and_find(zero_or_more_tokens(tokens.TYPE_WHITESPACE)).\ - and_find(table_header_name_tokens) - return c.value(), c.pending_tokens - - captured = capture_from(token_stream).find(one).or_find(string_token) - return captured.value(), captured.pending_tokens - - -def table_header_element(token_stream): - - def single(ts1): - c1 = capture_from(ts1).\ - find(zero_or_more_tokens(tokens.TYPE_WHITESPACE)).\ - and_find(token(tokens.TYPE_OP_SQUARE_LEFT_BRACKET)).\ - and_find(zero_or_more_tokens(tokens.TYPE_WHITESPACE)).\ - and_find(table_header_name_tokens).\ - and_find(zero_or_more_tokens(tokens.TYPE_WHITESPACE)).\ - and_find(token(tokens.TYPE_OP_SQUARE_RIGHT_BRACKET)).\ - and_find(zero_or_more_tokens(tokens.TYPE_WHITESPACE)).\ - and_find(line_terminator_tokens) - - return c1.value(), c1.pending_tokens - - def double(ts2): - c2 = capture_from(ts2).\ - find(zero_or_more_tokens(tokens.TYPE_WHITESPACE)).\ - and_find(token(tokens.TYPE_OP_DOUBLE_SQUARE_LEFT_BRACKET)).\ - and_find(zero_or_more_tokens(tokens.TYPE_WHITESPACE)).\ - and_find(table_header_name_tokens).\ - and_find(zero_or_more_tokens(tokens.TYPE_WHITESPACE)).\ - and_find(token(tokens.TYPE_OP_DOUBLE_SQUARE_RIGHT_BRACKET)).\ - 
and_find(zero_or_more_tokens(tokens.TYPE_WHITESPACE)).\ - and_find(line_terminator_tokens) - - return c2.value(), c2.pending_tokens - - captured = capture_from(token_stream).find(single).or_find(double) - return TableHeaderElement(captured.value()), captured.pending_tokens - - -def atomic_element(token_stream): - captured = capture_from(token_stream).\ - find(string_token).\ - or_find(token(tokens.TYPE_INTEGER)).\ - or_find(token(tokens.TYPE_FLOAT)).\ - or_find(token(tokens.TYPE_DATE)).\ - or_find(token(tokens.TYPE_BOOLEAN)) - return AtomicElement(captured.value('Expected an atomic primitive value')), captured.pending_tokens - - -def punctuation_element(token_type): - def factory(ts): - c = capture_from(ts).find(token(token_type)) - return PunctuationElement(c.value('Expected the punctuation element: {}'.format(token_type))), c.pending_tokens - return factory - - -def value(token_stream): - captured = capture_from(token_stream).\ - find(atomic_element).\ - or_find(array_element).\ - or_find(inline_table_element) - return captured.value('Expected a primitive value, array or an inline table'), captured.pending_tokens - - -def array_internal(ts): - - def zero(ts0): - c = capture_from(ts0).\ - and_find(line_terminator_element).\ - and_find(space_element).\ - and_find(array_internal) - return c.value(), c.pending_tokens - - def one(ts1): - c = capture_from(ts1).\ - find(value).\ - and_find(space_element).\ - and_find(punctuation_element(tokens.TYPE_OP_COMMA)).\ - and_find(space_element).\ - and_find(line_terminator_element).\ - and_find(space_element).\ - and_find(array_internal) - return c.value(), c.pending_tokens - - def two(ts2): - c = capture_from(ts2).\ - find(value).\ - and_find(space_element).\ - and_find(punctuation_element(tokens.TYPE_OP_COMMA)).\ - and_find(space_element).\ - and_find(array_internal) - return c.value(), c.pending_tokens - - def three(ts3): - c = capture_from(ts3).\ - find(space_element).\ - and_find(line_terminator_element) - return 
c.value(), c.pending_tokens - - captured = capture_from(ts).find(zero).or_find(one).or_find(two).or_find(three).or_find(value).or_empty() - return captured.value(), captured.pending_tokens - - -def array_element(token_stream): - - def one(ts1): - ca = capture_from(ts1).\ - find(punctuation_element(tokens.TYPE_OP_SQUARE_LEFT_BRACKET)).\ - and_find(space_element).\ - and_find(array_internal).\ - and_find(space_element).\ - and_find(punctuation_element(tokens.TYPE_OP_SQUARE_RIGHT_BRACKET)) - return ca.value(), ca.pending_tokens - - def two(ts2): - ca = capture_from(ts2).\ - find(punctuation_element(tokens.TYPE_OP_SQUARE_LEFT_BRACKET)).\ - and_find(space_element).\ - and_find(array_internal).\ - and_find(space_element).\ - and_find(line_terminator_element).\ - and_find(space_element).\ - and_find(punctuation_element(tokens.TYPE_OP_SQUARE_RIGHT_BRACKET)) - return ca.value(), ca.pending_tokens - - captured = capture_from(token_stream).find(one).or_find(two) - return ArrayElement(captured.value()), captured.pending_tokens - - -def inline_table_element(token_stream): - - # InlineTableElement -> '{' Space InlineTableInternal Space '}' - # InlineTableKeyValuePair = STRING Space '=' Space Value - # InlineTableInternal -> InlineTableKeyValuePair Space ',' Space InlineTableInternal | - # InlineTableKeyValuePair | Empty - - def key_value(ts): - ca = capture_from(ts).\ - find(string_element).\ - and_find(space_element).\ - and_find(punctuation_element(tokens.TYPE_OP_ASSIGNMENT)).\ - and_find(space_element).\ - and_find(value) - return ca.value(), ca.pending_tokens - - def internal(ts): - def one(ts1): - c1 = capture_from(ts1).\ - find(key_value).\ - and_find(space_element).\ - and_find(punctuation_element(tokens.TYPE_OP_COMMA)).\ - and_find(space_element).\ - and_find(internal) - return c1.value(), c1.pending_tokens - - c = capture_from(ts).find(one).or_find(key_value).or_empty() - return c.value(), c.pending_tokens - - captured = capture_from(token_stream).\ - 
find(punctuation_element(tokens.TYPE_OP_CURLY_LEFT_BRACKET)).\ - and_find(space_element).\ - and_find(internal).\ - and_find(space_element).\ - and_find(punctuation_element(tokens.TYPE_OP_CURLY_RIGHT_BRACKET)) - - return InlineTableElement(captured.value()), captured.pending_tokens - - -def key_value_pair(token_stream): - captured = capture_from(token_stream).\ - find(space_element).\ - and_find(string_element).\ - and_find(space_element).\ - and_find(punctuation_element(tokens.TYPE_OP_ASSIGNMENT)).\ - and_find(space_element).\ - and_find(value).\ - and_find(space_element).\ - and_find(line_terminator_element) - return captured.value(), captured.pending_tokens - - -def table_body_elements(token_stream): - - # TableBody -> KeyValuePair TableBody | EmptyLine TableBody | EmptyLine | KeyValuePair - - def one(ts1): - c = capture_from(ts1).\ - find(key_value_pair).\ - and_find(table_body_elements) - return c.value(), c.pending_tokens - - def two(ts2): - c = capture_from(ts2).\ - find(empty_line_elements).\ - and_find(table_body_elements) - return c.value(), c.pending_tokens - - captured = capture_from(token_stream).\ - find(one).\ - or_find(two).\ - or_find(empty_line_elements).\ - or_find(key_value_pair) - - return captured.value(), captured.pending_tokens - - -def table_body_element(token_stream): - captured = capture_from(token_stream).find(table_body_elements) - return TableElement(captured.value()), captured.pending_tokens - - -def empty_line_tokens(ts1): - c1 = capture_from(ts1).find(space_element).and_find(line_terminator_element) - return c1.value(), c1.pending_tokens - - -def empty_line_elements(token_stream): - captured = capture_from(token_stream).find(empty_line_tokens) - return captured.value(), captured.pending_tokens - - -def file_entry_element(token_stream): - captured = capture_from(token_stream).find(table_header_element).\ - or_find(table_body_element) - return captured.value(), captured.pending_tokens - - -def toml_file_elements(token_stream): - - def 
one(ts1): - c1 = capture_from(ts1).find(file_entry_element).and_find(toml_file_elements) - return c1.value(), c1.pending_tokens - - captured = capture_from(token_stream).find(one).or_find(file_entry_element).or_empty() - return captured.value(), captured.pending_tokens diff --git a/pipenv/patched/prettytoml/parser/recdesc.py b/pipenv/patched/prettytoml/parser/recdesc.py deleted file mode 100644 index 8731dba3ba..0000000000 --- a/pipenv/patched/prettytoml/parser/recdesc.py +++ /dev/null @@ -1,114 +0,0 @@ -from prettytoml.parser.errors import ParsingError -from prettytoml.parser.tokenstream import TokenStream - - -class Capturer: - """ - Recursive-descent matching DSL. Yeah.. - """ - - def __init__(self, token_stream, value=tuple(), dormant_error=None): - self._token_stream = token_stream - self._value = value - self._dormant_error = dormant_error - - def find(self, finder): - """ - Searches the token stream using the given finder. - - `finder(ts)` is a function that accepts a `TokenStream` instance and returns `(element, pending_ts)` - where `element` is the found "something" or a sequence of "somethings", and `pending_ts` the unconsumed - `TokenStream`. - - `finder(ts)` can raise `ParsingError` to indicate that it couldn't find anything, or - a `TokenStream.EndOfStream` to indicate a premature end of the TokenStream. - - This method returns a Capturer instance that can be further used to find more and more "somethings". The value - at any given moment can be retrieved via the `Capturer.value()` method. - """ - - try: - - # Execute finder! 
- element, pending_ts = finder(self._token_stream) - - # If result is not a sequence, make it so - if not isinstance(element, (tuple, list)): - element = (element,) - - # Return a Capturer with accumulated findings - return Capturer(pending_ts, value=self.value() + element) - - except ParsingError as e: - - # Failed to find, store error in returned value - return Capturer(self._token_stream, dormant_error=e) - - except TokenStream.EndOfStream as e: - - # Premature end of stream, store error in returned value - return Capturer(self._token_stream, dormant_error=e) - - def value(self, parsing_expectation_msg=None): - """ - Returns the accumulated values found as a sequence of values, or raises an encountered dormant error. - - If parsing_expectation_msg is specified and a dormant_error is a ParsingError, the expectation message is used - instead in it. - """ - - if self._dormant_error: - if parsing_expectation_msg and isinstance(self._dormant_error, ParsingError): - raise ParsingError(parsing_expectation_msg, token=self._token_stream.head) - else: - raise self._dormant_error - return self._value - - @property - def pending_tokens(self): - """ - Returns a TokenStream with the pending tokens yet to be processed. - """ - return self._token_stream - - def or_find(self, finder): - """ - If a dormant_error is present, try this new finder instead. If not, does nothing. - """ - if self._dormant_error: - return Capturer(self._token_stream).find(finder) - else: - return self - - def or_end_of_file(self): - """ - Discards any errors if at end of the stream. - """ - if isinstance(self._dormant_error, TokenStream.EndOfStream): - return Capturer(self.pending_tokens, value=self._value) - else: - return self - - def or_empty(self): - """ - Discards any previously-encountered dormant error. 
- """ - if self._dormant_error: - return Capturer(self.pending_tokens, value=self._value) - else: - return self - - def and_find(self, finder): - """ - Accumulate new "somethings" to the stored value using the given finder. - """ - - if self._dormant_error: - return Capturer(self.pending_tokens, dormant_error=self._dormant_error) - - return Capturer(self.pending_tokens, self.value()).find(finder) - - -def capture_from(token_stream): - return Capturer(token_stream) - diff --git a/pipenv/patched/prettytoml/parser/test_parser.py b/pipenv/patched/prettytoml/parser/test_parser.py deleted file mode 100644 index 40dd3dba77..0000000000 --- a/pipenv/patched/prettytoml/parser/test_parser.py +++ /dev/null @@ -1,156 +0,0 @@ -from prettytoml.elements.array import ArrayElement -from prettytoml.elements.atomic import AtomicElement -from prettytoml.elements.metadata import CommentElement, NewlineElement, WhitespaceElement -from prettytoml.elements.tableheader import TableHeaderElement -from prettytoml.lexer import tokenize -from prettytoml.parser import parser -from prettytoml.parser.tokenstream import TokenStream - - -def test_line_terminator_1(): - tokens = tokenize('# Sup\n') - ts = TokenStream(tokens) - element, pending_ts = parser.line_terminator_element(ts) - - assert isinstance(element, CommentElement) - assert pending_ts.offset == 2 - assert ts.offset == 0 - - -def test_line_terminator_2(): - tokens = tokenize('\n') - ts = TokenStream(tokens) - element, pending_ts = parser.line_terminator_element(ts) - - assert isinstance(element, NewlineElement) - assert pending_ts.offset == 1 - assert ts.offset == 0 - - -def test_space_1(): - ts = TokenStream(tokenize(' noo')) - space_element, pending_ts = parser.space_element(ts) - - assert isinstance(space_element, WhitespaceElement) - assert len(space_element.tokens) == 2 - assert pending_ts.offset == 2 - assert ts.offset == 0 - - -def test_space_2(): - ts = TokenStream(tokenize(' noo')) - space_element, pending_ts = 
parser.space_element(ts) - - assert isinstance(space_element, WhitespaceElement) - assert len(space_element.tokens) == 1 - assert pending_ts.offset == 1 - assert ts.offset == 0 - - -def test_space_3(): - ts = TokenStream(tokenize('noo')) - space_element, pending_ts = parser.space_element(ts) - - assert isinstance(space_element, WhitespaceElement) - assert len(space_element.tokens) == 0 - assert pending_ts.offset == 0 - assert ts.offset == 0 - - -def test_table_header(): - ts = TokenStream(tokenize(" [ namez . namey . namex ] \n other things")) - table_header_element, pending_tokens = parser.table_header_element(ts) - - assert isinstance(table_header_element, TableHeaderElement) - assert len(pending_tokens) == 4 - - -def test_atomic_element(): - e1, p1 = parser.atomic_element(TokenStream(tokenize('42 not'))) - assert isinstance(e1, AtomicElement) and e1.value == 42 - assert len(p1) == 2 - - e2, p2 = parser.atomic_element(TokenStream(tokenize('not 42'))) - assert isinstance(e2, AtomicElement) and e2.value == 'not' - assert len(p2) == 2 - - -def test_array(): - array_element, pending_ts = parser.array_element(TokenStream(tokenize('[ 3, 4, 5,6,7] '))) - - assert isinstance(array_element, ArrayElement) - assert len(array_element) == 5 - assert len(pending_ts) == 1 - - -def test_array_2(): - - text = """[ - "alpha", - "omega" -]""" - - array_element, pending_ts = parser.array_element(TokenStream(tokenize(text))) - - assert array_element[0] == 'alpha' - assert array_element[1] == 'omega' - - -def test_empty_array(): - - text = '[]' - - array_element, pending_ts = parser.array_element(TokenStream(tokenize(text))) - - assert isinstance(array_element, ArrayElement) - assert pending_ts.at_end - - -def test_inline_table(): - inline_table, pending_ts = parser.inline_table_element(TokenStream(tokenize('{ "id"= 42,test = name} vroom'))) - - assert set(inline_table.keys()) == {'id', 'test'} - assert len(pending_ts) == 2 - assert inline_table['id'] == 42 - assert 
inline_table['test'] == 'name' - - -def test_table_body(): - table_body, pending_ts = parser.table_body_element(TokenStream(tokenize(' name= "test" # No way man!\nid =42\n vvv'))) - assert set(table_body.keys()) == {'name', 'id'} - assert len(pending_ts) == 2 - assert table_body['name'] == 'test' - assert table_body['id'] == 42 - - -def test_key_value_pair(): - text = """hosts = [ - "alpha", - "omega" -] -""" - - parsed, pending_ts = parser.key_value_pair(TokenStream(tokenize(text))) - - assert isinstance(parsed[1], AtomicElement) - assert isinstance(parsed[5], ArrayElement) - - -def test_table_body_2(): - - text = """ -data = [ ["gamma", "delta"], [1, 2] ] - -# Line breaks are OK when inside arrays -hosts = [ - "alpha", - "omega" -] - -str_multiline = wohoo -""" - - table_body, pending_ts = parser.table_body_element(TokenStream(tokenize(text))) - - assert len(pending_ts) == 0 - diff --git a/pipenv/patched/prettytoml/parser/tokenstream.py b/pipenv/patched/prettytoml/parser/tokenstream.py deleted file mode 100644 index 2a2fdc25f7..0000000000 --- a/pipenv/patched/prettytoml/parser/tokenstream.py +++ /dev/null @@ -1,39 +0,0 @@ - -class TokenStream: - """ - An immutable subset of a token sequence - """ - - class EndOfStream(Exception): - pass - - Nothing = tuple() - - def __init__(self, _tokens, offset=0): - if isinstance(_tokens, tuple): - self._tokens = _tokens - else: - self._tokens = tuple(_tokens) - self._head_index = offset - - def __len__(self): - return len(self._tokens) - self.offset - - @property - def head(self): - try: - return self._tokens[self._head_index] - except IndexError: - raise TokenStream.EndOfStream - - @property - def tail(self): - return TokenStream(self._tokens, offset=self._head_index+1) - - @property - def offset(self): - return self._head_index - - @property - def at_end(self): - return self.offset >= len(self._tokens) diff --git a/pipenv/patched/prettytoml/prettifier/__init__.py b/pipenv/patched/prettytoml/prettifier/__init__.py deleted 
file mode 100644 index 97ac161930..0000000000 --- a/pipenv/patched/prettytoml/prettifier/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -from . import deindentanonymoustable, tableindent, tableassignment -from prettytoml.prettifier import tablesep, commentspace, linelength, tableentrysort - -""" - TOMLFile prettifiers - - Each prettifier is a function that accepts a sequence of Element instances that make up a - TOML file and it is allowed to modify it as it pleases. -""" - - -UNIFORM_TABLE_INDENTATION = tableindent.table_entries_should_be_uniformly_indented -UNIFORM_TABLE_ASSIGNMENT_SPACING = tableassignment.table_assignment_spacing -ANONYMOUS_TABLE_INDENTATION = deindentanonymoustable.deindent_anonymous_table -COMMENT_SPACING = commentspace.comment_space -TABLE_SPACING = tablesep.table_separation -LINE_LENGTH_ENFORCERS = linelength.line_length_limiter -TABLE_ENTRY_SORTING = tableentrysort.sort_table_entries - - -ALL = ( - TABLE_SPACING, # Must be before COMMENT_SPACING - COMMENT_SPACING, # Must be after TABLE_SPACING - UNIFORM_TABLE_INDENTATION, - UNIFORM_TABLE_ASSIGNMENT_SPACING, - ANONYMOUS_TABLE_INDENTATION, - LINE_LENGTH_ENFORCERS, - TABLE_ENTRY_SORTING, -) - - -def prettify(toml_file_elements, prettifiers=ALL): - """ - Prettifies a sequence of element instances according to pre-defined set of formatting rules. 
- """ - elements = toml_file_elements[:] - for prettifier in prettifiers: - elements = prettifier(elements) - return elements diff --git a/pipenv/patched/prettytoml/prettifier/commentspace.py b/pipenv/patched/prettytoml/prettifier/commentspace.py deleted file mode 100644 index fd54934917..0000000000 --- a/pipenv/patched/prettytoml/prettifier/commentspace.py +++ /dev/null @@ -1,35 +0,0 @@ - -from prettytoml.elements import traversal as t, factory as element_factory -from prettytoml.elements.table import TableElement - - -def comment_space(toml_file_elements): - """ - Rule: Line-terminating comments should always be prefixed by a single tab character whitespace only. - """ - elements = toml_file_elements[:] - for element in elements: - if isinstance(element, TableElement): - _do_table(element.sub_elements) - return elements - - -def _do_table(table_elements): - - # Iterator index - i = float('-inf') - - def next_newline(): - return t.find_following(table_elements, t.predicates.newline, i) - - def next_comment(): - return t.find_following(table_elements, t.predicates.comment, i) - - def last_non_metadata(): - return t.find_previous(table_elements, t.predicates.non_metadata, next_comment()) - - while next_comment() >= 0: - if i < last_non_metadata() < next_comment() < next_newline(): - del table_elements[last_non_metadata()+1:next_comment()] - table_elements.insert(next_comment(), element_factory.create_whitespace_element(char='\t', length=1)) - i = next_newline() diff --git a/pipenv/patched/prettytoml/prettifier/common.py b/pipenv/patched/prettytoml/prettifier/common.py deleted file mode 100644 index dd1e01a216..0000000000 --- a/pipenv/patched/prettytoml/prettifier/common.py +++ /dev/null @@ -1,54 +0,0 @@ - -from itertools import * -from prettytoml.elements.common import TokenElement -from prettytoml.elements.metadata import NewlineElement - - -def text_to_elements(toml_text): - from ..lexer import tokenize - from ..parser import parse_tokens - return 
parse_tokens(tokenize(toml_text)) - - -def elements_to_text(toml_elements): - return ''.join(e.serialized() for e in toml_elements) - - -def assert_prettifier_works(source_text, expected_text, prettifier_func): - assert expected_text == elements_to_text(prettifier_func(text_to_elements(source_text))) - - -def lines(elements): - """ - Splits a sequence of elements into a sub-sequence of each line. - - A line is defined as a sequence of elements terminated by a NewlineElement. - """ - - def __next_line(es): - # Returns the next line and the remaining sequence of elements - line = tuple(takewhile(lambda e: not isinstance(e, NewlineElement), es)) - line += (es[len(line)],) - return line, es[len(line):] - - left_elements = tuple(elements) - while left_elements: - line, left_elements = __next_line(left_elements) - yield line - - -def non_empty_elements(elements): - """ - Filters out TokenElement instances with zero tokens. - """ - return filter(lambda e: not (isinstance(e, TokenElement) and not e.tokens), elements) - - -def index(predicate, seq): - """ - Returns the index of the element satisfying the given predicate, or None. - """ - try: - return next(i for (i, e) in enumerate(seq) if predicate(e)) - except StopIteration: - return None diff --git a/pipenv/patched/prettytoml/prettifier/deindentanonymoustable.py b/pipenv/patched/prettytoml/prettifier/deindentanonymoustable.py deleted file mode 100644 index a661f70452..0000000000 --- a/pipenv/patched/prettytoml/prettifier/deindentanonymoustable.py +++ /dev/null @@ -1,43 +0,0 @@ -import operator -from prettytoml.elements import traversal as t, traversal -from itertools import * -from functools import * -from prettytoml.elements.metadata import WhitespaceElement -from prettytoml.elements.table import TableElement -from prettytoml.prettifier import common - - -def deindent_anonymous_table(toml_file_elements): - """ - Rule: Anonymous table should never be indented. 
- """ - - anonymous_table_index = _find_anonymous_table(toml_file_elements) - if anonymous_table_index is None: - return toml_file_elements - - return toml_file_elements[:anonymous_table_index] + \ - [_unindent_table(toml_file_elements[anonymous_table_index])] + \ - toml_file_elements[anonymous_table_index+1:] - - -def _unindent_table(table_element): - table_lines = tuple(common.lines(table_element.sub_elements)) - unindented_lines = tuple(tuple(dropwhile(lambda e: isinstance(e, WhitespaceElement), line)) for line in table_lines) - return TableElement(reduce(operator.concat, unindented_lines)) - - -def _find_anonymous_table(toml_file_elements): - """ - Finds and returns the index of the TableElement comprising the anonymous table or None. - """ - - first_table_index = common.index(t.predicates.table, toml_file_elements) - first_table_header_index = common.index(t.predicates.table_header, toml_file_elements) - - if first_table_header_index is None: - return first_table_index - elif first_table_index < first_table_header_index: - return first_table_index - - diff --git a/pipenv/patched/prettytoml/prettifier/linelength.py b/pipenv/patched/prettytoml/prettifier/linelength.py deleted file mode 100644 index 67d3a1127d..0000000000 --- a/pipenv/patched/prettytoml/prettifier/linelength.py +++ /dev/null @@ -1,62 +0,0 @@ -import operator -from prettytoml import tokens -from prettytoml.prettifier import common -from prettytoml.elements import traversal as t, factory as element_factory -from prettytoml.elements.array import ArrayElement -from prettytoml.elements.atomic import AtomicElement -from prettytoml.elements.inlinetable import InlineTableElement -from prettytoml.elements.table import TableElement -from functools import * - - -MAXIMUM_LINE_LENGTH = 120 - - -def line_length_limiter(toml_file_elements): - """ - Rule: Lines whose lengths exceed 120 characters whose values are strings, arrays should have the array or - string value broken onto multiple lines - """ - return 
tuple(_fixed_table(e) if isinstance(e, TableElement) else e for e in toml_file_elements) - - -def _fixed_table(table_element): - """ - Returns a new TableElement. - """ - assert isinstance(table_element, TableElement) - lines = tuple(common.lines(table_element.sub_elements)) - fixed_lines = tuple(_fixed_line(l) if _line_length(l) > MAXIMUM_LINE_LENGTH else l for l in lines) - return TableElement(sub_elements=tuple(reduce(operator.concat, fixed_lines))) - - -def _line_length(line_elements): - """ - Returns the character length of the serialized elements of the given line. - """ - return sum(len(e.serialized()) for e in line_elements) - - -def _fixed_line(line_elements): - - def line_value_index(): - # Returns index of value element in the line - key_index = t.find_following(line_elements, t.predicates.non_metadata) - return t.find_following(line_elements, t.predicates.non_metadata, key_index) - - def multiline_equivalent(element): - if isinstance(element, AtomicElement) and tokens.is_string(element.first_token): - return element_factory.create_multiline_string(element.value, MAXIMUM_LINE_LENGTH) - elif isinstance(element, ArrayElement): - element.turn_into_multiline() - return element - else: - return element - - line_elements = tuple(line_elements) - value_index = line_value_index() - if value_index >= 0: - return line_elements[:value_index] + (multiline_equivalent(line_elements[value_index]),) + \ - line_elements[value_index+1:] - else: - return line_elements diff --git a/pipenv/patched/prettytoml/prettifier/tableassignment.py b/pipenv/patched/prettytoml/prettifier/tableassignment.py deleted file mode 100644 index 1d35d698b8..0000000000 --- a/pipenv/patched/prettytoml/prettifier/tableassignment.py +++ /dev/null @@ -1,40 +0,0 @@ - -from prettytoml.elements import traversal as t, factory as element_factory - - -def table_assignment_spacing(toml_file_elements): - """ - Rule: Every key and value pair in any table should be separated the triplet - (single space 
character, an assignment character =, single space character) - """ - elements = toml_file_elements[:] - for table_element in (e for e in elements if t.predicates.table(e)): - _do_table(table_element) - return elements - - -def _do_table(table_element): - - elements = table_element.sub_elements - - # Our iterator index - i = float('-inf') - - def next_key(): - return t.find_following(elements, t.predicates.non_metadata, i) - - def next_assignment(): - return t.find_following(elements, t.predicates.op_assignment, next_key()) - - def next_value(): - return t.find_following(elements, t.predicates.non_metadata, next_assignment()) - - while next_key() >= 0: - - del elements[next_key()+1:next_assignment()] - del elements[next_assignment()+1:next_value()] - - elements.insert(next_assignment(), element_factory.create_whitespace_element(1)) - elements.insert(next_value(), element_factory.create_whitespace_element(1)) - - i = t.find_following(elements, t.predicates.newline, i) diff --git a/pipenv/patched/prettytoml/prettifier/tableentrysort.py b/pipenv/patched/prettytoml/prettifier/tableentrysort.py deleted file mode 100644 index 8cbd307b49..0000000000 --- a/pipenv/patched/prettytoml/prettifier/tableentrysort.py +++ /dev/null @@ -1,38 +0,0 @@ -import operator -from prettytoml import tokens -from prettytoml.elements.common import TokenElement -from prettytoml.elements.table import TableElement -from prettytoml.prettifier import common -from functools import * - - -def sort_table_entries(toml_file_elements): - """ - Rule: Entries within a single table should be ordered lexicographically by key - """ - return [_sorted_table(element) if isinstance(element, TableElement) else element for element in toml_file_elements] - - -def _line_key(line_elements): - """ - Given a sequence of elements comprising a single line, returns an orderable value to use in ordering lines. 
- """ - for e in line_elements: - if isinstance(e, TokenElement) and tokens.is_string(e.first_token): - return e.primitive_value - return 'z' * 10 # Metadata lines should be at the end - - -def _sorted_table(table): - """ - Returns another TableElement where the table entries are sorted lexicographically by key. - """ - assert isinstance(table, TableElement) - - # Discarding TokenElements with no tokens in them - table_elements = common.non_empty_elements(table.sub_elements) - lines = tuple(common.lines(table_elements)) - sorted_lines = sorted(lines, key=_line_key) - sorted_elements = reduce(operator.concat, sorted_lines) - - return TableElement(sorted_elements) diff --git a/pipenv/patched/prettytoml/prettifier/tableindent.py b/pipenv/patched/prettytoml/prettifier/tableindent.py deleted file mode 100644 index 3b60883d56..0000000000 --- a/pipenv/patched/prettytoml/prettifier/tableindent.py +++ /dev/null @@ -1,49 +0,0 @@ -from prettytoml import tokens -from prettytoml.elements import traversal as t, factory as element_factory -from prettytoml.tokens import py2toml - - -def table_entries_should_be_uniformly_indented(toml_file_elements): - """ - Rule: Nth-level table sections should be indented by (N-1)*2 spaces - """ - elements = toml_file_elements[:] - for (i, e) in enumerate(elements): - if t.predicates.table_header(e): - table = elements[t.find_following(elements, t.predicates.table, i)] - _do_table_header(e) - _do_table(table, len(e.names)) - return elements - - -def _do_table_header(table_header): - indent_start = 0 - indent_end = next(i for (i, token) in enumerate(table_header.tokens) if token.type != tokens.TYPE_WHITESPACE) - - del table_header.tokens[indent_start:indent_end] - table_header.tokens.insert(0, py2toml.create_whitespace(' ' * ((len(table_header.names)-1) * 2))) - - -def _do_table(table_element, table_level): - - elements = table_element.sub_elements - - # Iterator index - i = float('-inf') - - def first_indent(): - return t.find_following(elements, 
t.predicates.whitespace, i) - - def next_non_metadata(): - return t.find_following(elements, t.predicates.non_metadata, i) - - def next_newline(): - return t.find_following(elements, t.predicates.newline, next_non_metadata()) - - while next_non_metadata() >= 0: - if first_indent() >= 0: - del elements[first_indent():next_non_metadata()] - - elements.insert(next_non_metadata(), element_factory.create_whitespace_element((table_level-1)*2)) - - i = next_newline() diff --git a/pipenv/patched/prettytoml/prettifier/tablesep.py b/pipenv/patched/prettytoml/prettifier/tablesep.py deleted file mode 100644 index 059007f328..0000000000 --- a/pipenv/patched/prettytoml/prettifier/tablesep.py +++ /dev/null @@ -1,31 +0,0 @@ - -from prettytoml.elements import traversal as t, factory as element_factory -from prettytoml.elements.metadata import WhitespaceElement, NewlineElement -from prettytoml.elements.table import TableElement - - -def table_separation(toml_file_elements): - """ - Rule: Tables should always be separated by an empty line. 
- """ - elements = toml_file_elements[:] - for element in elements: - if isinstance(element, TableElement): - _do_table(element.sub_elements) - return elements - - -def _do_table(table_elements): - - while table_elements and isinstance(table_elements[-1], WhitespaceElement): - del table_elements[-1] - - if not table_elements: - return - - if isinstance(table_elements[-1], NewlineElement): - last_non_metadata_i = t.find_previous(table_elements, t.predicates.non_metadata) - del table_elements[last_non_metadata_i+1:] - - table_elements.append(element_factory.create_newline_element()) - table_elements.append(element_factory.create_newline_element()) diff --git a/pipenv/patched/prettytoml/prettifier/test_commentspace.py b/pipenv/patched/prettytoml/prettifier/test_commentspace.py deleted file mode 100644 index 53d96d76b1..0000000000 --- a/pipenv/patched/prettytoml/prettifier/test_commentspace.py +++ /dev/null @@ -1,28 +0,0 @@ - -from .common import assert_prettifier_works -from .commentspace import comment_space - - -def test_comment_space(): - - toml_text = """ -my_key = string -id = 12 # My special ID - -[section.name] -headerk = false -# Own-line comment should stay the same -other_key = "value" -""" - - expected_toml_text = """ -my_key = string -id = 12\t# My special ID - -[section.name] -headerk = false -# Own-line comment should stay the same -other_key = "value" -""" - - assert_prettifier_works(toml_text, expected_toml_text, comment_space) diff --git a/pipenv/patched/prettytoml/prettifier/test_deindentanonymoustable.py b/pipenv/patched/prettytoml/prettifier/test_deindentanonymoustable.py deleted file mode 100644 index 10a6d2c800..0000000000 --- a/pipenv/patched/prettytoml/prettifier/test_deindentanonymoustable.py +++ /dev/null @@ -1,22 +0,0 @@ - -""" - This testing module depends on all the other modules. 
-""" - -from .deindentanonymoustable import deindent_anonymous_table -from .common import assert_prettifier_works - - -def test_anon_table_indent(): - toml_text = """ - key=value - another_key =44 -noname = me -""" - - expected_toml_text = """ -key=value -another_key =44 -noname = me -""" - assert_prettifier_works(toml_text, expected_toml_text, deindent_anonymous_table) diff --git a/pipenv/patched/prettytoml/prettifier/test_linelength.py b/pipenv/patched/prettytoml/prettifier/test_linelength.py deleted file mode 100644 index e4ab8fcbaf..0000000000 --- a/pipenv/patched/prettytoml/prettifier/test_linelength.py +++ /dev/null @@ -1,39 +0,0 @@ -from .linelength import line_length_limiter -from .common import assert_prettifier_works, elements_to_text, text_to_elements -import pytoml - - -def test_splitting_string(): - toml_text = """ -k = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. In et lectus nec erat condimentum scelerisque gravida sed ipsum. Mauris non orci tincidunt, viverra enim eget, tincidunt orci. Sed placerat nibh vitae ante maximus egestas maximus eu quam. Praesent vehicula mauris vestibulum, mattis turpis sollicitudin, aliquam felis. Pellentesque volutpat pharetra purus vel finibus. Vestibulum sed tempus dui. Maecenas auctor sit amet diam et porta. Morbi id libero at elit ultricies porta vel vitae nullam. " -""" - - expected_toml_text = """ -k = \"\"\" -Lorem ipsum dolor sit amet, consectetur adipiscing elit. In et lectus nec erat condimentum scelerisque gravida sed \\ -ipsum. Mauris non orci tincidunt, viverra enim eget, tincidunt orci. Sed placerat nibh vitae ante maximus egestas \\ -maximus eu quam. Praesent vehicula mauris vestibulum, mattis turpis sollicitudin, aliquam felis. Pellentesque volutpat \\ -pharetra purus vel finibus. Vestibulum sed tempus dui. Maecenas auctor sit amet diam et porta. Morbi id libero at elit \\ -ultricies porta vel vitae nullam. 
\"\"\" -""" - assert_prettifier_works(toml_text, expected_toml_text, line_length_limiter) - - -def test_splitting_array(): - toml_text = """ - -somethingweird = false - -[section] -k = [4, 8, 15, 16, 23, 42, 4, 8, 15, 16, 23, 42, 4, 8, 15, 16, 23, 42, 4, 8, 15, 16, 23, 42, 4, 8, 15, 16, 23, 42, 4, 8, 15, 16, 23, 42, 4, 8, 15, 16, 23, 42] - - -[data] -id = 12 - -""" - - prettified = elements_to_text(line_length_limiter(text_to_elements(toml_text))) - - assert pytoml.loads(prettified) == pytoml.loads(toml_text) - assert all(len(line) < 120 for line in prettified.split('\n')) diff --git a/pipenv/patched/prettytoml/prettifier/test_tableassignment.py b/pipenv/patched/prettytoml/prettifier/test_tableassignment.py deleted file mode 100644 index 82fcc174d5..0000000000 --- a/pipenv/patched/prettytoml/prettifier/test_tableassignment.py +++ /dev/null @@ -1,29 +0,0 @@ - -from .tableassignment import table_assignment_spacing -from .common import assert_prettifier_works - - -def test_table_assignment_spacing(): - toml_text = """ - key1= "my value" - key2 =42 - keys = [4, 5,1] - - [section] - key1= "my value" - key2 =42 - keys = [4, 5,1] -""" - - expected_prettified = """ - key1 = "my value" - key2 = 42 - keys = [4, 5,1] - - [section] - key1 = "my value" - key2 = 42 - keys = [4, 5,1] -""" - - assert_prettifier_works(toml_text, expected_prettified, table_assignment_spacing) diff --git a/pipenv/patched/prettytoml/prettifier/test_tableentrysort.py b/pipenv/patched/prettytoml/prettifier/test_tableentrysort.py deleted file mode 100644 index 0cc39f7885..0000000000 --- a/pipenv/patched/prettytoml/prettifier/test_tableentrysort.py +++ /dev/null @@ -1,45 +0,0 @@ - -from .tableentrysort import sort_table_entries -from .common import assert_prettifier_works - - -def test_table_sorting(): - toml_text = """description = "" -firstname = "adnan" -lastname = "fatayerji" -git_aydo = "" -groups = ["sales", "dubai", "mgmt"] -skype = "" -emails = ["adnan@incubaid.com", - "fatayera@incubaid.com", - 
"adnan.fatayerji@incubaid.com", - "adnan@greenitglobe.com", - "fatayera@greenitglobe.com", - "adnan.fatayerji@greenitglobe.com"] -# I really like this table -id = "fatayera" -git_github = "" -telegram = "971507192009" -mobiles = ["971507192009"] -""" - - prettified = """description = "" -emails = ["adnan@incubaid.com", - "fatayera@incubaid.com", - "adnan.fatayerji@incubaid.com", - "adnan@greenitglobe.com", - "fatayera@greenitglobe.com", - "adnan.fatayerji@greenitglobe.com"] -firstname = "adnan" -git_aydo = "" -git_github = "" -groups = ["sales", "dubai", "mgmt"] -# I really like this table -id = "fatayera" -lastname = "fatayerji" -mobiles = ["971507192009"] -skype = "" -telegram = "971507192009" -""" - - assert_prettifier_works(toml_text, prettified, sort_table_entries) diff --git a/pipenv/patched/prettytoml/prettifier/test_tableindent.py b/pipenv/patched/prettytoml/prettifier/test_tableindent.py deleted file mode 100644 index a37f73a721..0000000000 --- a/pipenv/patched/prettytoml/prettifier/test_tableindent.py +++ /dev/null @@ -1,25 +0,0 @@ - -from .tableindent import table_entries_should_be_uniformly_indented -from .common import assert_prettifier_works - - -def test_table_entries_should_be_uniformly_indented(): - toml_text = """ - [firstlevel] -hello = "my name" - my_id = 12 - - [firstlevel.secondlevel] - my_truth = False -""" - - expected_toml_text = """ -[firstlevel] -hello = "my name" -my_id = 12 - - [firstlevel.secondlevel] - my_truth = False -""" - - assert_prettifier_works(toml_text, expected_toml_text, table_entries_should_be_uniformly_indented) diff --git a/pipenv/patched/prettytoml/prettifier/test_tablesep.py b/pipenv/patched/prettytoml/prettifier/test_tablesep.py deleted file mode 100644 index a8a81d52a4..0000000000 --- a/pipenv/patched/prettytoml/prettifier/test_tablesep.py +++ /dev/null @@ -1,34 +0,0 @@ - -from .tablesep import table_separation -from .common import assert_prettifier_works - - -def test_table_separation(): - - toml_text = """key1 = 
"value1" -key2 = 22 -[section] -k = false -m= "true" - - - -[another.section] -l = "t" -creativity = "on vacation" -""" - - expected_toml_text = """key1 = "value1" -key2 = 22 - -[section] -k = false -m= "true" - -[another.section] -l = "t" -creativity = "on vacation" - -""" - - assert_prettifier_works(toml_text, expected_toml_text, table_separation) diff --git a/pipenv/patched/prettytoml/test_prettifier.py b/pipenv/patched/prettytoml/test_prettifier.py deleted file mode 100644 index f702fb01b8..0000000000 --- a/pipenv/patched/prettytoml/test_prettifier.py +++ /dev/null @@ -1,12 +0,0 @@ - -from .prettifier import prettify -from .prettifier.common import assert_prettifier_works -import pytoml - - -def test_prettifying_against_humanly_verified_sample(): - toml_source = open('sample.toml').read() - expected = open('sample-prettified.toml').read() - - assert_prettifier_works(toml_source, expected, prettify) - assert pytoml.loads(toml_source) == pytoml.loads(expected) diff --git a/pipenv/patched/prettytoml/test_util.py b/pipenv/patched/prettytoml/test_util.py deleted file mode 100644 index b741abfa63..0000000000 --- a/pipenv/patched/prettytoml/test_util.py +++ /dev/null @@ -1,22 +0,0 @@ -from prettytoml.util import is_sequence_like, is_dict_like, chunkate_string - - -def test_is_sequence_like(): - assert is_sequence_like([1, 3, 4]) - assert not is_sequence_like(42) - - -def test_is_dict_like(): - assert is_dict_like({'name': False}) - assert not is_dict_like(42) - assert not is_dict_like([4, 8, 15]) - - -def test_chunkate_string(): - - text = """Lorem ipsum dolor sit amet, consectetur adipiscing elit. In et lectus nec erat condimentum scelerisque gravida sed ipsum. Mauris non orci tincidunt, viverra enim eget, tincidunt orci. Sed placerat nibh vitae ante maximus egestas maximus eu quam. Praesent vehicula mauris vestibulum, mattis turpis sollicitudin, aliquam felis. Pellentesque volutpat pharetra purus vel finibus. Vestibulum sed tempus dui. 
Maecenas auctor sit amet diam et porta. Morbi id libero at elit ultricies porta vel vitae nullam. """ - - chunks = chunkate_string(text, 50) - - assert ''.join(chunks) == text - assert all(len(chunk) <= 50 for chunk in chunks) diff --git a/pipenv/patched/prettytoml/tokens/__init__.py b/pipenv/patched/prettytoml/tokens/__init__.py deleted file mode 100644 index a5b2e9aef0..0000000000 --- a/pipenv/patched/prettytoml/tokens/__init__.py +++ /dev/null @@ -1,136 +0,0 @@ - -""" -TOML lexical tokens. -""" - -class TokenType: - """ - A TokenType is a concrete type of a source token along with a defined priority and a higher-order kind. - - The priority will be used in determining the tokenization behaviour of the lexer in the following manner: - whenever more than one token is recognizable as the next possible token and they are all of equal source - length, this priority is going to be used to break the tie by favoring the token type of the lowest priority - value. A TokenType instance is naturally ordered by its priority. 
- """ - - def __init__(self, name, priority, is_metadata): - self._priority = priority - self._name = name - self._is_metadata = is_metadata - - @property - def is_metadata(self): - return self._is_metadata - - @property - def priority(self): - return self._priority - - def __repr__(self): - return "{}-{}".format(self.priority, self._name) - - def __lt__(self, other): - return isinstance(other, TokenType) and self._priority < other.priority - -# Possible types of tokens -TYPE_BOOLEAN = TokenType('boolean', 0, is_metadata=False) -TYPE_INTEGER = TokenType('integer', 0, is_metadata=False) -TYPE_OP_COMMA = TokenType('comma', 0, is_metadata=True) -TYPE_OP_SQUARE_LEFT_BRACKET = TokenType('square_left_bracket', 0, is_metadata=True) -TYPE_OP_SQUARE_RIGHT_BRACKET = TokenType('square_right_bracket', 0, is_metadata=True) -TYPE_OP_CURLY_LEFT_BRACKET = TokenType('curly_left_bracket', 0, is_metadata=True) -TYPE_OP_CURLY_RIGHT_BRACKET = TokenType('curly_right_bracket', 0, is_metadata=True) -TYPE_OP_ASSIGNMENT = TokenType('assignment', 0, is_metadata=True) -TYPE_OP_DOUBLE_SQUARE_LEFT_BRACKET = TokenType('double_square_left_bracket', 0, is_metadata=True) -TYPE_OP_DOUBLE_SQUARE_RIGHT_BRACKET = TokenType('double_square_right_bracket', 0, is_metadata=True) -TYPE_FLOAT = TokenType('float', 1, is_metadata=False) -TYPE_DATE = TokenType('date', 40, is_metadata=False) -TYPE_OPT_DOT = TokenType('dot', 40, is_metadata=True) -TYPE_BARE_STRING = TokenType('bare_string', 50, is_metadata=False) -TYPE_STRING = TokenType('string', 90, is_metadata=False) -TYPE_MULTILINE_STRING = TokenType('multiline_string', 90, is_metadata=False) -TYPE_LITERAL_STRING = TokenType('literal_string', 90, is_metadata=False) -TYPE_MULTILINE_LITERAL_STRING = TokenType('multiline_literal_string', 90, is_metadata=False) -TYPE_NEWLINE = TokenType('newline', 91, is_metadata=True) -TYPE_WHITESPACE = TokenType('whitespace', 93, is_metadata=True) -TYPE_COMMENT = TokenType('comment', 95, is_metadata=True) - - -def 
is_operator(token): - """ - Returns True if the given token is an operator token. - """ - return token.type in ( - TYPE_OP_COMMA, - TYPE_OP_SQUARE_LEFT_BRACKET, - TYPE_OP_SQUARE_RIGHT_BRACKET, - TYPE_OP_DOUBLE_SQUARE_LEFT_BRACKET, - TYPE_OP_DOUBLE_SQUARE_RIGHT_BRACKET, - TYPE_OP_CURLY_LEFT_BRACKET, - TYPE_OP_CURLY_RIGHT_BRACKET, - TYPE_OP_ASSIGNMENT, - TYPE_OPT_DOT, - ) - - -def is_string(token): - return token.type in ( - TYPE_STRING, - TYPE_MULTILINE_STRING, - TYPE_LITERAL_STRING, - TYPE_BARE_STRING, - TYPE_MULTILINE_LITERAL_STRING - ) - - -class Token: - """ - A token/lexeme in a TOML source file. - - A Token instance is naturally ordered by its type. - """ - - def __init__(self, _type, source_substring, col=None, row=None): - self._source_substring = source_substring - self._type = _type - self._col = col - self._row = row - - def __eq__(self, other): - if not isinstance(other, Token): - return False - return self.source_substring == other.source_substring and self.type == other.type - - @property - def col(self): - """ - Column number (1-indexed). - """ - return self._col - - @property - def row(self): - """ - Row number (1-indexed). - """ - return self._row - - @property - def type(self): - """ - One of of the TOKEN_TYPE_* constants. - """ - return self._type - - @property - def source_substring(self): - """ - The substring of the initial source file containing this token. 
- """ - return self._source_substring - - def __lt__(self, other): - return isinstance(other, Token) and self.type < other.type - - def __repr__(self): - return "{}: {}".format(self.type, self.source_substring) diff --git a/pipenv/patched/prettytoml/tokens/errors.py b/pipenv/patched/prettytoml/tokens/errors.py deleted file mode 100644 index d40cb8e997..0000000000 --- a/pipenv/patched/prettytoml/tokens/errors.py +++ /dev/null @@ -1,13 +0,0 @@ -from prettytoml.errors import TOMLError - - -class DeserializationError(TOMLError): - pass - - -class BadEscapeCharacter(TOMLError): - pass - - -class MalformedDateError(DeserializationError): - pass diff --git a/pipenv/patched/prettytoml/tokens/py2toml.py b/pipenv/patched/prettytoml/tokens/py2toml.py deleted file mode 100644 index 2decd02102..0000000000 --- a/pipenv/patched/prettytoml/tokens/py2toml.py +++ /dev/null @@ -1,154 +0,0 @@ - -""" -A converter of python values to TOML Token instances. -""" -from __future__ import unicode_literals -import codecs -import datetime -import six -from prettytoml import tokens -import re -from prettytoml.errors import TOMLError -from prettytoml.tokens import Token -from prettytoml.util import chunkate_string - - -class NotPrimitiveError(TOMLError): - pass - - -_operator_tokens_by_type = { - tokens.TYPE_OP_SQUARE_LEFT_BRACKET: tokens.Token(tokens.TYPE_OP_SQUARE_LEFT_BRACKET, u'['), - tokens.TYPE_OP_SQUARE_RIGHT_BRACKET: tokens.Token(tokens.TYPE_OP_SQUARE_RIGHT_BRACKET, u']'), - tokens.TYPE_OP_DOUBLE_SQUARE_LEFT_BRACKET: tokens.Token(tokens.TYPE_OP_DOUBLE_SQUARE_LEFT_BRACKET, u'[['), - tokens.TYPE_OP_DOUBLE_SQUARE_RIGHT_BRACKET: tokens.Token(tokens.TYPE_OP_DOUBLE_SQUARE_RIGHT_BRACKET, u']]'), - tokens.TYPE_OP_COMMA: tokens.Token(tokens.TYPE_OP_COMMA, u','), - tokens.TYPE_NEWLINE: tokens.Token(tokens.TYPE_NEWLINE, u'\n'), - tokens.TYPE_OPT_DOT: tokens.Token(tokens.TYPE_OPT_DOT, u'.'), -} - - -def operator_token(token_type): - return _operator_tokens_by_type[token_type] - - -def 
create_primitive_token(value, multiline_strings_allowed=True): - """ - Creates and returns a single token for the given primitive atomic value. - - Raises NotPrimitiveError when the given value is not a primitive atomic value - """ - if value is None: - return create_primitive_token('') - elif isinstance(value, bool): - return tokens.Token(tokens.TYPE_BOOLEAN, u'true' if value else u'false') - elif isinstance(value, int): - return tokens.Token(tokens.TYPE_INTEGER, u'{}'.format(value)) - elif isinstance(value, float): - return tokens.Token(tokens.TYPE_FLOAT, u'{}'.format(value)) - elif isinstance(value, (datetime.datetime, datetime.date, datetime.time)): - s = value.isoformat() - if s.endswith('+00:00'): - s = s[:-6] + 'Z' - return tokens.Token(tokens.TYPE_DATE, s) - elif isinstance(value, six.string_types): - return create_string_token(value, multiline_strings_allowed=multiline_strings_allowed) - - raise NotPrimitiveError("{} of type {}".format(value, type(value))) - - -_bare_string_regex = re.compile('^[a-zA-Z_-]*$') - - -def create_string_token(text, bare_string_allowed=False, multiline_strings_allowed=True): - """ - Creates and returns a single string token. - - Raises ValueError on non-string input. 
- """ - - if not isinstance(text, six.string_types): - raise ValueError('Given value must be a string') - - if text == '': - return tokens.Token(tokens.TYPE_STRING, '""'.format(_escape_single_line_quoted_string(text))) - elif bare_string_allowed and _bare_string_regex.match(text): - return tokens.Token(tokens.TYPE_BARE_STRING, text) - elif multiline_strings_allowed and (len(tuple(c for c in text if c == '\n')) >= 2 or len(text) > 80): - # If containing two or more newlines or is longer than 80 characters we'll use the multiline string format - return _create_multiline_string_token(text) - else: - return tokens.Token(tokens.TYPE_STRING, '"{}"'.format(_escape_single_line_quoted_string(text))) - - -def _escape_single_line_quoted_string(text): - text = text.decode('utf-8') if isinstance(text, six.binary_type) else text - start = 0 - i = 0 - res = [] - _escapes = {'\n': '\\n', '\r': '\\r', '\\': '\\\\', '\t': '\\t', - '\b': '\\b', '\f': '\\f', '"': '\\"'} - - def flush(): - if start < i: - res.append(text[start:i]) - return i + 1 - - while i < len(text): - c = text[i] - if c in _escapes: - start = flush() - res.append(_escapes[c]) - elif ord(c) < 0x20: - start = flush() - res.append('\\u%04x' % ord(c)) - i += 1 - - flush() - return ''.join(res) - - -def _create_multiline_string_token(text): - escaped = text.replace(u'"""', u'\"\"\"') - if len(escaped) > 50: - return tokens.Token(tokens.TYPE_MULTILINE_STRING, u'"""\n{}\\\n"""'.format(_break_long_text(escaped))) - else: - return tokens.Token(tokens.TYPE_MULTILINE_STRING, u'"""{}"""'.format(escaped)) - - -def _break_long_text(text, maximum_length=75): - """ - Breaks into lines of 75 character maximum length that are terminated by a backslash. 
- """ - - def next_line(remaining_text): - - # Returns a line and the remaining text - - if '\n' in remaining_text and remaining_text.index('\n') < maximum_length: - i = remaining_text.index('\n') - return remaining_text[:i+1], remaining_text[i+2:] - elif len(remaining_text) > maximum_length and ' ' in remaining_text: - i = remaining_text[:maximum_length].rfind(' ') - return remaining_text[:i+1] + '\\\n', remaining_text[i+2:] - else: - return remaining_text, '' - - remaining_text = text - lines = [] - while remaining_text: - line, remaining_text = next_line(remaining_text) - lines += [line] - - return ''.join(lines) - - -def create_whitespace(source_substring): - return Token(tokens.TYPE_WHITESPACE, source_substring) - - -def create_multiline_string(text, maximum_line_length=120): - def escape(t): - return t.replace(u'"""', six.u(r'\"\"\"')) - source_substring = u'"""\n{}"""'.format(u'\\\n'.join(chunkate_string(escape(text), maximum_line_length))) - return Token(tokens.TYPE_MULTILINE_STRING, source_substring) diff --git a/pipenv/patched/prettytoml/tokens/test_py2toml.py b/pipenv/patched/prettytoml/tokens/test_py2toml.py deleted file mode 100644 index 0d029c5dc1..0000000000 --- a/pipenv/patched/prettytoml/tokens/test_py2toml.py +++ /dev/null @@ -1,69 +0,0 @@ -import datetime - -import strict_rfc3339 - -from prettytoml import tokens -from prettytoml.tokens import py2toml - - -def test_string(): - assert py2toml.create_string_token('fawzy', bare_string_allowed=True) == tokens.Token(tokens.TYPE_BARE_STRING, 'fawzy') - assert py2toml.create_primitive_token('I am a "cr\'azy" sentence.') == \ - tokens.Token(tokens.TYPE_STRING, '"I am a \\"cr\'azy\\" sentence."') - - -def test_multiline_string(): - text = 'The\nSuper\nT"""OML"""\n\nIs coming' - - primitive_token = py2toml.create_primitive_token(text) - - assert primitive_token.source_substring == '"""The\nSuper\nT\"\"\"OML\"\"\"\n\nIs coming"""' - - -def test_long_string(): - text = "Lorem ipsum dolor sit amet, consectetur 
adipiscing elit. Suspendisse faucibus nibh id urna euismod, " \ - "vitae blandit nisi blandit. Nam eu odio ex. Praesent iaculis sapien justo. Proin vehicula orci rhoncus " \ - "risus mattis cursus. Sed quis commodo diam. Morbi dictum fermentum ex. Ut augue lorem, facilisis eu " \ - "posuere ut, ullamcorper et quam. Donec porta neque eget erat lacinia, in convallis elit scelerisque. " \ - "Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Praesent " \ - "felis metus, venenatis eu aliquam vel, fringilla in turpis. Praesent interdum pulvinar enim, et mattis " \ - "urna dapibus et. Sed ut egestas mauris. Etiam eleifend dui." - - primitive_token = py2toml.create_primitive_token(text) - - assert primitive_token.source_substring[3:-3] == r""" -Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse \ -aucibus nibh id urna euismod, vitae blandit nisi blandit. Nam eu odio ex. \ -raesent iaculis sapien justo. Proin vehicula orci rhoncus risus mattis \ -ursus. Sed quis commodo diam. Morbi dictum fermentum ex. Ut augue lorem, \ -acilisis eu posuere ut, ullamcorper et quam. Donec porta neque eget erat \ -acinia, in convallis elit scelerisque. Class aptent taciti sociosqu ad \ -itora torquent per conubia nostra, per inceptos himenaeos. Praesent felis \ -etus, venenatis eu aliquam vel, fringilla in turpis. Praesent interdum \ -ulvinar enim, et mattis urna dapibus et. Sed ut egestas mauris. 
Etiam \ -leifend dui.\ -""" - - -def test_int(): - assert py2toml.create_primitive_token(42) == tokens.Token(tokens.TYPE_INTEGER, '42') - - -def test_float(): - assert py2toml.create_primitive_token(4.2) == tokens.Token(tokens.TYPE_FLOAT, '4.2') - - -def test_bool(): - assert py2toml.create_primitive_token(False) == tokens.Token(tokens.TYPE_BOOLEAN, 'false') - assert py2toml.create_primitive_token(True) == tokens.Token(tokens.TYPE_BOOLEAN, 'true') - - -def test_date(): - ts = strict_rfc3339.rfc3339_to_timestamp('1979-05-27T00:32:00-07:00') - dt = datetime.datetime.fromtimestamp(ts) - assert py2toml.create_primitive_token(dt) == tokens.Token(tokens.TYPE_DATE, '1979-05-27T07:32:00Z') - - -def test_none(): - t = py2toml.create_primitive_token(None) - assert t.type == tokens.TYPE_STRING and t.source_substring == '""' diff --git a/pipenv/patched/prettytoml/tokens/test_toml2py.py b/pipenv/patched/prettytoml/tokens/test_toml2py.py deleted file mode 100644 index ce1663654d..0000000000 --- a/pipenv/patched/prettytoml/tokens/test_toml2py.py +++ /dev/null @@ -1,86 +0,0 @@ -from datetime import datetime - -import pytz - -from prettytoml import tokens -from prettytoml.tokens import toml2py -from prettytoml.tokens.errors import BadEscapeCharacter, DeserializationError - - -def test_integer(): - t1 = tokens.Token(tokens.TYPE_INTEGER, '42') - t2 = tokens.Token(tokens.TYPE_INTEGER, '1_001_2') - - assert toml2py.deserialize(t1) == 42 - assert toml2py.deserialize(t2) == 10012 - - -def test_float(): - tokens_and_values = ( - ('4.2', 4.2), - ('12e2', 12e2), - ('1_000e2', 1e5), - ('314.1e-2', 3.141) - ) - for token_string, value in tokens_and_values: - token = tokens.Token(tokens.TYPE_FLOAT, token_string) - assert toml2py.deserialize(token) == value - - -def test_string(): - - t0 = tokens.Token(tokens.TYPE_BARE_STRING, 'fawzy') - assert toml2py.deserialize(t0) == 'fawzy' - - t1 = tokens.Token(tokens.TYPE_STRING, '"I\'m a string. \\"You can quote me\\". 
Name\\tJos\\u00E9\\nLocation\\tSF."') - assert toml2py.deserialize(t1) == u'I\'m a string. "You can quote me". Name\tJos\xe9\nLocation\tSF.' - - t2 = tokens.Token(tokens.TYPE_MULTILINE_STRING, '"""\nRoses are red\nViolets are blue"""') - assert toml2py.deserialize(t2) == 'Roses are red\nViolets are blue' - - t3_str = '"""\nThe quick brown \\\n\n\n fox jumps over \\\n the lazy dog."""' - t3 = tokens.Token(tokens.TYPE_MULTILINE_STRING, t3_str) - assert toml2py.deserialize(t3) == 'The quick brown fox jumps over the lazy dog.' - - t4_str = '"""\\\n The quick brown \\\n fox jumps over \\\n the lazy dog.\\\n """' - t4 = tokens.Token(tokens.TYPE_MULTILINE_STRING, t4_str) - assert toml2py.deserialize(t4) == 'The quick brown fox jumps over the lazy dog.' - - t5 = tokens.Token(tokens.TYPE_LITERAL_STRING, r"'C:\Users\nodejs\templates'") - assert toml2py.deserialize(t5) == r'C:\Users\nodejs\templates' - - t6_str = "'''\nThe first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n'''" - t6 = tokens.Token(tokens.TYPE_MULTILINE_LITERAL_STRING, t6_str) - assert toml2py.deserialize(t6) == 'The first newline is\ntrimmed in raw strings.\n All' \ - ' other whitespace\n is preserved.\n' - - -def test_date(): - t0 = tokens.Token(tokens.TYPE_DATE, '1979-05-27T07:32:00Z') - assert toml2py.deserialize(t0) == datetime(1979, 5, 27, 7, 32, tzinfo=pytz.utc) - - t1 = tokens.Token(tokens.TYPE_DATE, '1979-05-27T00:32:00-07:00') - assert toml2py.deserialize(t1) == datetime(1979, 5, 27, 7, 32, tzinfo=pytz.utc) - - t3 = tokens.Token(tokens.TYPE_DATE, '1987-07-05T17:45:00') - try: - toml2py.deserialize(t3) - assert False, 'Should detect malformed date' - except DeserializationError: - pass - - -def test_unescaping_a_string(): - - bad_escapes = ( - r"This string has a bad \a escape character.", - r'\x33', - ) - - for source in bad_escapes: - # Should complain about bad escape jobs - try: - toml2py._unescape_str(source) - assert False, "Should have thrown an exception for: " + 
source - except BadEscapeCharacter: - pass diff --git a/pipenv/patched/prettytoml/tokens/toml2py.py b/pipenv/patched/prettytoml/tokens/toml2py.py deleted file mode 100644 index 56804437fd..0000000000 --- a/pipenv/patched/prettytoml/tokens/toml2py.py +++ /dev/null @@ -1,130 +0,0 @@ -from __future__ import unicode_literals -import re -import string -import iso8601 -from prettytoml import tokens -from prettytoml.tokens import TYPE_BOOLEAN, TYPE_INTEGER, TYPE_FLOAT, TYPE_DATE, \ - TYPE_MULTILINE_STRING, TYPE_BARE_STRING, TYPE_MULTILINE_LITERAL_STRING, TYPE_LITERAL_STRING, \ - TYPE_STRING -import codecs -import six -from prettytoml.tokens.errors import MalformedDateError -from .errors import BadEscapeCharacter -import functools -import operator - - -def deserialize(token): - """ - Deserializes the value of a single tokens.Token instance based on its type. - - Raises DeserializationError when appropriate. - """ - - if token.type == TYPE_BOOLEAN: - return _to_boolean(token) - elif token.type == TYPE_INTEGER: - return _to_int(token) - elif token.type == TYPE_FLOAT: - return _to_float(token) - elif token.type == TYPE_DATE: - return _to_date(token) - elif token.type in (TYPE_STRING, TYPE_MULTILINE_STRING, TYPE_BARE_STRING, - TYPE_LITERAL_STRING, TYPE_MULTILINE_LITERAL_STRING): - return _to_string(token) - else: - raise Exception('This should never happen!') - - -def _unescape_str(text): - """ - Unescapes a string according the TOML spec. Raises BadEscapeCharacter when appropriate. 
- """ - text = text.decode('utf-8') if isinstance(text, six.binary_type) else text - tokens = [] - i = 0 - basicstr_re = re.compile(r'[^"\\\000-\037]*') - unicode_re = re.compile(r'[uU]((?<=u)[a-fA-F0-9]{4}|(?<=U)[a-fA-F0-9]{8})') - escapes = { - 'b': '\b', - 't': '\t', - 'n': '\n', - 'f': '\f', - 'r': '\r', - '\\': '\\', - '"': '"', - '/': '/', - "'": "'" - } - while True: - m = basicstr_re.match(text, i) - i = m.end() - tokens.append(m.group()) - if i == len(text) or text[i] != '\\': - break - else: - i += 1 - if unicode_re.match(text, i): - m = unicode_re.match(text, i) - i = m.end() - tokens.append(six.unichr(int(m.group(1), 16))) - else: - if text[i] not in escapes: - raise BadEscapeCharacter - tokens.append(escapes[text[i]]) - i += 1 - return ''.join(tokens) - - -def _to_string(token): - if token.type == tokens.TYPE_BARE_STRING: - return token.source_substring - - elif token.type == tokens.TYPE_STRING: - escaped = token.source_substring[1:-1] - return _unescape_str(escaped) - - elif token.type == tokens.TYPE_MULTILINE_STRING: - escaped = token.source_substring[3:-3] - - # Drop the first newline if existed - if escaped and escaped[0] == '\n': - escaped = escaped[1:] - - # Remove all occurrences of a slash-newline-zero-or-more-whitespace patterns - escaped = re.sub(r'\\\n\s*', repl='', string=escaped, flags=re.DOTALL) - return _unescape_str(escaped) - - elif token.type == tokens.TYPE_LITERAL_STRING: - return token.source_substring[1:-1] - - elif token.type == tokens.TYPE_MULTILINE_LITERAL_STRING: - text = token.source_substring[3:-3] - if text[0] == '\n': - text = text[1:] - return text - - raise RuntimeError('Control should never reach here.') - - -def _to_int(token): - return int(token.source_substring.replace('_', '')) - - -def _to_float(token): - assert token.type == tokens.TYPE_FLOAT - string = token.source_substring.replace('_', '') - return float(string) - - -def _to_boolean(token): - return token.source_substring == 'true' - - -_correct_date_format = 
re.compile(r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|(\+|-)\d{2}:\d{2})') - - -def _to_date(token): - if not _correct_date_format.match(token.source_substring): - raise MalformedDateError - return iso8601.parse_date(token.source_substring) diff --git a/pipenv/patched/prettytoml/util.py b/pipenv/patched/prettytoml/util.py deleted file mode 100644 index 155f00e207..0000000000 --- a/pipenv/patched/prettytoml/util.py +++ /dev/null @@ -1,141 +0,0 @@ -import math -import itertools - - -def is_sequence_like(x): - """ - Returns True if x exposes a sequence-like interface. - """ - required_attrs = ( - '__len__', - '__getitem__' - ) - return all(hasattr(x, attr) for attr in required_attrs) - - -def is_dict_like(x): - """ - Returns True if x exposes a dict-like interface. - """ - required_attrs = ( - '__len__', - '__getitem__', - 'keys', - 'values', - ) - return all(hasattr(x, attr) for attr in required_attrs) - - -def join_with(iterable, separator): - """ - Joins elements from iterable with separator and returns the produced sequence as a list. - - separator must be addable to a list. - """ - inputs = list(iterable) - b = [] - for i, element in enumerate(inputs): - if isinstance(element, (list, tuple, set)): - b += tuple(element) - else: - b += [element] - if i < len(inputs)-1: - b += separator - return b - - -def chunkate_string(text, length): - """ - Iterates over the given seq in chunks of at maximally the given length. Will never break a whole word. 
- """ - iterator_index = 0 - - def next_newline(): - try: - return next(i for (i, c) in enumerate(text) if i > iterator_index and c == '\n') - except StopIteration: - return len(text) - - def next_breaker(): - try: - return next(i for (i, c) in reversed(tuple(enumerate(text))) - if i >= iterator_index and - (i < iterator_index+length) and - c in (' ', '\t')) - except StopIteration: - return len(text) - - while iterator_index < len(text): - next_chunk = text[iterator_index:min(next_newline(), next_breaker()+1)] - iterator_index += len(next_chunk) - yield next_chunk - - -def flatten_nested(nested_dicts): - """ - Flattens dicts and sequences into one dict with tuples of keys representing the nested keys. - - Example - >>> dd = { \ - 'dict1': {'name': 'Jon', 'id': 42}, \ - 'dict2': {'name': 'Sam', 'id': 41}, \ - 'seq1': [{'one': 1, 'two': 2}] \ - } - - >>> flatten_nested(dd) == { \ - ('dict1', 'name'): 'Jon', ('dict1', 'id'): 42, \ - ('dict2', 'name'): 'Sam', ('dict2', 'id'): 41, \ - ('seq1', 0, 'one'): 1, ('seq1', 0, 'two'): 2, \ - } - True - """ - assert isinstance(nested_dicts, (dict, list, tuple)), 'Only works with a collection parameter' - - def items(c): - if isinstance(c, dict): - return c.items() - elif isinstance(c, (list, tuple)): - return enumerate(c) - else: - raise RuntimeError('c must be a collection') - - def flatten(dd): - output = {} - for k, v in items(dd): - if isinstance(v, (dict, list, tuple)): - for child_key, child_value in flatten(v).items(): - output[(k,) + child_key] = child_value - else: - output[(k,)] = v - return output - - return flatten(nested_dicts) - - -class PeekableIterator: - - # Returned by peek() when the iterator is exhausted. Truthiness is False. 
- Nothing = tuple() - - def __init__(self, iter): - self._iter = iter - - def __next__(self): - return next(self._iter) - - def next(self): - return self.__next__() - - def __iter__(self): - return self - - def peek(self): - """ - Returns PeekableIterator.Nothing when the iterator is exhausted. - """ - try: - v = next(self._iter) - self._iter = itertools.chain((v,), self._iter) - return v - except StopIteration: - return PeekableIterator.Nothing diff --git a/pipenv/project.py b/pipenv/project.py index 4b90f4934d..13c6619f64 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -75,7 +75,7 @@ class _LockFileEncoder(json.JSONEncoder): This adds a few characteristics to the encoder: * The JSON is always prettified with indents and spaces. - * PrettyTOML's container elements are seamlessly encodable. + * TOMLKit's container elements are seamlessly encodable. * The output is always UTF-8-encoded text, never binary, even on Python 2. """ @@ -85,11 +85,7 @@ def __init__(self): ) def default(self, obj): - from prettytoml.elements.common import ContainerElement, TokenElement - - if isinstance(obj, (ContainerElement, TokenElement)): - return obj.primitive_value - elif isinstance(obj, vistir.compat.Path): + if isinstance(obj, vistir.compat.Path): obj = obj.as_posix() return super(_LockFileEncoder, self).default(obj) diff --git a/tasks/vendoring/patches/patched/contoml.patch b/tasks/vendoring/patches/patched/contoml.patch deleted file mode 100644 index b9b2e9d7f4..0000000000 --- a/tasks/vendoring/patches/patched/contoml.patch +++ /dev/null @@ -1,28 +0,0 @@ -diff --git a/pipenv/patched/contoml/file/file.py b/pipenv/patched/contoml/file/file.py -index 5033a7b..99ce148 100644 ---- a/pipenv/patched/contoml/file/file.py -+++ b/pipenv/patched/contoml/file/file.py -@@ -30,6 +30,14 @@ class TOMLFile: - except KeyError: - return FreshTable(parent=self, name=item, is_array=False) - -+ def get(self, item, default=None): -+ """This was not here for who knows why.""" -+ -+ if item not 
in self: -+ return default -+ else: -+ return self.__getitem__(item) -+ - def __contains__(self, item): - return item in self.keys() - -@@ -223,7 +231,7 @@ class TOMLFile: - if has_anonymous_entry(): - return items - else: -- return items + [('', self[''])] -+ return list(items) + [('', self[''])] - - @property - def primitive(self): diff --git a/tasks/vendoring/patches/patched/prettytoml-newlinefix.patch b/tasks/vendoring/patches/patched/prettytoml-newlinefix.patch deleted file mode 100644 index 2b1066a10f..0000000000 --- a/tasks/vendoring/patches/patched/prettytoml-newlinefix.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/pipenv/patched/prettytoml/elements/traversal/__init__.py b/pipenv/patched/prettytoml/elements/traversal/__init__.py -index 5b98045..c93506e 100644 ---- a/pipenv/patched/prettytoml/elements/traversal/__init__.py -+++ b/pipenv/patched/prettytoml/elements/traversal/__init__.py -@@ -78,7 +78,7 @@ class TraversalMixin: - - if following_comment == float('-inf'): - return following_newline -- if following_newline == float('inf'): -+ if following_newline == float('-inf'): - return following_comment - - if following_newline < following_comment: diff --git a/tasks/vendoring/patches/patched/prettytoml-python37.patch b/tasks/vendoring/patches/patched/prettytoml-python37.patch deleted file mode 100644 index 5039a1c7eb..0000000000 --- a/tasks/vendoring/patches/patched/prettytoml-python37.patch +++ /dev/null @@ -1,32 +0,0 @@ -From c44f2126fb5c75a5f5afd9d320c9f6cfc4ce3384 Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= -Date: Tue, 26 Jun 2018 21:02:45 +0200 -Subject: [PATCH] Catch StopIteration in AbstractTable._enumerate_items - -This makes PEP 479 enabled Pythons (such as 3.7) work again. 
- -Otherwise you get: - - RuntimeError: generator raised StopIteration - -Fixes https://github.com/pypa/pipenv/issues/2426 ---- - pipenv/patched/prettytoml/elements/abstracttable.py | 5 ++++- - 1 file changed, 4 insertions(+), 1 deletion(-) - -diff --git a/pipenv/patched/prettytoml/elements/abstracttable.py b/pipenv/patched/prettytoml/elements/abstracttable.py -index 59fd574..627da0e 100644 ---- a/pipenv/patched/prettytoml/elements/abstracttable.py -+++ b/pipenv/patched/prettytoml/elements/abstracttable.py -@@ -19,7 +19,10 @@ def _enumerate_items(self): - """ - non_metadata = self._enumerate_non_metadata_sub_elements() - while True: -- yield next(non_metadata), next(non_metadata) -+ try: -+ yield next(non_metadata), next(non_metadata) -+ except StopIteration: -+ return - - def items(self): - for (key_i, key), (value_i, value) in self._enumerate_items(): diff --git a/tasks/vendoring/patches/patched/prettytoml-table-iter.patch b/tasks/vendoring/patches/patched/prettytoml-table-iter.patch deleted file mode 100644 index 9ec52633c4..0000000000 --- a/tasks/vendoring/patches/patched/prettytoml-table-iter.patch +++ /dev/null @@ -1,29 +0,0 @@ -diff --git a/pipenv/patched/prettytoml/elements/abstracttable.py b/pipenv/patched/prettytoml/elements/abstracttable.py -index 59fd5748..48663aed 100644 ---- a/pipenv/patched/prettytoml/elements/abstracttable.py -+++ b/pipenv/patched/prettytoml/elements/abstracttable.py -@@ -1,8 +1,13 @@ -+try: -+ from collections.abc import Mapping -+except ImportError: -+ from collections import Mapping -+ - from prettytoml.elements.common import ContainerElement - from prettytoml.elements import traversal - - --class AbstractTable(ContainerElement, traversal.TraversalMixin): -+class AbstractTable(ContainerElement, traversal.TraversalMixin, Mapping): - """ - Common code for handling tables as key-value pairs with metadata elements sprinkled all over. 
- -@@ -37,6 +42,9 @@ class AbstractTable(ContainerElement, traversal.TraversalMixin): - def __len__(self): - return len(tuple(self._enumerate_items())) - -+ def __iter__(self): -+ return (key for key, _ in self.items()) -+ - def __contains__(self, item): - return item in self.keys() - diff --git a/tasks/vendoring/patches/patched/prettytoml-unicode.patch b/tasks/vendoring/patches/patched/prettytoml-unicode.patch deleted file mode 100644 index 54f4c6218b..0000000000 --- a/tasks/vendoring/patches/patched/prettytoml-unicode.patch +++ /dev/null @@ -1,132 +0,0 @@ -diff --git a/pipenv/patched/prettytoml/tokens/py2toml.py b/pipenv/patched/prettytoml/tokens/py2toml.py -index 8299195..2decd02 100644 ---- a/pipenv/patched/prettytoml/tokens/py2toml.py -+++ b/pipenv/patched/prettytoml/tokens/py2toml.py -@@ -2,6 +2,7 @@ - """ - A converter of python values to TOML Token instances. - """ -+from __future__ import unicode_literals - import codecs - import datetime - import six -@@ -81,10 +82,30 @@ def create_string_token(text, bare_string_allowed=False, multiline_strings_allow - - - def _escape_single_line_quoted_string(text): -- if six.PY2: -- return text.encode('unicode-escape').encode('string-escape').replace('"', '\\"').replace("\\'", "'") -- else: -- return codecs.encode(text, 'unicode-escape').decode().replace('"', '\\"') -+ text = text.decode('utf-8') if isinstance(text, six.binary_type) else text -+ start = 0 -+ i = 0 -+ res = [] -+ _escapes = {'\n': '\\n', '\r': '\\r', '\\': '\\\\', '\t': '\\t', -+ '\b': '\\b', '\f': '\\f', '"': '\\"'} -+ -+ def flush(): -+ if start < i: -+ res.append(text[start:i]) -+ return i + 1 -+ -+ while i < len(text): -+ c = text[i] -+ if c in _escapes: -+ start = flush() -+ res.append(_escapes[c]) -+ elif ord(c) < 0x20: -+ start = flush() -+ res.append('\\u%04x' % ord(c)) -+ i += 1 -+ -+ flush() -+ return ''.join(res) - - - def _create_multiline_string_token(text): -diff --git a/pipenv/patched/prettytoml/tokens/toml2py.py 
b/pipenv/patched/prettytoml/tokens/toml2py.py -index 2bf9c1c..5680443 100644 ---- a/pipenv/patched/prettytoml/tokens/toml2py.py -+++ b/pipenv/patched/prettytoml/tokens/toml2py.py -@@ -1,3 +1,4 @@ -+from __future__ import unicode_literals - import re - import string - import iso8601 -@@ -39,42 +40,40 @@ def _unescape_str(text): - """ - Unescapes a string according the TOML spec. Raises BadEscapeCharacter when appropriate. - """ -- -- # Detect bad escape jobs -- bad_escape_regexp = re.compile(r'([^\\]|^)\\[^btnfr"\\uU]') -- if bad_escape_regexp.findall(text): -- raise BadEscapeCharacter -- -- # Do the unescaping -- if six.PY2: -- return _unicode_escaped_string(text).decode('string-escape').decode('unicode-escape') -- else: -- return codecs.decode(_unicode_escaped_string(text), 'unicode-escape') -- -- --def _unicode_escaped_string(text): -- """ -- Escapes all unicode characters in the given string -- """ -- -- if six.PY2: -- text = unicode(text) -- -- def is_unicode(c): -- return c.lower() not in string.ascii_letters + string.whitespace + string.punctuation + string.digits -- -- def escape_unicode_char(x): -- if six.PY2: -- return x.encode('unicode-escape') -+ text = text.decode('utf-8') if isinstance(text, six.binary_type) else text -+ tokens = [] -+ i = 0 -+ basicstr_re = re.compile(r'[^"\\\000-\037]*') -+ unicode_re = re.compile(r'[uU]((?<=u)[a-fA-F0-9]{4}|(?<=U)[a-fA-F0-9]{8})') -+ escapes = { -+ 'b': '\b', -+ 't': '\t', -+ 'n': '\n', -+ 'f': '\f', -+ 'r': '\r', -+ '\\': '\\', -+ '"': '"', -+ '/': '/', -+ "'": "'" -+ } -+ while True: -+ m = basicstr_re.match(text, i) -+ i = m.end() -+ tokens.append(m.group()) -+ if i == len(text) or text[i] != '\\': -+ break - else: -- return codecs.encode(x, 'unicode-escape') -- -- if any(is_unicode(c) for c in text): -- homogeneous_chars = tuple(escape_unicode_char(c) if is_unicode(c) else c.encode() for c in text) -- homogeneous_bytes = functools.reduce(operator.add, homogeneous_chars) -- return homogeneous_bytes.decode() -- 
else: -- return text -+ i += 1 -+ if unicode_re.match(text, i): -+ m = unicode_re.match(text, i) -+ i = m.end() -+ tokens.append(six.unichr(int(m.group(1), 16))) -+ else: -+ if text[i] not in escapes: -+ raise BadEscapeCharacter -+ tokens.append(escapes[text[i]]) -+ i += 1 -+ return ''.join(tokens) - - - def _to_string(token): diff --git a/tasks/vendoring/patches/patched/prettytoml.patch b/tasks/vendoring/patches/patched/prettytoml.patch deleted file mode 100644 index 85dfb791f4..0000000000 --- a/tasks/vendoring/patches/patched/prettytoml.patch +++ /dev/null @@ -1,78 +0,0 @@ -diff --git a/pipenv/patched/prettytoml/_version.py b/pipenv/patched/prettytoml/_version.py -index 4f146e6..e0f1547 100644 ---- a/pipenv/patched/prettytoml/_version.py -+++ b/pipenv/patched/prettytoml/_version.py -@@ -1 +1 @@ --VERSION = '0.03' -+VERSION = 'master' -diff --git a/pipenv/patched/prettytoml/elements/table.py b/pipenv/patched/prettytoml/elements/table.py -index f78a6d1..cdc3ed4 100644 ---- a/pipenv/patched/prettytoml/elements/table.py -+++ b/pipenv/patched/prettytoml/elements/table.py -@@ -94,9 +94,9 @@ class TableElement(abstracttable.AbstractTable): - value_element, - factory.create_newline_element(), - ] -- -+ - insertion_index = self._find_insertion_index() -- -+ - self._sub_elements = \ - self.sub_elements[:insertion_index] + inserted_elements + self.sub_elements[insertion_index:] - -@@ -105,11 +105,16 @@ class TableElement(abstracttable.AbstractTable): - preceding_newline = self._find_preceding_newline(begin) - if preceding_newline >= 0: - begin = preceding_newline -- end = self._find_following_newline(begin) -+ end = self._find_following_line_terminator(begin) - if end < 0: - end = len(tuple(self._sub_elements)) - self._sub_elements = self.sub_elements[:begin] + self.sub_elements[end:] - -+ def pop(self, key): -+ v = self[key] -+ del self[key] -+ return v -+ - def value(self): - return self - -diff --git a/pipenv/patched/prettytoml/tokens/py2toml.py 
b/pipenv/patched/prettytoml/tokens/py2toml.py -index 3db97b4..8299195 100644 ---- a/pipenv/patched/prettytoml/tokens/py2toml.py -+++ b/pipenv/patched/prettytoml/tokens/py2toml.py -@@ -5,11 +5,8 @@ A converter of python values to TOML Token instances. - import codecs - import datetime - import six --import strict_rfc3339 --import timestamp - from prettytoml import tokens - import re --from prettytoml.elements.metadata import NewlineElement - from prettytoml.errors import TOMLError - from prettytoml.tokens import Token - from prettytoml.util import chunkate_string -@@ -49,15 +46,17 @@ def create_primitive_token(value, multiline_strings_allowed=True): - elif isinstance(value, float): - return tokens.Token(tokens.TYPE_FLOAT, u'{}'.format(value)) - elif isinstance(value, (datetime.datetime, datetime.date, datetime.time)): -- ts = timestamp(value) // 1000 -- return tokens.Token(tokens.TYPE_DATE, strict_rfc3339.timestamp_to_rfc3339_utcoffset(ts)) -+ s = value.isoformat() -+ if s.endswith('+00:00'): -+ s = s[:-6] + 'Z' -+ return tokens.Token(tokens.TYPE_DATE, s) - elif isinstance(value, six.string_types): - return create_string_token(value, multiline_strings_allowed=multiline_strings_allowed) - - raise NotPrimitiveError("{} of type {}".format(value, type(value))) - - --_bare_string_regex = re.compile('^[a-zA-Z0-9_-]*$') -+_bare_string_regex = re.compile('^[a-zA-Z_-]*$') - - - def create_string_token(text, bare_string_allowed=False, multiline_strings_allowed=True): diff --git a/tests/integration/test_project.py b/tests/integration/test_project.py index 1e00bbb70b..5e1bafb919 100644 --- a/tests/integration/test_project.py +++ b/tests/integration/test_project.py @@ -160,4 +160,4 @@ def test_rewrite_outline_table(PipenvInstance, pypi): with open(p.pipfile_path) as f: contents = f.read() assert "[packages.requests]" not in contents - assert 'requests = { version = "*" }' in contents + assert 'requests = {version = "*"}' in contents diff --git a/tests/unit/test_vendor.py 
b/tests/unit/test_vendor.py index 704b37fb7f..6514b16290 100644 --- a/tests/unit/test_vendor.py +++ b/tests/unit/test_vendor.py @@ -1,43 +1,10 @@ # -*- coding: utf-8 -*- # We need to import the patched packages directly from sys.path, so the # identity checks can pass. -import pipenv # noqa - -import datetime import os -import pytest -import pytz - -import contoml +import pipenv # noqa from pipfile.api import PipfileParser -from prettytoml import lexer, tokens -from prettytoml.elements.atomic import AtomicElement -from prettytoml.elements.metadata import ( - WhitespaceElement, PunctuationElement, CommentElement -) -from prettytoml.elements.table import TableElement -from prettytoml.tokens.py2toml import create_primitive_token - - -def test_table(): - initial_toml = """id=42 # My id\nage=14""" - tokens = tuple(lexer.tokenize(initial_toml)) - table = TableElement( - [ - AtomicElement(tokens[0:1]), - PunctuationElement(tokens[1:2]), - AtomicElement(tokens[2:3]), - WhitespaceElement(tokens[3:4]), - CommentElement(tokens[4:6]), - AtomicElement(tokens[6:7]), - PunctuationElement(tokens[7:8]), - AtomicElement(tokens[8:9]), - ] - ) - assert set(table.items()) == {('id', 42), ('age', 14)} - del table['id'] - assert set(table.items()) == {('age', 14)} class TestPipfileParser: @@ -71,38 +38,3 @@ def test_inject_environment_variables(self): assert parsed_dict["list"][1] == {} assert parsed_dict["bool"] is True assert parsed_dict["none"] is None - - -@pytest.mark.parametrize('dt, content', [ - ( # Date. - datetime.date(1992, 8, 19), - '1992-08-19', - ), - ( # Naive time. - datetime.time(15, 10), - '15:10:00', - ), - ( # Aware time in UTC. - datetime.time(15, 10, tzinfo=pytz.UTC), - '15:10:00Z', - ), - ( # Aware local time. - datetime.time(15, 10, tzinfo=pytz.FixedOffset(8 * 60)), - '15:10:00+08:00', - ), - ( # Naive datetime. - datetime.datetime(1992, 8, 19, 15, 10), - '1992-08-19T15:10:00', - ), - ( # Aware datetime in UTC. 
- datetime.datetime(1992, 8, 19, 15, 10, tzinfo=pytz.UTC), - '1992-08-19T15:10:00Z', - ), - ( # Aware local datetime. - datetime.datetime(1992, 8, 19, 15, 10, tzinfo=pytz.FixedOffset(8 * 60)), - '1992-08-19T15:10:00+08:00', - ), -]) -def test_token_date(dt, content): - token = create_primitive_token(dt) - assert token == tokens.Token(tokens.TYPE_DATE, content) From b27d6a771081f7f88dd3d0005e0d60bbbf0ee53d Mon Sep 17 00:00:00 2001 From: frostming Date: Thu, 8 Nov 2018 11:04:42 +0800 Subject: [PATCH 06/71] clear references in patched.txt --- pipenv/patched/patched.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/pipenv/patched/patched.txt b/pipenv/patched/patched.txt index 4f3ee409d7..e7dadd8ec9 100644 --- a/pipenv/patched/patched.txt +++ b/pipenv/patched/patched.txt @@ -1,5 +1,4 @@ safety -git+https://github.com/jumpscale7/python-consistent-toml.git#egg=contoml crayons==0.1.2 pipfile==0.0.2 pip-tools==3.1.0 From 1555200463cd91babc1197e81bec15357d7f11f4 Mon Sep 17 00:00:00 2001 From: frostming Date: Thu, 8 Nov 2018 14:59:59 +0800 Subject: [PATCH 07/71] Add back some tests --- tests/unit/test_vendor.py | 49 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 48 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_vendor.py b/tests/unit/test_vendor.py index 6514b16290..35458c5342 100644 --- a/tests/unit/test_vendor.py +++ b/tests/unit/test_vendor.py @@ -1,9 +1,15 @@ # -*- coding: utf-8 -*- # We need to import the patched packages directly from sys.path, so the # identity checks can pass. +import pipenv # noqa + +import datetime import os -import pipenv # noqa +import pytest +import pytz +import tomlkit + from pipfile.api import PipfileParser @@ -38,3 +44,44 @@ def test_inject_environment_variables(self): assert parsed_dict["list"][1] == {} assert parsed_dict["bool"] is True assert parsed_dict["none"] is None + + +@pytest.mark.parametrize('dt, content', [ + ( # Date. + datetime.date(1992, 8, 19), + '1992-08-19', + ), + ( # Naive time. 
+ datetime.time(15, 10), + '15:10:00', + ), + ( # Aware time in UTC. + datetime.time(15, 10, tzinfo=pytz.UTC), + '15:10:00+00:00', + ), + ( # Aware local time. + datetime.time(15, 10, tzinfo=pytz.FixedOffset(8 * 60)), + '15:10:00+08:00', + ), + ( # Naive datetime. + datetime.datetime(1992, 8, 19, 15, 10), + '1992-08-19T15:10:00', + ), + ( # Aware datetime in UTC. + datetime.datetime(1992, 8, 19, 15, 10, tzinfo=pytz.UTC), + '1992-08-19T15:10:00Z', + ), + ( # Aware local datetime. + datetime.datetime(1992, 8, 19, 15, 10, tzinfo=pytz.FixedOffset(8 * 60)), + '1992-08-19T15:10:00+08:00', + ), +]) +def test_token_date(dt, content): + item = tomlkit.item(dt) + assert item.as_string() == content + + +def test_dump_nonascii_string(): + content = 'name = "Stažené"\n' + toml_content = tomlkit.dumps(tomlkit.loads(content)) + assert toml_content == content From 53b073c7ffcce7d18dd3b459932d49f783546434 Mon Sep 17 00:00:00 2001 From: frostming Date: Thu, 8 Nov 2018 15:20:47 +0800 Subject: [PATCH 08/71] python 2.7 unicode --- pipenv/project.py | 2 +- tests/unit/test_vendor.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pipenv/project.py b/pipenv/project.py index 13c6619f64..74fe65271f 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -856,7 +856,7 @@ def write_toml(self, data, path=None): formatted_data = tomlkit.dumps(data).rstrip() else: encoder = toml.encoder.TomlPreserveInlineDictEncoder() - formatted_data = toml.dumps(data, encoder=encoder) + formatted_data = toml.dumps(data, encoder=encoder).rstrip() except Exception: document = tomlkit.document() for section in ("packages", "dev-packages"): diff --git a/tests/unit/test_vendor.py b/tests/unit/test_vendor.py index 35458c5342..aea9311214 100644 --- a/tests/unit/test_vendor.py +++ b/tests/unit/test_vendor.py @@ -82,6 +82,6 @@ def test_token_date(dt, content): def test_dump_nonascii_string(): - content = 'name = "Stažené"\n' + content = u'name = "Stažené"\n' toml_content = 
tomlkit.dumps(tomlkit.loads(content)) assert toml_content == content From b3aa66b1542200994dcdd5770228568a7f6dbac7 Mon Sep 17 00:00:00 2001 From: frostming Date: Thu, 8 Nov 2018 17:26:39 +0800 Subject: [PATCH 09/71] make tomlkit dump toml's inline table --- pipenv/project.py | 13 +++++----- pipenv/vendor/tomlkit/items.py | 6 ++++- .../vendor/tomlkit-dump-inline-table.patch | 24 +++++++++++++++++++ 3 files changed, 36 insertions(+), 7 deletions(-) create mode 100644 tasks/vendoring/patches/vendor/tomlkit-dump-inline-table.patch diff --git a/pipenv/project.py b/pipenv/project.py index 74fe65271f..6d59b4cbf3 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -572,10 +572,15 @@ def clear_pipfile_cache(self): """Clear pipfile cache (e.g., so we can mutate parsed pipfile)""" _pipfile_cache.clear() + @staticmethod + def _is_tomlkit_parsed_result(parsed): + """Check by duck typing of tomlkit.api.Container""" + return hasattr(parsed, "_body") + @staticmethod def convert_outline_table(parsed): """Converts all outline to inline tables""" - if hasattr(parsed, "_body"): # Duck-type that implies tomlkit.api.Container. 
+ if Project._istomlkit_parsed_result(parsed): empty_inline_table = tomlkit.inline_table else: empty_inline_table = toml.TomlDecoder().get_empty_inline_table @@ -852,11 +857,7 @@ def write_toml(self, data, path=None): if path is None: path = self.pipfile_location try: - if hasattr(data, "_body"): - formatted_data = tomlkit.dumps(data).rstrip() - else: - encoder = toml.encoder.TomlPreserveInlineDictEncoder() - formatted_data = toml.dumps(data, encoder=encoder).rstrip() + formatted_data = tomlkit.dumps(data).rstrip() except Exception: document = tomlkit.document() for section in ("packages", "dev-packages"): diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py index 781e2e9843..80e029d988 100644 --- a/pipenv/vendor/tomlkit/items.py +++ b/pipenv/vendor/tomlkit/items.py @@ -21,6 +21,7 @@ from pipenv.vendor.backports.functools_lru_cache import lru_cache else: from functools import lru_cache +from toml.decoder import InlineTableDict def item(value, _parent=None): @@ -36,7 +37,10 @@ def item(value, _parent=None): elif isinstance(value, float): return Float(value, Trivia(), str(value)) elif isinstance(value, dict): - val = Table(Container(), Trivia(), False) + if isinstance(value, InlineTableDict): + val = InlineTable(Container(), Trivia()) + else: + val = Table(Container(), Trivia(), False) for k, v in sorted(value.items(), key=lambda i: (isinstance(i[1], dict), i[0])): val[k] = item(v, _parent=val) diff --git a/tasks/vendoring/patches/vendor/tomlkit-dump-inline-table.patch b/tasks/vendoring/patches/vendor/tomlkit-dump-inline-table.patch new file mode 100644 index 0000000000..755bd31a5e --- /dev/null +++ b/tasks/vendoring/patches/vendor/tomlkit-dump-inline-table.patch @@ -0,0 +1,24 @@ +diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py +index 781e2e98..80e029d9 100644 +--- a/pipenv/vendor/tomlkit/items.py ++++ b/pipenv/vendor/tomlkit/items.py +@@ -21,6 +21,7 @@ if PY2: + from pipenv.vendor.backports.functools_lru_cache 
import lru_cache + else: + from functools import lru_cache ++from toml.decoder import InlineTableDict + + + def item(value, _parent=None): +@@ -36,7 +37,10 @@ def item(value, _parent=None): + elif isinstance(value, float): + return Float(value, Trivia(), str(value)) + elif isinstance(value, dict): +- val = Table(Container(), Trivia(), False) ++ if isinstance(value, InlineTableDict): ++ val = InlineTable(Container(), Trivia()) ++ else: ++ val = Table(Container(), Trivia(), False) + for k, v in sorted(value.items(), key=lambda i: (isinstance(i[1], dict), i[0])): + val[k] = item(v, _parent=val) + From 36f054d3a81790918a9300808c4302f247a31685 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Thu, 8 Nov 2018 04:47:26 -0500 Subject: [PATCH 10/71] Grab updates from latest vendored changes Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/__init__.py | 2 +- pipenv/vendor/pythonfinder/models/pyenv.py | 7 +- pipenv/vendor/pythonfinder/models/python.py | 2 +- pipenv/vendor/requirementslib/__init__.py | 2 +- .../requirementslib/models/baserequirement.py | 37 --------- .../vendor/requirementslib/models/markers.py | 6 +- .../vendor/requirementslib/models/pipfile.py | 39 +++++++-- .../requirementslib/models/requirements.py | 81 +++++++++++++++---- pipenv/vendor/requirementslib/models/utils.py | 5 +- pipenv/vendor/vistir/spin.py | 2 + 10 files changed, 113 insertions(+), 70 deletions(-) delete mode 100644 pipenv/vendor/requirementslib/models/baserequirement.py diff --git a/pipenv/vendor/pythonfinder/__init__.py b/pipenv/vendor/pythonfinder/__init__.py index 0b22546acc..85666b5c28 100644 --- a/pipenv/vendor/pythonfinder/__init__.py +++ b/pipenv/vendor/pythonfinder/__init__.py @@ -1,6 +1,6 @@ from __future__ import print_function, absolute_import -__version__ = '1.1.7' +__version__ = '1.1.8' # Add NullHandler to "pythonfinder" logger, because Python2's default root # logger has no handler and warnings like this would be reported: diff --git 
a/pipenv/vendor/pythonfinder/models/pyenv.py b/pipenv/vendor/pythonfinder/models/pyenv.py index 4a8dfc65cd..ac7f8588ac 100644 --- a/pipenv/vendor/pythonfinder/models/pyenv.py +++ b/pipenv/vendor/pythonfinder/models/pyenv.py @@ -45,13 +45,16 @@ def expanded_paths(self): ) def get_version_order(self): - version_order_file = self.root.joinpath("version").read_text(encoding="utf-8") + version_order_file, version_order_lines = self.root.joinpath("version"), [] + if version_order_file.exists(): + version_order_lines = version_order_file.read_text(encoding="utf-8").splitlines() + version_paths = [ p for p in self.root.glob("versions/*") if not (p.parent.name == "envs" or p.name == "envs") ] versions = {v.name: v for v in version_paths} - version_order = [versions[v] for v in version_order_file.splitlines() if v in versions] + version_order = [versions[v] for v in version_order_lines if v in versions] for version in version_order: version_paths.remove(version) version_order += version_paths diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py index ec99afe731..24d520b6d2 100644 --- a/pipenv/vendor/pythonfinder/models/python.py +++ b/pipenv/vendor/pythonfinder/models/python.py @@ -173,7 +173,7 @@ def parse(cls, version): def get_architecture(self): if self.architecture: return self.architecture - arch, _ = platform.architecture(path.path.as_posix()) + arch, _ = platform.architecture(self.comes_from.path.as_posix()) self.architecture = arch return self.architecture diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index ba0ce9ae69..edbab5bc8a 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -1,5 +1,5 @@ # -*- coding=utf-8 -*- -__version__ = '1.2.5' +__version__ = '1.2.6' import logging diff --git a/pipenv/vendor/requirementslib/models/baserequirement.py b/pipenv/vendor/requirementslib/models/baserequirement.py deleted 
file mode 100644 index b97dee40a1..0000000000 --- a/pipenv/vendor/requirementslib/models/baserequirement.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import -import abc -import attr -import six - - -@six.add_metaclass(abc.ABCMeta) -class BaseRequirement: - @classmethod - def from_line(cls, line): - """Returns a requirement from a requirements.txt or pip-compatible line""" - raise NotImplementedError - - @abc.abstractmethod - def line_part(self): - """Returns the current requirement as a pip-compatible line""" - - @classmethod - def from_pipfile(cls, name, pipfile): - """Returns a requirement from a pipfile entry""" - raise NotImplementedError - - @abc.abstractmethod - def pipfile_part(self): - """Returns the current requirement as a pipfile entry""" - - @classmethod - def attr_fields(cls): - return [field.name for field in attr.fields(cls)] - - @property - def extras_as_pip(self): - if self.extras: - return "[{0}]".format(",".join(self.extras)) - - return "" diff --git a/pipenv/vendor/requirementslib/models/markers.py b/pipenv/vendor/requirementslib/models/markers.py index 83b44b6344..70fe3bc035 100644 --- a/pipenv/vendor/requirementslib/models/markers.py +++ b/pipenv/vendor/requirementslib/models/markers.py @@ -4,12 +4,11 @@ from packaging.markers import InvalidMarker, Marker from ..exceptions import RequirementError -from .baserequirement import BaseRequirement from .utils import filter_none, validate_markers @attr.s -class PipenvMarkers(BaseRequirement): +class PipenvMarkers(object): """System-level requirements - see PEP508 for more detail""" os_name = attr.ib( @@ -78,7 +77,8 @@ def from_line(cls, line): @classmethod def from_pipfile(cls, name, pipfile): - found_keys = [k for k in pipfile.keys() if k in cls.attr_fields()] + attr_fields = [field.name for field in attr.fields(cls)] + found_keys = [k for k in pipfile.keys() if k in attr_fields] marker_strings = ["{0} {1}".format(k, pipfile[k]) for k in found_keys] if 
pipfile.get("markers"): marker_strings.append(pipfile.get("markers")) diff --git a/pipenv/vendor/requirementslib/models/pipfile.py b/pipenv/vendor/requirementslib/models/pipfile.py index fe7743c2ca..58d540559e 100644 --- a/pipenv/vendor/requirementslib/models/pipfile.py +++ b/pipenv/vendor/requirementslib/models/pipfile.py @@ -38,12 +38,16 @@ def load(cls, f, encoding=None): content = content.decode(encoding) _data = tomlkit.loads(content) if "source" not in _data: - # HACK: There is no good way to prepend a section to an existing - # TOML document, but there's no good way to copy non-structural - # content from one TOML document to another either. Modify the - # TOML content directly, and load the new in-memory document. - sep = "" if content.startswith("\n") else "\n" - content = plette.pipfiles.DEFAULT_SOURCE_TOML + sep + content + if "sources" in _data: + _data["source"] = _data["sources"] + content = tomlkit.dumps(_data) + else: + # HACK: There is no good way to prepend a section to an existing + # TOML document, but there's no good way to copy non-structural + # content from one TOML document to another either. Modify the + # TOML content directly, and load the new in-memory document. 
+ sep = "" if content.startswith("\n") else "\n" + content = plette.pipfiles.DEFAULT_SOURCE_TOML + sep + content data = tomlkit.loads(content) return cls(data) @@ -53,6 +57,8 @@ class Pipfile(object): path = attr.ib(validator=is_path, type=Path) projectfile = attr.ib(validator=is_projectfile, type=ProjectFile) _pipfile = attr.ib(type=plette.pipfiles.Pipfile) + _pyproject = attr.ib(default=attr.Factory(tomlkit.document), type=tomlkit.toml_document.TOMLDocument) + build_system = attr.ib(default=attr.Factory(dict), type=dict) requirements = attr.ib(default=attr.Factory(list), type=list) dev_requirements = attr.ib(default=attr.Factory(list), type=list) @@ -212,3 +218,24 @@ def packages(self, as_requirements=True): if as_requirements: return self.requirements return self._pipfile.get('packages', {}) + + def _read_pyproject(self): + pyproject = self.path.parent.joinpath("pyproject.toml") + if pyproject.exists(): + self._pyproject = tomlkit.load(pyproject) + build_system = self._pyproject.get("build-system", None) + if not os.path.exists(self.path_to("setup.py")): + if not build_system or not build_system.get("requires"): + build_system = { + "requires": ["setuptools>=38.2.5", "wheel"], + "build-backend": "setuptools.build_meta", + } + self._build_system = build_system + + @property + def build_requires(self): + return self.build_system.get("requires", []) + + @property + def build_backend(self): + return self.build_system.get("build-backend", None) diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index 8d087d2309..ce2b0927f7 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -36,7 +36,6 @@ add_ssh_scheme_to_git_uri, strip_ssh_from_git_uri, ) -from .baserequirement import BaseRequirement from .utils import ( HASH_STRING, build_vcs_link, @@ -61,7 +60,7 @@ @attr.s(slots=True) -class NamedRequirement(BaseRequirement): +class 
NamedRequirement(object): name = attr.ib() version = attr.ib(validator=attr.validators.optional(validate_specifiers)) req = attr.ib() @@ -98,7 +97,8 @@ def from_line(cls, line): def from_pipfile(cls, name, pipfile): creation_args = {} if hasattr(pipfile, "keys"): - creation_args = {k: v for k, v in pipfile.items() if k in cls.attr_fields()} + attr_fields = [field.name for field in attr.fields(cls)] + creation_args = {k: v for k, v in pipfile.items() if k in attr_fields} creation_args["name"] = name version = get_version(pipfile) extras = creation_args.get("extras", None) @@ -131,7 +131,7 @@ def pipfile_part(self): @attr.s(slots=True) -class FileRequirement(BaseRequirement): +class FileRequirement(object): """File requirements for tar.gz installable files or wheels or setup.py containing directories.""" @@ -152,6 +152,8 @@ class FileRequirement(BaseRequirement): name = attr.ib() #: A :class:`~pkg_resources.Requirement` isntance req = attr.ib() + #: Whether this is a direct url requirement + is_direct = attr.ib(default=False) _uri_scheme = attr.ib(default=None) @classmethod @@ -256,11 +258,17 @@ def get_link_from_line(cls, line): return LinkInfo(vcs_type, prefer, relpath, path, uri, link) + def __attrs_post_init__(self): + if self.req and getattr(self.req, "url"): + self.uri = self.req.url + @uri.default def get_uri(self): if self.path and not self.uri: self._uri_scheme = "path" self.uri = pip_shims.shims.path_to_url(os.path.abspath(self.path)) + elif self.req and getattr(self.req, "url"): + self.uri = self.req.url @name.default def get_name(self): @@ -268,6 +276,8 @@ def get_name(self): if loc: self._uri_scheme = "path" if self.path else "uri" name = None + if self.req and getattr(self.req, "name"): + return self.req.name if self.link and self.link.egg_fragment: return self.link.egg_fragment elif self.link and self.link.is_wheel: @@ -326,9 +336,18 @@ def get_link(self): @req.default def get_requirement(self): - req = init_requirement(normalize_name(self.name)) - 
req.editable = False - req.line = self.link.url_without_fragment + if self.link.is_artifact and not self.editable: + if self._uri_scheme == "uri": + if self.name: + req_str = "{0} @{1}".format(self.name, self.link.url_without_fragment) + else: + req_str = "{0}".format(self.link.url_without_fragment) + req = init_requirement(req_str) + req.line = req_str + else: + req = init_requirement(normalize_name(self.name)) + req.editable = False + req.line = self.link.url_without_fragment if self.path and self.link and self.link.scheme.startswith("file"): req.local_file = True req.path = self.path @@ -337,7 +356,8 @@ def get_requirement(self): else: req.local_file = False req.path = None - req.url = self.link.url_without_fragment + if not getattr(req, "url", None): + req.url = self.link.url_without_fragment if self.editable: req.editable = True req.link = self.link @@ -351,9 +371,13 @@ def is_remote_artifact(self): for scheme in ("http", "https", "ftp", "ftps", "uri") ) and (self.link.is_artifact or self.link.is_wheel) - and not self.req.editable + and not self.editable ) + @property + def is_direct_url(self): + return self.is_remote_artifact + @property def formatted_path(self): if self.path: @@ -371,10 +395,18 @@ def from_line(cls, line): editable = line.startswith("-e ") line = line.split(" ", 1)[1] if editable else line setup_path = None + name = None + req = None if not any([is_installable_file(line), is_valid_url(line), is_file_url(line)]): - raise RequirementError( - "Supplied requirement is not installable: {0!r}".format(line) - ) + try: + req = init_requirement(line) + except Exception: + raise RequirementError( + "Supplied requirement is not installable: {0!r}".format(line) + ) + else: + name = getattr(req, "name", None) + line = getattr(req, "url", None) vcs_type, prefer, relpath, path, uri, link = cls.get_link_from_line(line) setup_path = Path(path) / "setup.py" if path else None arg_dict = { @@ -389,8 +421,12 @@ def from_line(cls, line): from pip_shims import 
Wheel arg_dict["name"] = Wheel(link.filename).name + elif name: + arg_dict["name"] = name elif link.egg_fragment: arg_dict["name"] = link.egg_fragment + if req: + arg_dict["req"] = req created = cls(**arg_dict) return created @@ -428,7 +464,9 @@ def from_pipfile(cls, name, pipfile): if not uri: uri = pip_shims.shims.path_to_url(path) link = create_link(uri) - + req = None + if link.is_artifact and not link.is_wheel and not link.scheme.startswith("file"): + req = init_requirement("{0}@{1}".format(name, uri)) arg_dict = { "name": name, "path": path, @@ -437,6 +475,8 @@ def from_pipfile(cls, name, pipfile): "link": link, "uri_scheme": uri_scheme, } + if req: + arg_dict["req"] = req return cls(**arg_dict) @property @@ -449,7 +489,10 @@ def line_part(self): seed = unquote(self.link.url_without_fragment) or self.uri # add egg fragments to remote artifacts (valid urls only) if not self._has_hashed_name and self.is_remote_artifact: - seed += "#egg={0}".format(self.name) + if not self.link.is_wheel and self.link.is_artifact: + seed = "{0}@{1}".format(self.name, seed) + else: + seed += "#egg={0}".format(self.name) editable = "-e " if self.editable else "" return "{0}{1}".format(editable, seed) @@ -575,7 +618,8 @@ def get_requirement(self): ) req = init_requirement(canonicalize_name(self.name)) req.editable = self.editable - req.url = self.uri + if not getattr(req, "url") and self.uri: + req.url = self.uri req.line = self.link.url if self.ref: req.revision = self.ref @@ -813,7 +857,7 @@ def pipfile_part(self): class Requirement(object): name = attr.ib() vcs = attr.ib(default=None, validator=attr.validators.optional(validate_vcs)) - req = attr.ib(default=None, validator=optional_instance_of(BaseRequirement)) + req = attr.ib(default=None) markers = attr.ib(default=None) specifiers = attr.ib(validator=attr.validators.optional(validate_specifiers)) index = attr.ib(default=None) @@ -915,8 +959,11 @@ def from_line(cls, line): # Installable local files and installable non-vcs urls 
are handled # as files, generally speaking line_is_vcs = is_vcs(line) + # check for pep-508 compatible requirements + name, _, possible_url = line.partition("@") if is_installable_file(line) or ( - (is_file_url(line) or is_valid_url(line)) and not line_is_vcs + (is_valid_url(possible_url) or is_file_url(line) or is_valid_url(line)) and + not (line_is_vcs or is_vcs(possible_url)) ): r = FileRequirement.from_line(line_with_prefix) elif line_is_vcs: diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index fbaaf1a417..aa7ffd681c 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -424,17 +424,18 @@ def make_install_requirement(name, version, extras, markers, constraint=False): """ # If no extras are specified, the extras string is blank + from pip_shims.shims import install_req_from_line extras_string = "" if extras: # Sort extras for stability extras_string = "[{}]".format(",".join(sorted(extras))) if not markers: - return ireq_from_line( + return install_req_from_line( str('{}{}=={}'.format(name, extras_string, version)), constraint=constraint) else: - return ireq_from_line( + return install_req_from_line( str('{}{}=={}; {}'.format(name, extras_string, version, str(markers))), constraint=constraint) diff --git a/pipenv/vendor/vistir/spin.py b/pipenv/vendor/vistir/spin.py index f8c4e0095c..f0d9e77ffd 100644 --- a/pipenv/vendor/vistir/spin.py +++ b/pipenv/vendor/vistir/spin.py @@ -153,6 +153,7 @@ def __init__(self, *args, **kwargs): def ok(self, text="OK"): """Set Ok (success) finalizer to a spinner.""" + # Do not display spin text for ok state self._text = None _text = text if text else "OK" @@ -160,6 +161,7 @@ def ok(self, text="OK"): def fail(self, text="FAIL"): """Set fail finalizer to a spinner.""" + # Do not display spin text for fail state self._text = None _text = text if text else "FAIL" From 65d70905ee8d8e79bbe8ba8c5d5b72573f37e201 Mon Sep 17 
00:00:00 2001 From: frostming Date: Thu, 8 Nov 2018 17:52:05 +0800 Subject: [PATCH 11/71] Force inline table --- pipenv/project.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pipenv/project.py b/pipenv/project.py index 6d59b4cbf3..4f947e4b18 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -580,7 +580,7 @@ def _is_tomlkit_parsed_result(parsed): @staticmethod def convert_outline_table(parsed): """Converts all outline to inline tables""" - if Project._istomlkit_parsed_result(parsed): + if Project._is_tomlkit_parsed_result(parsed): empty_inline_table = tomlkit.inline_table else: empty_inline_table = toml.TomlDecoder().get_empty_inline_table @@ -856,6 +856,7 @@ def write_toml(self, data, path=None): """Writes the given data structure out as TOML.""" if path is None: path = self.pipfile_location + data = self.convert_outline_table(data) try: formatted_data = tomlkit.dumps(data).rstrip() except Exception: From 5b496705a095db7cbb89065fce0b90fe8e9e3f42 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Thu, 8 Nov 2018 13:13:54 -0500 Subject: [PATCH 12/71] Fix broken requirementslib updates Signed-off-by: Dan Ryan --- .../requirementslib/models/requirements.py | 30 ++++++++++--------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index ce2b0927f7..a36e5ba46c 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -143,6 +143,7 @@ class FileRequirement(object): editable = attr.ib(default=False) #: Extras if applicable extras = attr.ib(default=attr.Factory(list)) + _uri_scheme = attr.ib(default=None) #: URI of the package uri = attr.ib() #: Link object representing the package to clone @@ -154,7 +155,6 @@ class FileRequirement(object): req = attr.ib() #: Whether this is a direct url requirement is_direct = attr.ib(default=False) - _uri_scheme = 
attr.ib(default=None) @classmethod def get_link_from_line(cls, line): @@ -276,7 +276,7 @@ def get_name(self): if loc: self._uri_scheme = "path" if self.path else "uri" name = None - if self.req and getattr(self.req, "name"): + if getattr(self, "req", None) and getattr(self.req, "name"): return self.req.name if self.link and self.link.egg_fragment: return self.link.egg_fragment @@ -339,25 +339,27 @@ def get_requirement(self): if self.link.is_artifact and not self.editable: if self._uri_scheme == "uri": if self.name: - req_str = "{0} @{1}".format(self.name, self.link.url_without_fragment) + req_str = "{0} @ {1}".format(self.name, self.link.url_without_fragment) else: req_str = "{0}".format(self.link.url_without_fragment) req = init_requirement(req_str) req.line = req_str + else: + req = init_requirement(normalize_name(self.name)) else: req = init_requirement(normalize_name(self.name)) req.editable = False req.line = self.link.url_without_fragment - if self.path and self.link and self.link.scheme.startswith("file"): - req.local_file = True - req.path = self.path - req.url = None - self._uri_scheme = "file" - else: - req.local_file = False - req.path = None - if not getattr(req, "url", None): - req.url = self.link.url_without_fragment + if self.path and self.link and self.link.scheme.startswith("file"): + req.local_file = True + req.path = self.path + req.url = None + self._uri_scheme = "file" + else: + req.local_file = False + req.path = None + if not getattr(req, "url", None): + req.url = self.link.url_without_fragment if self.editable: req.editable = True req.link = self.link @@ -1148,7 +1150,7 @@ def get_requirement(self): req.line = req_line req.specifier = SpecifierSet(self.specifiers if self.specifiers else "") if self.is_vcs or self.is_file_or_url: - req.url = self.req.link.url_without_fragment + req.url = getattr(self.req.req, "url", self.req.link.url_without_fragment) req.marker = self.get_markers() req.extras = set(self.extras) if self.extras else set() 
return req From 3490fc85efc652f9f20dedd2c3dc6f5d1a2087e1 Mon Sep 17 00:00:00 2001 From: frostming Date: Fri, 9 Nov 2018 11:23:02 +0800 Subject: [PATCH 13/71] fix patch --- pipenv/vendor/tomlkit/container.py | 3 +-- .../patches/vendor/tomlkit-update-items.patch | 11 +++++------ 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py index 5ddd72e7b8..987a079036 100644 --- a/pipenv/vendor/tomlkit/container.py +++ b/pipenv/vendor/tomlkit/container.py @@ -196,8 +196,7 @@ def remove(self, key): # type: (Union[Key, str]) -> Container self._body[idx] = (None, Comment(Trivia(comment_ws="", comment=trivia.comment))) else: self._body[idx] = (None, Null()) - super(Container, self).__delitem__(key.key) - + super(Container, self).__delitem__(key.key) return self diff --git a/tasks/vendoring/patches/vendor/tomlkit-update-items.patch b/tasks/vendoring/patches/vendor/tomlkit-update-items.patch index 51f6006a94..c996cb9609 100644 --- a/tasks/vendoring/patches/vendor/tomlkit-update-items.patch +++ b/tasks/vendoring/patches/vendor/tomlkit-update-items.patch @@ -9,11 +9,11 @@ index 37014921..5ddd72e7 100644 +from .items import Trivia from .items import Whitespace from .items import item as _item - + @@ -189,9 +190,14 @@ class Container(dict): if idx is None: raise NonExistentKey(key) - + - self._body[idx] = (None, Null()) + old_data = self._body[idx][1] + trivia = getattr(old_data, "trivia", None) @@ -21,9 +21,8 @@ index 37014921..5ddd72e7 100644 + self._body[idx] = (None, Comment(Trivia(comment_ws="", comment=trivia.comment))) + else: + self._body[idx] = (None, Null()) -+ super(Container, self).__delitem__(key.key) - ++ super(Container, self).__delitem__(key.key) + - super(Container, self).__delitem__(key.key) - + return self - From de78c1efce2da437ed39f014509f576305bb0f2f Mon Sep 17 00:00:00 2001 From: frostming Date: Fri, 9 Nov 2018 12:59:51 +0800 Subject: [PATCH 14/71] Only convert outline tables when 
write toml --- pipenv/project.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/pipenv/project.py b/pipenv/project.py index f58b9c585e..bed430af91 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -585,25 +585,28 @@ def convert_outline_table(parsed): else: empty_inline_table = toml.TomlDecoder().get_empty_inline_table for section in ("packages", "dev-packages"): + has_outline_table = False table_data = parsed.get(section, {}).copy() for package, value in table_data.items(): - if hasattr(value, "keys"): + if hasattr(value, "keys") and not isinstance( + value, (tomlkit.items.InlineTable, toml.decoder.InlineTableDict) + ): + has_outline_table = True table = empty_inline_table() table.update(value) table_data[package] = table - parsed[section] = table_data + if has_outline_table: + # We'll lose comments here, only update when necessary + parsed[section] = table_data return parsed def _parse_pipfile(self, contents): try: - data = tomlkit.parse(contents) + return tomlkit.parse(contents) except Exception: # We lose comments here, but it's for the best.) # Fallback to toml parser, for large files. - data = toml.loads(contents) - if "[packages." in contents or "[dev-packages." 
in contents: - data = self.convert_outline_table(data) - return data + return toml.loads(contents) def _read_pyproject(self): pyproject = self.path_to("pyproject.toml") From accd0ea4abdb1cbe35c094e01cc93b36147c44a3 Mon Sep 17 00:00:00 2001 From: frostming Date: Fri, 9 Nov 2018 14:18:04 +0800 Subject: [PATCH 15/71] Move to utils function --- pipenv/project.py | 31 ++----------------- pipenv/utils.py | 25 +++++++++++++++ .../patches/vendor/tomlkit-update-items.patch | 15 +++++---- 3 files changed, 34 insertions(+), 37 deletions(-) diff --git a/pipenv/project.py b/pipenv/project.py index bed430af91..04b519ed42 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -25,6 +25,7 @@ find_requirements, is_editable, cleanup_toml, + convert_toml_outline_tables, is_installable_file, is_valid_url, normalize_drive, @@ -572,34 +573,6 @@ def clear_pipfile_cache(self): """Clear pipfile cache (e.g., so we can mutate parsed pipfile)""" _pipfile_cache.clear() - @staticmethod - def _is_tomlkit_parsed_result(parsed): - """Check by duck typing of tomlkit.api.Container""" - return hasattr(parsed, "_body") - - @staticmethod - def convert_outline_table(parsed): - """Converts all outline to inline tables""" - if Project._is_tomlkit_parsed_result(parsed): - empty_inline_table = tomlkit.inline_table - else: - empty_inline_table = toml.TomlDecoder().get_empty_inline_table - for section in ("packages", "dev-packages"): - has_outline_table = False - table_data = parsed.get(section, {}).copy() - for package, value in table_data.items(): - if hasattr(value, "keys") and not isinstance( - value, (tomlkit.items.InlineTable, toml.decoder.InlineTableDict) - ): - has_outline_table = True - table = empty_inline_table() - table.update(value) - table_data[package] = table - if has_outline_table: - # We'll lose comments here, only update when necessary - parsed[section] = table_data - return parsed - def _parse_pipfile(self, contents): try: return tomlkit.parse(contents) @@ -860,7 +833,7 @@ def 
write_toml(self, data, path=None): """Writes the given data structure out as TOML.""" if path is None: path = self.pipfile_location - data = self.convert_outline_table(data) + data = convert_toml_outline_tables(data) try: formatted_data = tomlkit.dumps(data).rstrip() except Exception: diff --git a/pipenv/utils.py b/pipenv/utils.py index 8674aee207..9d495fa367 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -7,6 +7,8 @@ import shutil import stat import sys +import toml +import tomlkit import warnings import crayons @@ -93,6 +95,29 @@ def cleanup_toml(tml): return toml +def convert_toml_outline_tables(parsed): + """Converts all outline tables to inline tables.""" + if isinstance(parsed, tomlkit.container.Container): + empty_inline_table = tomlkit.inline_table + else: + empty_inline_table = toml.TomlDecoder().get_empty_inline_table + for section in ("packages", "dev-packages"): + has_outline_table = False + table_data = parsed.get(section, {}).copy() + for package, value in table_data.items(): + if hasattr(value, "keys") and not isinstance( + value, (tomlkit.items.InlineTable, toml.decoder.InlineTableDict) + ): + has_outline_table = True + table = empty_inline_table() + table.update(value) + table_data[package] = table + if has_outline_table: + # We'll lose comments here, only update when necessary + parsed[section] = table_data + return parsed + + def parse_python_version(output): """Parse a Python version output returned by `python --version`. 
diff --git a/tasks/vendoring/patches/vendor/tomlkit-update-items.patch b/tasks/vendoring/patches/vendor/tomlkit-update-items.patch index c996cb9609..ed2fb95eb9 100644 --- a/tasks/vendoring/patches/vendor/tomlkit-update-items.patch +++ b/tasks/vendoring/patches/vendor/tomlkit-update-items.patch @@ -1,5 +1,5 @@ diff --git a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py -index 37014921..5ddd72e7 100644 +index 37014921..987a0790 100644 --- a/pipenv/vendor/tomlkit/container.py +++ b/pipenv/vendor/tomlkit/container.py @@ -9,6 +9,7 @@ from .items import Item @@ -9,20 +9,19 @@ index 37014921..5ddd72e7 100644 +from .items import Trivia from .items import Whitespace from .items import item as _item - -@@ -189,9 +190,14 @@ class Container(dict): + +@@ -189,8 +190,12 @@ class Container(dict): if idx is None: raise NonExistentKey(key) - + - self._body[idx] = (None, Null()) +- + old_data = self._body[idx][1] + trivia = getattr(old_data, "trivia", None) + if trivia and getattr(trivia, "comment", None): + self._body[idx] = (None, Comment(Trivia(comment_ws="", comment=trivia.comment))) + else: + self._body[idx] = (None, Null()) -+ super(Container, self).__delitem__(key.key) - -- super(Container, self).__delitem__(key.key) - + super(Container, self).__delitem__(key.key) + return self From 6df7d8861da841e552049dcde9ff9a0f23edc01e Mon Sep 17 00:00:00 2001 From: Frost Ming Date: Sat, 10 Nov 2018 12:34:54 +0800 Subject: [PATCH 16/71] update tomlkit --- Pipfile.lock | 6 +- pipenv/vendor/tomlkit/__init__.py | 2 +- pipenv/vendor/tomlkit/_compat.py | 2 + pipenv/vendor/tomlkit/_utils.py | 17 +- pipenv/vendor/tomlkit/api.py | 2 + pipenv/vendor/tomlkit/container.py | 129 ++++-- pipenv/vendor/tomlkit/exceptions.py | 70 +++- pipenv/vendor/tomlkit/items.py | 53 ++- pipenv/vendor/tomlkit/parser.py | 586 +++++++++++++++------------- pipenv/vendor/tomlkit/source.py | 195 +++++++++ pipenv/vendor/tomlkit/toml_char.py | 19 +- pipenv/vendor/tomlkit/toml_file.py | 3 + 
pipenv/vendor/vendor.txt | 1 + 13 files changed, 742 insertions(+), 343 deletions(-) create mode 100644 pipenv/vendor/tomlkit/source.py diff --git a/Pipfile.lock b/Pipfile.lock index 3990a45167..30a618ac03 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -626,10 +626,10 @@ }, "tomlkit": { "hashes": [ - "sha256:8ab16e93162fc44d3ad83d2aa29a7140b8f7d996ae1790a73b9a7aed6fb504ac", - "sha256:ca181cee7aee805d455628f7c94eb8ae814763769a93e69157f250fe4ebe1926" + "sha256:82a8fbb8d8c6af72e96ba00b9db3e20ef61be6c79082552c9363f4559702258b", + "sha256:a43e0195edc9b3c198cd4b5f0f3d427a395d47c4a76ceba7cc875ed030756c39" ], - "version": "==0.4.4" + "version": "==0.5.2" }, "towncrier": { "editable": true, diff --git a/pipenv/vendor/tomlkit/__init__.py b/pipenv/vendor/tomlkit/__init__.py index 89e4cf595e..92bfa27cbc 100644 --- a/pipenv/vendor/tomlkit/__init__.py +++ b/pipenv/vendor/tomlkit/__init__.py @@ -22,4 +22,4 @@ from .api import ws -__version__ = "0.4.6" +__version__ = "0.5.2" diff --git a/pipenv/vendor/tomlkit/_compat.py b/pipenv/vendor/tomlkit/_compat.py index f94bb10e26..b7407af696 100644 --- a/pipenv/vendor/tomlkit/_compat.py +++ b/pipenv/vendor/tomlkit/_compat.py @@ -141,9 +141,11 @@ def _name_from_offset(delta): if PY2: unicode = unicode chr = unichr + long = long else: unicode = str chr = chr + long = int def decode(string, encodings=None): diff --git a/pipenv/vendor/tomlkit/_utils.py b/pipenv/vendor/tomlkit/_utils.py index f62a354afd..0a68be9f16 100644 --- a/pipenv/vendor/tomlkit/_utils.py +++ b/pipenv/vendor/tomlkit/_utils.py @@ -9,19 +9,30 @@ from ._compat import decode from ._compat import timezone +RFC_3339_LOOSE = re.compile( + "^" + r"(([0-9]+)-(\d{2})-(\d{2}))?" # Date + "(" + "([T ])?" # Separator + r"(\d{2}):(\d{2}):(\d{2})(\.([0-9]+))?" # Time + r"((Z)|([\+|\-]([01][0-9]|2[0-3]):([0-5][0-9])))?" # Timezone + ")?" 
+ "$" +) + RFC_3339_DATETIME = re.compile( "^" "([0-9]+)-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])" # Date "[T ]" # Separator - "([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.([0-9]+))?" # Time - "((Z)|([\+|\-]([01][0-9]|2[0-3]):([0-5][0-9])))?" # Timezone + r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.([0-9]+))?" # Time + r"((Z)|([\+|\-]([01][0-9]|2[0-3]):([0-5][0-9])))?" # Timezone "$" ) RFC_3339_DATE = re.compile("^([0-9]+)-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])$") RFC_3339_TIME = re.compile( - "^([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.([0-9]+))?$" + r"^([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.([0-9]+))?$" ) _utc = timezone(timedelta(), "UTC") diff --git a/pipenv/vendor/tomlkit/api.py b/pipenv/vendor/tomlkit/api.py index 0ac2675262..e541c20c17 100644 --- a/pipenv/vendor/tomlkit/api.py +++ b/pipenv/vendor/tomlkit/api.py @@ -1,5 +1,7 @@ import datetime as _datetime +from typing import Tuple + from ._utils import parse_rfc3339 from .container import Container from .items import AoT diff --git a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py index 987a079036..cb8af1d522 100644 --- a/pipenv/vendor/tomlkit/container.py +++ b/pipenv/vendor/tomlkit/container.py @@ -1,5 +1,13 @@ from __future__ import unicode_literals +from typing import Any +from typing import Dict +from typing import Generator +from typing import List +from typing import Optional +from typing import Tuple +from typing import Union + from ._compat import decode from .exceptions import KeyAlreadyPresent from .exceptions import NonExistentKey @@ -9,7 +17,6 @@ from .items import Key from .items import Null from .items import Table -from .items import Trivia from .items import Whitespace from .items import item as _item @@ -74,7 +81,7 @@ def add( return self.append(key, item) - def append(self, key, item): # type: (Union[Key, str], Item) -> Container + def append(self, key, item): # type: (Union[Key, str, None], Item) -> Container if not 
isinstance(key, Key) and key is not None: key = Key(key) @@ -99,7 +106,11 @@ def append(self, key, item): # type: (Union[Key, str], Item) -> Container self.append(None, Whitespace("\n")) if key is not None and key in self: - current = self._body[self._map[key]][1] + current_idx = self._map[key] + if isinstance(current_idx, tuple): + current_idx = current_idx[0] + + current = self._body[current_idx][1] if isinstance(item, Table): if not isinstance(current, (Table, AoT)): raise KeyAlreadyPresent(key) @@ -121,7 +132,7 @@ def append(self, key, item): # type: (Union[Key, str], Item) -> Container current.append(k, v) return self - else: + elif not item.is_super_table(): raise KeyAlreadyPresent(key) elif isinstance(item, AoT): if not isinstance(current, AoT): @@ -173,7 +184,23 @@ def append(self, key, item): # type: (Union[Key, str], Item) -> Container else: return self._insert_at(0, key, item) - self._map[key] = len(self._body) + if key in self._map: + current_idx = self._map[key] + if isinstance(current_idx, tuple): + current_idx = current_idx[0] + + current = self._body[current_idx][1] + if key is not None and not isinstance(current, Table): + raise KeyAlreadyPresent(key) + + # Adding sub tables to a currently existing table + idx = self._map[key] + if not isinstance(idx, tuple): + idx = (idx,) + + self._map[key] = idx + (len(self._body),) + else: + self._map[key] = len(self._body) self._body.append((key, item)) @@ -190,12 +217,12 @@ def remove(self, key): # type: (Union[Key, str]) -> Container if idx is None: raise NonExistentKey(key) - old_data = self._body[idx][1] - trivia = getattr(old_data, "trivia", None) - if trivia and getattr(trivia, "comment", None): - self._body[idx] = (None, Comment(Trivia(comment_ws="", comment=trivia.comment))) + if isinstance(idx, tuple): + for i in idx: + self._body[i] = (None, Null()) else: self._body[idx] = (None, Null()) + super(Container, self).__delitem__(key.key) return self @@ -224,7 +251,16 @@ def _insert_after( # Increment 
indices after the current index for k, v in self._map.items(): - if v > idx: + if isinstance(v, tuple): + new_indices = [] + for v_ in v: + if v_ > idx: + v_ = v_ + 1 + + new_indices.append(v_) + + self._map[k] = tuple(new_indices) + elif v > idx: self._map[k] = v + 1 self._map[other_key] = idx + 1 @@ -257,7 +293,16 @@ def _insert_at( # Increment indices after the current index for k, v in self._map.items(): - if v >= idx: + if isinstance(v, tuple): + new_indices = [] + for v_ in v: + if v_ >= idx: + v_ = v_ + 1 + + new_indices.append(v_) + + self._map[k] = tuple(new_indices) + elif v >= idx: self._map[k] = v + 1 self._map[key] = idx @@ -286,29 +331,7 @@ def as_string(self, prefix=None): # type: () -> str s = "" for k, v in self._body: if k is not None: - if False: - key = k.as_string() - - for _k, _v in v.value.body: - if _k is None: - s += v.as_string() - elif isinstance(_v, Table): - s += v.as_string(prefix=key) - else: - _key = key - if prefix is not None: - _key = prefix + "." + _key - - s += "{}{}{}{}{}{}{}".format( - _v.trivia.indent, - _key + "." + decode(_k.as_string()), - _k.sep, - decode(_v.as_string()), - _v.trivia.comment_ws, - decode(_v.trivia.comment), - _v.trivia.trail, - ) - elif isinstance(v, Table): + if isinstance(v, Table): s += self._render_table(k, v) elif isinstance(v, AoT): s += self._render_aot(k, v) @@ -332,7 +355,12 @@ def _render_table( if prefix is not None: _key = prefix + "." 
+ _key - if not table.is_super_table(): + if not table.is_super_table() or ( + any( + not isinstance(v, (Table, AoT, Whitespace)) for _, v in table.value.body + ) + and not key.is_dotted() + ): open_, close = "[", "]" if table.is_aot_element(): open_, close = "[[", "]]" @@ -465,7 +493,7 @@ def __contains__(self, key): # type: (Union[Key, str]) -> bool return key in self._map - def __getitem__(self, key): # type: (Union[Key, str]) -> Item + def __getitem__(self, key): # type: (Union[Key, str]) -> Union[Item, Container] if not isinstance(key, Key): key = Key(key) @@ -473,6 +501,20 @@ def __getitem__(self, key): # type: (Union[Key, str]) -> Item if idx is None: raise NonExistentKey(key) + if isinstance(idx, tuple): + container = Container(True) + + for i in idx: + item = self._body[i][1] + + if isinstance(item, Table): + for k, v in item.value.body: + container.append(k, v) + else: + container.append(key, item) + + return container + item = self._body[idx][1] return item.value @@ -503,11 +545,20 @@ def _replace( def _replace_at( self, idx, new_key, value - ): # type: (int, Union[Key, str], Item) -> None + ): # type: (Union[int, Tuple[int]], Union[Key, str], Item) -> None + if isinstance(idx, tuple): + for i in idx[1:]: + self._body[i] = (None, Null()) + + idx = idx[0] + k, v = self._body[idx] self._map[new_key] = self._map.pop(k) + if isinstance(self._map[new_key], tuple): + self._map[new_key] = self._map[new_key][0] + value = _item(value) # Copying trivia @@ -517,6 +568,10 @@ def _replace_at( value.trivia.comment = v.trivia.comment value.trivia.trail = v.trivia.trail + if isinstance(value, Table): + # Insert a cosmetic new line for tables + value.append(None, Whitespace("\n")) + self._body[idx] = (new_key, value) super(Container, self).__setitem__(new_key.key, value.value) diff --git a/pipenv/vendor/tomlkit/exceptions.py b/pipenv/vendor/tomlkit/exceptions.py index 46ee938b45..4fbc667bdc 100644 --- a/pipenv/vendor/tomlkit/exceptions.py +++ 
b/pipenv/vendor/tomlkit/exceptions.py @@ -1,3 +1,6 @@ +from typing import Optional + + class TOMLKitError(Exception): pass @@ -23,6 +26,14 @@ def __init__( "{} at line {} col {}".format(message, self._line, self._col) ) + @property + def line(self): + return self._line + + @property + def col(self): + return self._col + class MixedArrayTypesError(ParseError): """ @@ -35,6 +46,50 @@ def __init__(self, line, col): # type: (int, int) -> None super(MixedArrayTypesError, self).__init__(line, col, message=message) +class InvalidNumberError(ParseError): + """ + A numeric field was improperly specified. + """ + + def __init__(self, line, col): # type: (int, int) -> None + message = "Invalid number" + + super(InvalidNumberError, self).__init__(line, col, message=message) + + +class InvalidDateTimeError(ParseError): + """ + A datetime field was improperly specified. + """ + + def __init__(self, line, col): # type: (int, int) -> None + message = "Invalid datetime" + + super(InvalidDateTimeError, self).__init__(line, col, message=message) + + +class InvalidDateError(ParseError): + """ + A date field was improperly specified. + """ + + def __init__(self, line, col): # type: (int, int) -> None + message = "Invalid date" + + super(InvalidDateError, self).__init__(line, col, message=message) + + +class InvalidTimeError(ParseError): + """ + A date field was improperly specified. + """ + + def __init__(self, line, col): # type: (int, int) -> None + message = "Invalid time" + + super(InvalidTimeError, self).__init__(line, col, message=message) + + class InvalidNumberOrDateError(ParseError): """ A numeric or date field was improperly specified. @@ -46,6 +101,17 @@ def __init__(self, line, col): # type: (int, int) -> None super(InvalidNumberOrDateError, self).__init__(line, col, message=message) +class InvalidUnicodeValueError(ParseError): + """ + A unicode code was improperly specified. 
+ """ + + def __init__(self, line, col): # type: (int, int) -> None + message = "Invalid unicode value" + + super(InvalidUnicodeValueError, self).__init__(line, col, message=message) + + class UnexpectedCharError(ParseError): """ An unexpected character was found during parsing. @@ -106,7 +172,9 @@ class InternalParserError(ParseError): An error that indicates a bug in the parser. """ - def __init__(self, line, col, message=None): # type: (int, int) -> None + def __init__( + self, line, col, message=None + ): # type: (int, int, Optional[str]) -> None msg = "Internal parser error" if message: msg += " ({})".format(message) diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py index c3c2d59ff5..375b5f0251 100644 --- a/pipenv/vendor/tomlkit/items.py +++ b/pipenv/vendor/tomlkit/items.py @@ -6,14 +6,18 @@ from datetime import date from datetime import datetime from datetime import time -import sys -if sys.version_info >= (3, 4): - from enum import Enum -else: - from pipenv.vendor.backports.enum import Enum +from enum import Enum +from typing import Any +from typing import Dict +from typing import Generator +from typing import List +from typing import Optional +from typing import Union + from ._compat import PY2 from ._compat import decode +from ._compat import long from ._compat import unicode from ._utils import escape_string @@ -21,7 +25,6 @@ from pipenv.vendor.backports.functools_lru_cache import lru_cache else: from functools import lru_cache -from toml.decoder import InlineTableDict def item(value, _parent=None): @@ -37,10 +40,7 @@ def item(value, _parent=None): elif isinstance(value, float): return Float(value, Trivia(), str(value)) elif isinstance(value, dict): - if isinstance(value, InlineTableDict): - val = InlineTable(Container(), Trivia()) - else: - val = Table(Container(), Trivia(), False) + val = Table(Container(), Trivia(), False) for k, v in sorted(value.items(), key=lambda i: (isinstance(i[1], dict), i[0])): val[k] = item(v, 
_parent=val) @@ -124,6 +124,24 @@ def toggle(self): # type: () -> StringType }[self] +class BoolType(Enum): + TRUE = "true" + FALSE = "false" + + @lru_cache(maxsize=None) + def __bool__(self): + return {BoolType.TRUE: True, BoolType.FALSE: False}[self] + + if PY2: + __nonzero__ = __bool__ # for PY2 + + def __iter__(self): + return iter(self.value) + + def __len__(self): + return len(self.value) + + class Trivia: """ Trivia information (aka metadata). @@ -310,7 +328,7 @@ def __str__(self): # type: () -> str return "{}{}".format(self._trivia.indent, decode(self._trivia.comment)) -class Integer(int, Item): +class Integer(long, Item): """ An integer literal. """ @@ -449,10 +467,10 @@ class Bool(Item): A boolean literal. """ - def __init__(self, value, trivia): # type: (float, Trivia) -> None + def __init__(self, t, trivia): # type: (float, Trivia) -> None super(Bool, self).__init__(trivia) - self._value = value + self._value = bool(t) @property def discriminant(self): # type: () -> int @@ -747,10 +765,6 @@ def value(self): # type: () -> tomlkit.container.Container def discriminant(self): # type: () -> int return 9 - @property - def value(self): # type: () -> tomlkit.container.Container - return self._value - def add(self, key, item=None): # type: (Union[Key, Item, str], Any) -> Item if item is None: if not isinstance(key, (Comment, Whitespace)): @@ -924,6 +938,8 @@ def append(self, key, _item): # type: (Union[Key, str], Any) -> InlineTable if not isinstance(_item, (Whitespace, Comment)): if not _item.trivia.indent and len(self._value) > 0: _item.trivia.indent = " " + if _item.trivia.comment: + _item.trivia.comment = "" self._value.append(key, _item) @@ -1003,8 +1019,7 @@ def __setitem__(self, key, value): # type: (Union[Key, str], Any) -> None if key is not None: super(InlineTable, self).__setitem__(key, value) - - if hasattr(value, "trivia") and value.trivia.comment: + if value.trivia.comment: value.trivia.comment = "" m = re.match("(?s)^[^ ]*([ ]+).*$", 
self._trivia.indent) diff --git a/pipenv/vendor/tomlkit/parser.py b/pipenv/vendor/tomlkit/parser.py index 7971d9a27c..7b9483313f 100644 --- a/pipenv/vendor/tomlkit/parser.py +++ b/pipenv/vendor/tomlkit/parser.py @@ -1,24 +1,31 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -import datetime -import itertools import re import string -from copy import copy +from typing import Any +from typing import Generator +from typing import List +from typing import Optional +from typing import Tuple +from typing import Union -from ._compat import PY2 from ._compat import chr from ._compat import decode from ._utils import _escaped +from ._utils import RFC_3339_LOOSE from ._utils import parse_rfc3339 from .container import Container from .exceptions import EmptyKeyError from .exceptions import EmptyTableNameError from .exceptions import InternalParserError from .exceptions import InvalidCharInStringError -from .exceptions import InvalidNumberOrDateError +from .exceptions import InvalidDateTimeError +from .exceptions import InvalidDateError +from .exceptions import InvalidTimeError +from .exceptions import InvalidNumberError +from .exceptions import InvalidUnicodeValueError from .exceptions import MixedArrayTypesError from .exceptions import ParseError from .exceptions import UnexpectedCharError @@ -26,12 +33,14 @@ from .items import AoT from .items import Array from .items import Bool +from .items import BoolType from .items import Comment from .items import Date from .items import DateTime from .items import Float from .items import InlineTable from .items import Integer +from .items import Item from .items import Key from .items import KeyType from .items import Null @@ -41,6 +50,7 @@ from .items import Time from .items import Trivia from .items import Whitespace +from .source import Source from .toml_char import TOMLChar from .toml_document import TOMLDocument @@ -52,68 +62,69 @@ class Parser: def __init__(self, string): # type: (str) -> None # Input to 
parse - self._src = decode(string) # type: str - # Iterator used for getting characters from src. - self._chars = iter([(i, TOMLChar(c)) for i, c in enumerate(self._src)]) - # Current byte offset into src. - self._idx = 0 - # Current character - self._current = TOMLChar("") # type: TOMLChar - # Index into src between which and idx slices will be extracted - self._marker = 0 + self._src = Source(decode(string)) self._aot_stack = [] - self.inc() + @property + def _state(self): + return self._src.state + + @property + def _idx(self): + return self._src.idx + + @property + def _current(self): + return self._src.current + + @property + def _marker(self): + return self._src.marker def extract(self): # type: () -> str """ Extracts the value between marker and index """ - if self.end(): - return self._src[self._marker :] - else: - return self._src[self._marker : self._idx] + return self._src.extract() - def inc(self, exception=None): # type: () -> bool + def inc(self, exception=None): # type: (Optional[ParseError.__class__]) -> bool """ Increments the parser if the end of the input has not been reached. Returns whether or not it was able to advance. """ - try: - self._idx, self._current = next(self._chars) - - return True - except StopIteration: - self._idx = len(self._src) - self._current = TOMLChar("\0") - - if not exception: - return False - raise exception + return self._src.inc(exception=exception) - def inc_n(self, n, exception=None): # type: (int) -> bool + def inc_n(self, n, exception=None): # type: (int, Optional[ParseError]) -> bool """ Increments the parser by n characters if the end of the input has not been reached. """ - for _ in range(n): - if not self.inc(exception=exception): - return False + return self._src.inc_n(n=n, exception=exception) - return True + def consume(self, chars, min=0, max=-1): + """ + Consume chars until min/max is satisfied is valid. 
+ """ + return self._src.consume(chars=chars, min=min, max=max) def end(self): # type: () -> bool """ Returns True if the parser has reached the end of the input. """ - return self._idx >= len(self._src) or self._current == "\0" + return self._src.end() def mark(self): # type: () -> None """ Sets the marker to the index's current position """ - self._marker = self._idx + self._src.mark() + + def parse_error(self, exception=ParseError, *args): + """ + Creates a generic "parse error" at the current position. + """ + return self._src.parse_error(exception, *args) def parse(self): # type: () -> TOMLDocument body = TOMLDocument(True) @@ -173,27 +184,6 @@ def _merge_ws(self, item, container): # type: (Item, Container) -> bool return True - def parse_error(self, kind=ParseError, args=None): # type: () -> None - """ - Creates a generic "parse error" at the current position. - """ - line, col = self._to_linecol(self._idx) - - if args: - return kind(line, col, *args) - else: - return kind(line, col) - - def _to_linecol(self, offset): # type: (int) -> Tuple[int, int] - cur = 0 - for i, line in enumerate(self._src.splitlines()): - if cur + len(line) + 1 > offset: - return (i + 1, offset - cur) - - cur += len(line) + 1 - - return len(self._src.splitlines()), 0 - def _is_child(self, parent, child): # type: (str, str) -> bool """ Returns whether a key is strictly a child of another key. @@ -256,55 +246,35 @@ def _parse_item(self): # type: () -> Optional[Tuple[Optional[Key], Item]] if the item is value-like. """ self.mark() - saved_idx = self._save_idx() - - while True: - c = self._current - if c == "\n": - # Found a newline; Return all whitespace found up to this point. - self.inc() - - return (None, Whitespace(self.extract())) - elif c in " \t\r": - # Skip whitespace. 
- if not self.inc(): - return (None, Whitespace(self.extract())) - elif c == "#": - # Found a comment, parse it - indent = self.extract() - cws, comment, trail = self._parse_comment_trail() - - return (None, Comment(Trivia(indent, cws, comment, trail))) - elif c == "[": - # Found a table, delegate to the calling function. - return - else: - # Begining of a KV pair. - # Return to beginning of whitespace so it gets included - # as indentation for the KV about to be parsed. - self._restore_idx(*saved_idx) - key, value = self._parse_key_value(True) - - return key, value - - def _save_idx(self): # type: () -> Tuple[Iterator, int, str] - if PY2: - # Python 2.7 does not allow to directly copy - # an iterator, so we have to make tees of the original - # chars iterator. - chars1, chars2 = itertools.tee(self._chars) - - # We can no longer use the original chars iterator. - self._chars = chars1 + with self._state as state: + while True: + c = self._current + if c == "\n": + # Found a newline; Return all whitespace found up to this point. + self.inc() - return chars2, self._idx, self._current + return None, Whitespace(self.extract()) + elif c in " \t\r": + # Skip whitespace. + if not self.inc(): + return None, Whitespace(self.extract()) + elif c == "#": + # Found a comment, parse it + indent = self.extract() + cws, comment, trail = self._parse_comment_trail() - return copy(self._chars), self._idx, self._current + return None, Comment(Trivia(indent, cws, comment, trail)) + elif c == "[": + # Found a table, delegate to the calling function. + return + else: + # Begining of a KV pair. + # Return to beginning of whitespace so it gets included + # as indentation for the KV about to be parsed. 
+ state.restore = True + break - def _restore_idx(self, chars, idx, current): # type: (Iterator, int, str) -> None - self._chars = chars - self._idx = idx - self._current = current + return self._parse_key_value(True) def _parse_comment_trail(self): # type: () -> Tuple[str, str, str] """ @@ -341,7 +311,7 @@ def _parse_comment_trail(self): # type: () -> Tuple[str, str, str] elif c in " \t\r": self.inc() else: - raise self.parse_error(UnexpectedCharError, (c)) + raise self.parse_error(UnexpectedCharError, c) if self.end(): break @@ -361,9 +331,7 @@ def _parse_comment_trail(self): # type: () -> Tuple[str, str, str] return comment_ws, comment, trail - def _parse_key_value( - self, parse_comment=False, inline=True - ): # type: (bool, bool) -> (Key, Item) + def _parse_key_value(self, parse_comment=False): # type: (bool) -> (Key, Item) # Leading indent self.mark() @@ -383,7 +351,7 @@ def _parse_key_value( while self._current.is_kv_sep() and self.inc(): if self._current == "=": if found_equals: - raise self.parse_error(UnexpectedCharError, ("=",)) + raise self.parse_error(UnexpectedCharError, "=") else: found_equals = True pass @@ -473,7 +441,7 @@ def _parse_bare_key(self): # type: () -> Key def _handle_dotted_key( self, container, key, value - ): # type: (Container, Key) -> None + ): # type: (Container, Key, Any) -> None names = tuple(self._split_table_name(key.key)) name = names[0] name._dotted = True @@ -510,119 +478,199 @@ def _parse_value(self): # type: () -> Item Attempts to parse a value at the current position. 
""" self.mark() + c = self._current trivia = Trivia() - c = self._current - if c == '"': + if c == StringType.SLB.value: return self._parse_basic_string() - elif c == "'": + elif c == StringType.SLL.value: return self._parse_literal_string() - elif c == "t" and self._src[self._idx :].startswith("true"): - # Boolean: true - self.inc_n(4) + elif c == BoolType.TRUE.value[0]: + return self._parse_true() + elif c == BoolType.FALSE.value[0]: + return self._parse_false() + elif c == "[": + return self._parse_array() + elif c == "{": + return self._parse_inline_table() + elif c in "+-" or self._peek(4) in { + "+inf", + "-inf", + "inf", + "+nan", + "-nan", + "nan", + }: + # Number + while self._current not in " \t\n\r#,]}" and self.inc(): + pass - return Bool(True, trivia) - elif c == "f" and self._src[self._idx :].startswith("false"): - # Boolean: true - self.inc_n(5) + raw = self.extract() - return Bool(False, trivia) - elif c == "[": - # Array - elems = [] # type: List[Item] - self.inc() + item = self._parse_number(raw, trivia) + if item is not None: + return item - while self._current != "]": - self.mark() - while self._current.is_ws() or self._current == ",": - self.inc() + raise self.parse_error(InvalidNumberError) + elif c in string.digits: + # Integer, Float, Date, Time or DateTime + while self._current not in " \t\n\r#,]}" and self.inc(): + pass - if self._idx != self._marker: - elems.append(Whitespace(self.extract())) + raw = self.extract() - if self._current == "]": - break + m = RFC_3339_LOOSE.match(raw) + if m: + if m.group(1) and m.group(5): + # datetime + try: + return DateTime(parse_rfc3339(raw), trivia, raw) + except ValueError: + raise self.parse_error(InvalidDateTimeError) + + if m.group(1): + try: + return Date(parse_rfc3339(raw), trivia, raw) + except ValueError: + raise self.parse_error(InvalidDateError) + + if m.group(5): + try: + return Time(parse_rfc3339(raw), trivia, raw) + except ValueError: + raise self.parse_error(InvalidTimeError) - if 
self._current == "#": - cws, comment, trail = self._parse_comment_trail() + item = self._parse_number(raw, trivia) + if item is not None: + return item - next_ = Comment(Trivia("", cws, comment, trail)) - else: - next_ = self._parse_value() + raise self.parse_error(InvalidNumberError) + else: + raise self.parse_error(UnexpectedCharError, c) - elems.append(next_) + def _parse_true(self): + return self._parse_bool(BoolType.TRUE) - self.inc() + def _parse_false(self): + return self._parse_bool(BoolType.FALSE) - try: - res = Array(elems, trivia) - except ValueError: - raise self.parse_error(MixedArrayTypesError) + def _parse_bool(self, style): # type: (BoolType) -> Bool + with self._state: + style = BoolType(style) - if res.is_homogeneous(): - return res + # only keep parsing for bool if the characters match the style + # try consuming rest of chars in style + for c in style: + self.consume(c, min=1, max=1) - raise self.parse_error(MixedArrayTypesError) - elif c == "{": - # Inline table - elems = Container(True) - self.inc() + return Bool(style, Trivia()) - while self._current != "}": - self.mark() - while self._current.is_spaces() or self._current == ",": - self.inc() + def _parse_array(self): # type: () -> Array + # Consume opening bracket, EOF here is an issue (middle of array) + self.inc(exception=UnexpectedEofError) - if self._idx != self._marker: - ws = self.extract().lstrip(",") - if ws: - elems.append(None, Whitespace(ws)) + elems = [] # type: List[Item] + prev_value = None + while True: + # consume whitespace + mark = self._idx + self.consume(TOMLChar.SPACES) + newline = self.consume(TOMLChar.NL) + indent = self._src[mark : self._idx] + if newline: + elems.append(Whitespace(indent)) + continue - if self._current == "}": - break + # consume comment + if self._current == "#": + cws, comment, trail = self._parse_comment_trail() + elems.append(Comment(Trivia(indent, cws, comment, trail))) + continue - key, val = self._parse_key_value(False, inline=True) - 
elems.append(key, val) + # consume indent + if indent: + elems.append(Whitespace(indent)) + continue - self.inc() + # consume value + if not prev_value: + try: + elems.append(self._parse_value()) + prev_value = True + continue + except UnexpectedCharError: + pass - return InlineTable(elems, trivia) - elif c in string.digits + "+-" or self._peek(4) in { - "+inf", - "-inf", - "inf", - "+nan", - "-nan", - "nan", - }: - # Integer, Float, Date, Time or DateTime - while self._current not in " \t\n\r#,]}" and self.inc(): - pass + # consume comma + if prev_value and self._current == ",": + self.inc(exception=UnexpectedEofError) + elems.append(Whitespace(",")) + prev_value = False + continue - raw = self.extract() + # consume closing bracket + if self._current == "]": + # consume closing bracket, EOF here doesn't matter + self.inc() + break - item = self._parse_number(raw, trivia) - if item is not None: - return item + raise self.parse_error(UnexpectedCharError, self._current) - try: - res = parse_rfc3339(raw) - except ValueError: - res = None + try: + res = Array(elems, Trivia()) + except ValueError: + pass + else: + if res.is_homogeneous(): + return res - if res is None: - raise self.parse_error(InvalidNumberOrDateError) + raise self.parse_error(MixedArrayTypesError) + + def _parse_inline_table(self): # type: () -> InlineTable + # consume opening bracket, EOF here is an issue (middle of array) + self.inc(exception=UnexpectedEofError) - if isinstance(res, datetime.datetime): - return DateTime(res, trivia, raw) - elif isinstance(res, datetime.time): - return Time(res, trivia, raw) - elif isinstance(res, datetime.date): - return Date(res, trivia, raw) + elems = Container(True) + trailing_comma = None + while True: + # consume leading whitespace + mark = self._idx + self.consume(TOMLChar.SPACES) + raw = self._src[mark : self._idx] + if raw: + elems.add(Whitespace(raw)) + + if not trailing_comma: + # None: empty inline table + # False: previous key-value pair was not followed 
by a comma + if self._current == "}": + # consume closing bracket, EOF here doesn't matter + self.inc() + break + if trailing_comma is False: + raise self.parse_error(UnexpectedCharError, self._current) else: - raise self.parse_error(InvalidNumberOrDateError) - else: - raise self.parse_error(UnexpectedCharError, (c)) + # True: previous key-value pair was followed by a comma + if self._current == "}": + raise self.parse_error(UnexpectedCharError, self._current) + + key, val = self._parse_key_value(False) + elems.add(key, val) + + # consume trailing whitespace + mark = self._idx + self.consume(TOMLChar.SPACES) + raw = self._src[mark : self._idx] + if raw: + elems.add(Whitespace(raw)) + + # consume trailing comma + trailing_comma = self._current == "," + if trailing_comma: + # consume closing bracket, EOF here is an issue (middle of inline table) + self.inc(exception=UnexpectedEofError) + + return InlineTable(elems, Trivia()) def _parse_number(self, raw, trivia): # type: (str, Trivia) -> Optional[Item] # Leading zeros are not allowed @@ -670,11 +718,13 @@ def _parse_number(self, raw, trivia): # type: (str, Trivia) -> Optional[Item] except ValueError: return - def _parse_literal_string(self): # type: () -> Item - return self._parse_string(StringType.SLL) + def _parse_literal_string(self): # type: () -> String + with self._state: + return self._parse_string(StringType.SLL) - def _parse_basic_string(self): # type: () -> Item - return self._parse_string(StringType.SLB) + def _parse_basic_string(self): # type: () -> String + with self._state: + return self._parse_string(StringType.SLB) def _parse_escaped_char(self, multiline): if multiline and self._current.is_ws(): @@ -696,7 +746,7 @@ def _parse_escaped_char(self, multiline): # the escape followed by whitespace must have a newline # before any other chars if "\n" not in tmp: - raise self.parse_error(InvalidCharInStringError, (self._current,)) + raise self.parse_error(InvalidCharInStringError, self._current) return "" @@ 
-717,15 +767,17 @@ def _parse_escaped_char(self, multiline): return u - raise self.parse_error(InvalidCharInStringError, (self._current,)) + raise self.parse_error(InvalidUnicodeValueError) - def _parse_string(self, delim): # type: (str) -> Item - delim = StringType(delim) - assert delim.is_singleline() + raise self.parse_error(InvalidCharInStringError, self._current) + def _parse_string(self, delim): # type: (StringType) -> String # only keep parsing for string if the current character matches the delim if self._current != delim.unit: - raise ValueError("Expecting a {!r} character".format(delim)) + raise self.parse_error( + InternalParserError, + "Invalid character for string type {}".format(delim), + ) # consume the opening/first delim, EOF here is an issue # (middle of string or middle of delim) @@ -755,7 +807,7 @@ def _parse_string(self, delim): # type: (str) -> Item while True: if delim.is_singleline() and self._current.is_nl(): # single line cannot have actual newline characters - raise self.parse_error(InvalidCharInStringError, (self._current,)) + raise self.parse_error(InvalidCharInStringError, self._current) elif not escaped and self._current == delim.unit: # try to process current as a closing delim original = self.extract() @@ -781,8 +833,6 @@ def _parse_string(self, delim): # type: (str) -> Item if not close: # if there is no close characters, keep parsing continue else: - close = delim.unit - # consume the closing delim, we do not care if EOF occurs as # that would simply imply the end of self._src self.inc() @@ -817,8 +867,7 @@ def _parse_table( """ if self._current != "[": raise self.parse_error( - InternalParserError, - ("_parse_table() called on non-bracket character.",), + InternalParserError, "_parse_table() called on non-bracket character." 
) indent = self.extract() @@ -945,7 +994,7 @@ def _parse_table( else: raise self.parse_error( InternalParserError, - ("_parse_item() returned None on a non-bracket character.",), + "_parse_item() returned None on a non-bracket character.", ) if isinstance(result, Null): @@ -970,32 +1019,27 @@ def _peek_table(self): # type: () -> Tuple[bool, str] Returns the name of the table about to be parsed, as well as whether it is part of an AoT. """ - # Save initial state - idx = self._save_idx() - marker = self._marker - - if self._current != "[": - raise self.parse_error( - InternalParserError, ("_peek_table() entered on non-bracket character",) - ) + # we always want to restore after exiting this scope + with self._state(save_marker=True, restore=True): + if self._current != "[": + raise self.parse_error( + InternalParserError, + "_peek_table() entered on non-bracket character", + ) - # AoT - self.inc() - is_aot = False - if self._current == "[": + # AoT self.inc() - is_aot = True - - self.mark() + is_aot = False + if self._current == "[": + self.inc() + is_aot = True - while self._current != "]" and self.inc(): - table_name = self.extract() + self.mark() - # Restore initial state - self._restore_idx(*idx) - self._marker = marker + while self._current != "]" and self.inc(): + table_name = self.extract() - return is_aot, table_name + return is_aot, table_name def _parse_aot(self, first, name_first): # type: (Table, str) -> AoT """ @@ -1022,57 +1066,53 @@ def _peek(self, n): # type: (int) -> str n is the max number of characters that will be peeked. 
""" - idx = self._save_idx() - buf = "" - for _ in range(n): - if self._current not in " \t\n\r#,]}": - buf += self._current - self.inc() - continue - - break - - self._restore_idx(*idx) + # we always want to restore after exiting this scope + with self._state(restore=True): + buf = "" + for _ in range(n): + if self._current not in " \t\n\r#,]}": + buf += self._current + self.inc() + continue - return buf + break + return buf - def _peek_unicode(self, is_long): # type: () -> Tuple[bool, str] + def _peek_unicode( + self, is_long + ): # type: (bool) -> Tuple[Optional[str], Optional[str]] """ Peeks ahead non-intrusively by cloning then restoring the initial state of the parser. Returns the unicode value is it's a valid one else None. """ - # Save initial state - idx = self._save_idx() - marker = self._marker - - if self._current not in {"u", "U"}: - raise self.parse_error( - InternalParserError, ("_peek_unicode() entered on non-unicode value") - ) + # we always want to restore after exiting this scope + with self._state(save_marker=True, restore=True): + if self._current not in {"u", "U"}: + raise self.parse_error( + InternalParserError, "_peek_unicode() entered on non-unicode value" + ) - # AoT - self.inc() # Dropping prefix - self.mark() + self.inc() # Dropping prefix + self.mark() - if is_long: - chars = 8 - else: - chars = 4 + if is_long: + chars = 8 + else: + chars = 4 - if not self.inc_n(chars): - value, extracted = None, None - else: - extracted = self.extract() + if not self.inc_n(chars): + value, extracted = None, None + else: + extracted = self.extract() - try: - value = chr(int(extracted, 16)) - except ValueError: - value = None + if extracted[0].lower() == "d" and extracted[1].strip("01234567"): + return None, None - # Restore initial state - self._restore_idx(*idx) - self._marker = marker + try: + value = chr(int(extracted, 16)) + except ValueError: + value = None - return value, extracted + return value, extracted diff --git 
a/pipenv/vendor/tomlkit/source.py b/pipenv/vendor/tomlkit/source.py new file mode 100644 index 0000000000..1a96e05893 --- /dev/null +++ b/pipenv/vendor/tomlkit/source.py @@ -0,0 +1,195 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +import itertools + +from copy import copy +from typing import Optional +from typing import Tuple + +from ._compat import PY2 +from ._compat import unicode +from .exceptions import UnexpectedEofError +from .exceptions import UnexpectedCharError +from .exceptions import ParseError +from .toml_char import TOMLChar + + +class _State: + def __init__( + self, source, save_marker=False, restore=False + ): # type: (_Source, Optional[bool], Optional[bool]) -> None + self._source = source + self._save_marker = save_marker + self.restore = restore + + def __enter__(self): # type: () -> None + # Entering this context manager - save the state + if PY2: + # Python 2.7 does not allow to directly copy + # an iterator, so we have to make tees of the original + # chars iterator. + self._source._chars, self._chars = itertools.tee(self._source._chars) + else: + self._chars = copy(self._source._chars) + self._idx = self._source._idx + self._current = self._source._current + self._marker = self._source._marker + + return self + + def __exit__(self, exception_type, exception_val, trace): + # Exiting this context manager - restore the prior state + if self.restore or exception_type: + self._source._chars = self._chars + self._source._idx = self._idx + self._source._current = self._current + if self._save_marker: + self._source._marker = self._marker + + # Restore exceptions are silently consumed, other exceptions need to + # propagate + return exception_type is None + + +class _StateHandler: + """ + State preserver for the Parser. 
+ """ + + def __init__(self, source): # type: (Source) -> None + self._source = source + self._states = [] + + def __call__(self, *args, **kwargs): + return _State(self._source, *args, **kwargs) + + def __enter__(self): # type: () -> None + state = self() + self._states.append(state) + return state.__enter__() + + def __exit__(self, exception_type, exception_val, trace): + state = self._states.pop() + return state.__exit__(exception_type, exception_val, trace) + + +class Source(unicode): + EOF = TOMLChar("\0") + + def __init__(self, _): # type: (unicode) -> None + super(Source, self).__init__() + + # Collection of TOMLChars + self._chars = iter([(i, TOMLChar(c)) for i, c in enumerate(self)]) + + self._idx = 0 + self._marker = 0 + self._current = TOMLChar("") + + self._state = _StateHandler(self) + + self.inc() + + def reset(self): + # initialize both idx and current + self.inc() + + # reset marker + self.mark() + + @property + def state(self): # type: () -> _StateHandler + return self._state + + @property + def idx(self): # type: () -> int + return self._idx + + @property + def current(self): # type: () -> TOMLChar + return self._current + + @property + def marker(self): # type: () -> int + return self._marker + + def extract(self): # type: () -> unicode + """ + Extracts the value between marker and index + """ + return self[self._marker : self._idx] + + def inc(self, exception=None): # type: (Optional[ParseError.__class__]) -> bool + """ + Increments the parser if the end of the input has not been reached. + Returns whether or not it was able to advance. + """ + try: + self._idx, self._current = next(self._chars) + + return True + except StopIteration: + self._idx = len(self) + self._current = self.EOF + if exception: + raise self.parse_error(exception) + + return False + + def inc_n(self, n, exception=None): # type: (int, Exception) -> bool + """ + Increments the parser by n characters + if the end of the input has not been reached. 
+ """ + for _ in range(n): + if not self.inc(exception=exception): + return False + + return True + + def consume(self, chars, min=0, max=-1): + """ + Consume chars until min/max is satisfied is valid. + """ + while self.current in chars and max != 0: + min -= 1 + max -= 1 + if not self.inc(): + break + + # failed to consume minimum number of characters + if min > 0: + self.parse_error(UnexpectedCharError) + + def end(self): # type: () -> bool + """ + Returns True if the parser has reached the end of the input. + """ + return self._current is self.EOF + + def mark(self): # type: () -> None + """ + Sets the marker to the index's current position + """ + self._marker = self._idx + + def parse_error( + self, exception=ParseError, *args + ): # type: (ParseError.__class__, ...) -> ParseError + """ + Creates a generic "parse error" at the current position. + """ + line, col = self._to_linecol() + + return exception(line, col, *args) + + def _to_linecol(self): # type: () -> Tuple[int, int] + cur = 0 + for i, line in enumerate(self.splitlines()): + if cur + len(line) + 1 > self.idx: + return (i + 1, self.idx - cur) + + cur += len(line) + 1 + + return len(self.splitlines()), 0 diff --git a/pipenv/vendor/tomlkit/toml_char.py b/pipenv/vendor/tomlkit/toml_char.py index 5164ea8b2a..02c5517289 100644 --- a/pipenv/vendor/tomlkit/toml_char.py +++ b/pipenv/vendor/tomlkit/toml_char.py @@ -16,44 +16,51 @@ def __init__(self, c): if len(self) > 1: raise ValueError("A TOML character must be of length 1") + BARE = string.ascii_letters + string.digits + "-_" + KV = "= \t" + NUMBER = string.digits + "+-_.e" + SPACES = " \t" + NL = "\n\r" + WS = SPACES + NL + @lru_cache(maxsize=None) def is_bare_key_char(self): # type: () -> bool """ Whether the character is a valid bare key name or not. 
""" - return self in string.ascii_letters + string.digits + "-" + "_" + return self in self.BARE @lru_cache(maxsize=None) def is_kv_sep(self): # type: () -> bool """ Whether the character is a valid key/value separator ot not. """ - return self in "= \t" + return self in self.KV @lru_cache(maxsize=None) def is_int_float_char(self): # type: () -> bool """ Whether the character if a valid integer or float value character or not. """ - return self in string.digits + "+" + "-" + "_" + "." + "e" + return self in self.NUMBER @lru_cache(maxsize=None) def is_ws(self): # type: () -> bool """ Whether the character is a whitespace character or not. """ - return self in " \t\r\n" + return self in self.WS @lru_cache(maxsize=None) def is_nl(self): # type: () -> bool """ Whether the character is a new line character or not. """ - return self in "\n\r" + return self in self.NL @lru_cache(maxsize=None) def is_spaces(self): # type: () -> bool """ Whether the character is a space or not """ - return self in " \t" + return self in self.SPACES diff --git a/pipenv/vendor/tomlkit/toml_file.py b/pipenv/vendor/tomlkit/toml_file.py index 631e995958..3b416664dd 100644 --- a/pipenv/vendor/tomlkit/toml_file.py +++ b/pipenv/vendor/tomlkit/toml_file.py @@ -1,5 +1,8 @@ import io +from typing import Any +from typing import Dict + from .api import loads from .toml_document import TOMLDocument diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 45ff0384c0..3228068f84 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -51,3 +51,4 @@ git+https://github.com/sarugaku/passa.git@master#egg=passa cursor==1.2.0 resolvelib==0.2.2 backports.functools_lru_cache==1.5 +tomlkit \ No newline at end of file From 1f7c9ef949e7d68b6dd642158ee35772614cb2e0 Mon Sep 17 00:00:00 2001 From: Frost Ming Date: Sat, 10 Nov 2018 12:48:26 +0800 Subject: [PATCH 17/71] update patch --- pipenv/utils.py | 7 +- pipenv/vendor/tomlkit/api.py | 2 - pipenv/vendor/tomlkit/container.py | 16 +- 
pipenv/vendor/tomlkit/exceptions.py | 2 - pipenv/vendor/tomlkit/items.py | 13 +- pipenv/vendor/tomlkit/parser.py | 7 - pipenv/vendor/tomlkit/source.py | 2 - pipenv/vendor/tomlkit/toml_file.py | 3 - pipenv/vendor/vendor.txt | 3 +- .../vendor/tomlkit-dump-inline-table.patch | 34 ----- .../patches/vendor/tomlkit-fix.patch | 144 ++++++++++++++++++ .../vendor/tomlkit-typing-imports.patch | 93 ----------- .../patches/vendor/tomlkit-update-items.patch | 27 ---- 13 files changed, 158 insertions(+), 195 deletions(-) delete mode 100644 tasks/vendoring/patches/vendor/tomlkit-dump-inline-table.patch create mode 100644 tasks/vendoring/patches/vendor/tomlkit-fix.patch delete mode 100644 tasks/vendoring/patches/vendor/tomlkit-typing-imports.patch delete mode 100644 tasks/vendoring/patches/vendor/tomlkit-update-items.patch diff --git a/pipenv/utils.py b/pipenv/utils.py index 9d495fa367..9a74b2b94d 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -102,19 +102,14 @@ def convert_toml_outline_tables(parsed): else: empty_inline_table = toml.TomlDecoder().get_empty_inline_table for section in ("packages", "dev-packages"): - has_outline_table = False - table_data = parsed.get(section, {}).copy() + table_data = parsed.get(section, {}) for package, value in table_data.items(): if hasattr(value, "keys") and not isinstance( value, (tomlkit.items.InlineTable, toml.decoder.InlineTableDict) ): - has_outline_table = True table = empty_inline_table() table.update(value) table_data[package] = table - if has_outline_table: - # We'll lose comments here, only update when necessary - parsed[section] = table_data return parsed diff --git a/pipenv/vendor/tomlkit/api.py b/pipenv/vendor/tomlkit/api.py index e541c20c17..0ac2675262 100644 --- a/pipenv/vendor/tomlkit/api.py +++ b/pipenv/vendor/tomlkit/api.py @@ -1,7 +1,5 @@ import datetime as _datetime -from typing import Tuple - from ._utils import parse_rfc3339 from .container import Container from .items import AoT diff --git 
a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py index cb8af1d522..9b5db5cb66 100644 --- a/pipenv/vendor/tomlkit/container.py +++ b/pipenv/vendor/tomlkit/container.py @@ -1,13 +1,5 @@ from __future__ import unicode_literals -from typing import Any -from typing import Dict -from typing import Generator -from typing import List -from typing import Optional -from typing import Tuple -from typing import Union - from ._compat import decode from .exceptions import KeyAlreadyPresent from .exceptions import NonExistentKey @@ -17,6 +9,7 @@ from .items import Key from .items import Null from .items import Table +from .items import Trivia from .items import Whitespace from .items import item as _item @@ -221,7 +214,12 @@ def remove(self, key): # type: (Union[Key, str]) -> Container for i in idx: self._body[i] = (None, Null()) else: - self._body[idx] = (None, Null()) + old_data = self._body[idx][1] + trivia = getattr(old_data, "trivia", None) + if trivia and trivia.comment: + self._body[idx] = (None, Comment(Trivia(comment_ws="", comment=trivia.comment))) + else: + self._body[idx] = (None, Null()) super(Container, self).__delitem__(key.key) diff --git a/pipenv/vendor/tomlkit/exceptions.py b/pipenv/vendor/tomlkit/exceptions.py index 4fbc667bdc..c1a4e620e0 100644 --- a/pipenv/vendor/tomlkit/exceptions.py +++ b/pipenv/vendor/tomlkit/exceptions.py @@ -1,5 +1,3 @@ -from typing import Optional - class TOMLKitError(Exception): diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py index 375b5f0251..7035b69ecc 100644 --- a/pipenv/vendor/tomlkit/items.py +++ b/pipenv/vendor/tomlkit/items.py @@ -7,13 +7,6 @@ from datetime import datetime from datetime import time from enum import Enum -from typing import Any -from typing import Dict -from typing import Generator -from typing import List -from typing import Optional -from typing import Union - from ._compat import PY2 from ._compat import decode @@ -25,6 +18,7 @@ from 
pipenv.vendor.backports.functools_lru_cache import lru_cache else: from functools import lru_cache +from toml.decoder import InlineTableDict def item(value, _parent=None): @@ -40,7 +34,10 @@ def item(value, _parent=None): elif isinstance(value, float): return Float(value, Trivia(), str(value)) elif isinstance(value, dict): - val = Table(Container(), Trivia(), False) + if isinstance(value, InlineTableDict): + val = InlineTable(Container(), Trivia()) + else: + val = Table(Container(), Trivia(), False) for k, v in sorted(value.items(), key=lambda i: (isinstance(i[1], dict), i[0])): val[k] = item(v, _parent=val) diff --git a/pipenv/vendor/tomlkit/parser.py b/pipenv/vendor/tomlkit/parser.py index 7b9483313f..3f507bb4ca 100644 --- a/pipenv/vendor/tomlkit/parser.py +++ b/pipenv/vendor/tomlkit/parser.py @@ -4,13 +4,6 @@ import re import string -from typing import Any -from typing import Generator -from typing import List -from typing import Optional -from typing import Tuple -from typing import Union - from ._compat import chr from ._compat import decode from ._utils import _escaped diff --git a/pipenv/vendor/tomlkit/source.py b/pipenv/vendor/tomlkit/source.py index 1a96e05893..dcfdafd0a5 100644 --- a/pipenv/vendor/tomlkit/source.py +++ b/pipenv/vendor/tomlkit/source.py @@ -4,8 +4,6 @@ import itertools from copy import copy -from typing import Optional -from typing import Tuple from ._compat import PY2 from ._compat import unicode diff --git a/pipenv/vendor/tomlkit/toml_file.py b/pipenv/vendor/tomlkit/toml_file.py index 3b416664dd..631e995958 100644 --- a/pipenv/vendor/tomlkit/toml_file.py +++ b/pipenv/vendor/tomlkit/toml_file.py @@ -1,8 +1,5 @@ import io -from typing import Any -from typing import Dict - from .api import loads from .toml_document import TOMLDocument diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 3228068f84..9924810eed 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -34,7 +34,7 @@ requirementslib==1.2.5 
pyparsing==2.2.2 pytoml==0.1.19 plette==0.2.2 - tomlkit==0.4.6 + tomlkit==0.5.2 shellingham==1.2.7 six==1.11.0 semver==2.8.1 @@ -51,4 +51,3 @@ git+https://github.com/sarugaku/passa.git@master#egg=passa cursor==1.2.0 resolvelib==0.2.2 backports.functools_lru_cache==1.5 -tomlkit \ No newline at end of file diff --git a/tasks/vendoring/patches/vendor/tomlkit-dump-inline-table.patch b/tasks/vendoring/patches/vendor/tomlkit-dump-inline-table.patch deleted file mode 100644 index 8cd6d5cafb..0000000000 --- a/tasks/vendoring/patches/vendor/tomlkit-dump-inline-table.patch +++ /dev/null @@ -1,34 +0,0 @@ -diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py -index 781e2e98..c3c2d59f 100644 ---- a/pipenv/vendor/tomlkit/items.py -+++ b/pipenv/vendor/tomlkit/items.py -@@ -21,6 +21,7 @@ if PY2: - from pipenv.vendor.backports.functools_lru_cache import lru_cache - else: - from functools import lru_cache -+from toml.decoder import InlineTableDict - - - def item(value, _parent=None): -@@ -36,7 +37,10 @@ def item(value, _parent=None): - elif isinstance(value, float): - return Float(value, Trivia(), str(value)) - elif isinstance(value, dict): -- val = Table(Container(), Trivia(), False) -+ if isinstance(value, InlineTableDict): -+ val = InlineTable(Container(), Trivia()) -+ else: -+ val = Table(Container(), Trivia(), False) - for k, v in sorted(value.items(), key=lambda i: (isinstance(i[1], dict), i[0])): - val[k] = item(v, _parent=val) - -@@ -1000,6 +1004,9 @@ class InlineTable(Item, dict): - if key is not None: - super(InlineTable, self).__setitem__(key, value) - -+ if hasattr(value, "trivia") and value.trivia.comment: -+ value.trivia.comment = "" -+ - m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent) - if not m: - return diff --git a/tasks/vendoring/patches/vendor/tomlkit-fix.patch b/tasks/vendoring/patches/vendor/tomlkit-fix.patch new file mode 100644 index 0000000000..5dd9d3fe1e --- /dev/null +++ b/tasks/vendoring/patches/vendor/tomlkit-fix.patch @@ 
-0,0 +1,144 @@ +diff --git a/pipenv/vendor/tomlkit/api.py b/pipenv/vendor/tomlkit/api.py +index e541c20c..0ac26752 100644 +--- a/pipenv/vendor/tomlkit/api.py ++++ b/pipenv/vendor/tomlkit/api.py +@@ -1,7 +1,5 @@ + import datetime as _datetime + +-from typing import Tuple +- + from ._utils import parse_rfc3339 + from .container import Container + from .items import AoT +diff --git a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py +index cb8af1d5..56ee2d62 100644 +--- a/pipenv/vendor/tomlkit/container.py ++++ b/pipenv/vendor/tomlkit/container.py +@@ -1,13 +1,5 @@ + from __future__ import unicode_literals + +-from typing import Any +-from typing import Dict +-from typing import Generator +-from typing import List +-from typing import Optional +-from typing import Tuple +-from typing import Union +- + from ._compat import decode + from .exceptions import KeyAlreadyPresent + from .exceptions import NonExistentKey +@@ -17,6 +9,7 @@ from .items import Item + from .items import Key + from .items import Null + from .items import Table ++from .items import Trivia + from .items import Whitespace + from .items import item as _item + +@@ -221,7 +214,12 @@ class Container(dict): + for i in idx: + self._body[i] = (None, Null()) + else: +- self._body[idx] = (None, Null()) ++ old_data = self._body[idx][1] ++ trivia = getattr(old_data, "trivia", None) ++ if trivia and trivia.comment: ++ self._body[idx] = (None, Comment(Trivia(comment_ws="", comment=trivia.comment))) ++ else: ++ self._body[idx] = (None, Null()) + + super(Container, self).__delitem__(key.key) + +diff --git a/pipenv/vendor/tomlkit/exceptions.py b/pipenv/vendor/tomlkit/exceptions.py +index 4fbc667b..c1a4e620 100644 +--- a/pipenv/vendor/tomlkit/exceptions.py ++++ b/pipenv/vendor/tomlkit/exceptions.py +@@ -1,5 +1,3 @@ +-from typing import Optional +- + + class TOMLKitError(Exception): + +diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py +index 375b5f02..7035b69e 100644 
+--- a/pipenv/vendor/tomlkit/items.py ++++ b/pipenv/vendor/tomlkit/items.py +@@ -7,13 +7,6 @@ from datetime import date + from datetime import datetime + from datetime import time + from enum import Enum +-from typing import Any +-from typing import Dict +-from typing import Generator +-from typing import List +-from typing import Optional +-from typing import Union +- + + from ._compat import PY2 + from ._compat import decode +@@ -25,6 +18,7 @@ if PY2: + from pipenv.vendor.backports.functools_lru_cache import lru_cache + else: + from functools import lru_cache ++from toml.decoder import InlineTableDict + + + def item(value, _parent=None): +@@ -40,7 +34,10 @@ def item(value, _parent=None): + elif isinstance(value, float): + return Float(value, Trivia(), str(value)) + elif isinstance(value, dict): +- val = Table(Container(), Trivia(), False) ++ if isinstance(value, InlineTableDict): ++ val = InlineTable(Container(), Trivia()) ++ else: ++ val = Table(Container(), Trivia(), False) + for k, v in sorted(value.items(), key=lambda i: (isinstance(i[1], dict), i[0])): + val[k] = item(v, _parent=val) + +diff --git a/pipenv/vendor/tomlkit/parser.py b/pipenv/vendor/tomlkit/parser.py +index 7b948331..3f507bb4 100644 +--- a/pipenv/vendor/tomlkit/parser.py ++++ b/pipenv/vendor/tomlkit/parser.py +@@ -4,13 +4,6 @@ from __future__ import unicode_literals + import re + import string + +-from typing import Any +-from typing import Generator +-from typing import List +-from typing import Optional +-from typing import Tuple +-from typing import Union +- + from ._compat import chr + from ._compat import decode + from ._utils import _escaped +diff --git a/pipenv/vendor/tomlkit/source.py b/pipenv/vendor/tomlkit/source.py +index 1a96e058..dcfdafd0 100644 +--- a/pipenv/vendor/tomlkit/source.py ++++ b/pipenv/vendor/tomlkit/source.py +@@ -4,8 +4,6 @@ from __future__ import unicode_literals + import itertools + + from copy import copy +-from typing import Optional +-from typing import Tuple + + 
from ._compat import PY2 + from ._compat import unicode +diff --git a/pipenv/vendor/tomlkit/toml_file.py b/pipenv/vendor/tomlkit/toml_file.py +index 3b416664..631e9959 100644 +--- a/pipenv/vendor/tomlkit/toml_file.py ++++ b/pipenv/vendor/tomlkit/toml_file.py +@@ -1,8 +1,5 @@ + import io + +-from typing import Any +-from typing import Dict +- + from .api import loads + from .toml_document import TOMLDocument + diff --git a/tasks/vendoring/patches/vendor/tomlkit-typing-imports.patch b/tasks/vendoring/patches/vendor/tomlkit-typing-imports.patch deleted file mode 100644 index 2288b51333..0000000000 --- a/tasks/vendoring/patches/vendor/tomlkit-typing-imports.patch +++ /dev/null @@ -1,93 +0,0 @@ -diff --git a/pipenv/vendor/tomlkit/api.py b/pipenv/vendor/tomlkit/api.py -index e541c20c..0ac26752 100644 ---- a/pipenv/vendor/tomlkit/api.py -+++ b/pipenv/vendor/tomlkit/api.py -@@ -1,7 +1,5 @@ - import datetime as _datetime - --from typing import Tuple -- - from ._utils import parse_rfc3339 - from .container import Container - from .items import AoT -diff --git a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py -index c1d2d7c6..a7876ff1 100644 ---- a/pipenv/vendor/tomlkit/container.py -+++ b/pipenv/vendor/tomlkit/container.py -@@ -1,13 +1,5 @@ - from __future__ import unicode_literals - --from typing import Any --from typing import Dict --from typing import Generator --from typing import List --from typing import Optional --from typing import Tuple --from typing import Union -- - from ._compat import decode - from .exceptions import KeyAlreadyPresent - from .exceptions import NonExistentKey -diff --git a/pipenv/vendor/tomlkit/exceptions.py b/pipenv/vendor/tomlkit/exceptions.py -index 8d48bf19..d889a924 100644 ---- a/pipenv/vendor/tomlkit/exceptions.py -+++ b/pipenv/vendor/tomlkit/exceptions.py -@@ -1,6 +1,3 @@ --from typing import Optional -- -- - class TOMLKitError(Exception): - - pass -diff --git a/pipenv/vendor/tomlkit/items.py 
b/pipenv/vendor/tomlkit/items.py -index 747dbd50..8807f4b3 100644 ---- a/pipenv/vendor/tomlkit/items.py -+++ b/pipenv/vendor/tomlkit/items.py -@@ -6,14 +6,11 @@ import string - from datetime import date - from datetime import datetime - from datetime import time --from enum import Enum --from typing import Any --from typing import Dict --from typing import Generator --from typing import List --from typing import Optional --from typing import Union -- -+import sys -+if sys.version_info >= (3, 4): -+ from enum import Enum -+else: -+ from pipenv.vendor.backports.enum import Enum - - from ._compat import PY2 - from ._compat import decode -diff --git a/pipenv/vendor/tomlkit/parser.py b/pipenv/vendor/tomlkit/parser.py -index b55a3fe4..3d4984d1 100644 ---- a/pipenv/vendor/tomlkit/parser.py -+++ b/pipenv/vendor/tomlkit/parser.py -@@ -7,10 +7,6 @@ import re - import string - - from copy import copy --from typing import Iterator --from typing import Optional --from typing import Tuple --from typing import Union - - from ._compat import PY2 - from ._compat import chr -diff --git a/pipenv/vendor/tomlkit/toml_file.py b/pipenv/vendor/tomlkit/toml_file.py -index 3b416664..631e9959 100644 ---- a/pipenv/vendor/tomlkit/toml_file.py -+++ b/pipenv/vendor/tomlkit/toml_file.py -@@ -1,8 +1,5 @@ - import io - --from typing import Any --from typing import Dict -- - from .api import loads - from .toml_document import TOMLDocument - diff --git a/tasks/vendoring/patches/vendor/tomlkit-update-items.patch b/tasks/vendoring/patches/vendor/tomlkit-update-items.patch deleted file mode 100644 index ed2fb95eb9..0000000000 --- a/tasks/vendoring/patches/vendor/tomlkit-update-items.patch +++ /dev/null @@ -1,27 +0,0 @@ -diff --git a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py -index 37014921..987a0790 100644 ---- a/pipenv/vendor/tomlkit/container.py -+++ b/pipenv/vendor/tomlkit/container.py -@@ -9,6 +9,7 @@ from .items import Item - from .items import Key - from .items import 
Null - from .items import Table -+from .items import Trivia - from .items import Whitespace - from .items import item as _item - -@@ -189,8 +190,12 @@ class Container(dict): - if idx is None: - raise NonExistentKey(key) - -- self._body[idx] = (None, Null()) -- -+ old_data = self._body[idx][1] -+ trivia = getattr(old_data, "trivia", None) -+ if trivia and getattr(trivia, "comment", None): -+ self._body[idx] = (None, Comment(Trivia(comment_ws="", comment=trivia.comment))) -+ else: -+ self._body[idx] = (None, Null()) - super(Container, self).__delitem__(key.key) - - return self From de98b874ec22acdedc1155fa9e6db3ac40b2a016 Mon Sep 17 00:00:00 2001 From: frostming Date: Sat, 10 Nov 2018 14:51:19 +0800 Subject: [PATCH 18/71] backports import --- pipenv/vendor/tomlkit/items.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py index 7035b69ecc..39128d30ff 100644 --- a/pipenv/vendor/tomlkit/items.py +++ b/pipenv/vendor/tomlkit/items.py @@ -6,7 +6,10 @@ from datetime import date from datetime import datetime from datetime import time -from enum import Enum +try: + from enum import Enum +except ImportError: + from pipenv.vendor.backports.enum import Enum from ._compat import PY2 from ._compat import decode From f5c7c58be01e4a0fa17c64d5790e16c8cfc56bec Mon Sep 17 00:00:00 2001 From: Frost Ming Date: Sat, 10 Nov 2018 15:41:09 +0800 Subject: [PATCH 19/71] Change fallback style --- pipenv/vendor/tomlkit/items.py | 6 +-- .../patches/vendor/tomlkit-fix.patch | 50 +++++++++++-------- 2 files changed, 30 insertions(+), 26 deletions(-) diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py index 39128d30ff..cccfd4a18e 100644 --- a/pipenv/vendor/tomlkit/items.py +++ b/pipenv/vendor/tomlkit/items.py @@ -6,10 +6,6 @@ from datetime import date from datetime import datetime from datetime import time -try: - from enum import Enum -except ImportError: - from pipenv.vendor.backports.enum 
import Enum from ._compat import PY2 from ._compat import decode @@ -18,8 +14,10 @@ from ._utils import escape_string if PY2: + from pipenv.vendor.backports.enum import Enum from pipenv.vendor.backports.functools_lru_cache import lru_cache else: + from enum import Enum from functools import lru_cache from toml.decoder import InlineTableDict diff --git a/tasks/vendoring/patches/vendor/tomlkit-fix.patch b/tasks/vendoring/patches/vendor/tomlkit-fix.patch index 5dd9d3fe1e..36e2f8083d 100644 --- a/tasks/vendoring/patches/vendor/tomlkit-fix.patch +++ b/tasks/vendoring/patches/vendor/tomlkit-fix.patch @@ -4,19 +4,19 @@ index e541c20c..0ac26752 100644 +++ b/pipenv/vendor/tomlkit/api.py @@ -1,7 +1,5 @@ import datetime as _datetime - + -from typing import Tuple - from ._utils import parse_rfc3339 from .container import Container from .items import AoT diff --git a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py -index cb8af1d5..56ee2d62 100644 +index cb8af1d5..9b5db5cb 100644 --- a/pipenv/vendor/tomlkit/container.py +++ b/pipenv/vendor/tomlkit/container.py @@ -1,13 +1,5 @@ from __future__ import unicode_literals - + -from typing import Any -from typing import Dict -from typing import Generator @@ -35,7 +35,7 @@ index cb8af1d5..56ee2d62 100644 +from .items import Trivia from .items import Whitespace from .items import item as _item - + @@ -221,7 +214,12 @@ class Container(dict): for i in idx: self._body[i] = (None, Null()) @@ -47,9 +47,9 @@ index cb8af1d5..56ee2d62 100644 + self._body[idx] = (None, Comment(Trivia(comment_ws="", comment=trivia.comment))) + else: + self._body[idx] = (None, Null()) - + super(Container, self).__delitem__(key.key) - + diff --git a/pipenv/vendor/tomlkit/exceptions.py b/pipenv/vendor/tomlkit/exceptions.py index 4fbc667b..c1a4e620 100644 --- a/pipenv/vendor/tomlkit/exceptions.py @@ -57,17 +57,18 @@ index 4fbc667b..c1a4e620 100644 @@ -1,5 +1,3 @@ -from typing import Optional - - + class TOMLKitError(Exception): - + diff --git 
a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py -index 375b5f02..7035b69e 100644 +index 375b5f02..cccfd4a1 100644 --- a/pipenv/vendor/tomlkit/items.py +++ b/pipenv/vendor/tomlkit/items.py -@@ -7,13 +7,6 @@ from datetime import date +@@ -6,14 +6,6 @@ import string + from datetime import date from datetime import datetime from datetime import time - from enum import Enum +-from enum import Enum -from typing import Any -from typing import Dict -from typing import Generator @@ -75,18 +76,23 @@ index 375b5f02..7035b69e 100644 -from typing import Optional -from typing import Union - - + from ._compat import PY2 from ._compat import decode -@@ -25,6 +18,7 @@ if PY2: +@@ -22,9 +14,12 @@ from ._compat import unicode + from ._utils import escape_string + + if PY2: ++ from pipenv.vendor.backports.enum import Enum from pipenv.vendor.backports.functools_lru_cache import lru_cache else: ++ from enum import Enum from functools import lru_cache +from toml.decoder import InlineTableDict - - + + def item(value, _parent=None): -@@ -40,7 +34,10 @@ def item(value, _parent=None): +@@ -40,7 +35,10 @@ def item(value, _parent=None): elif isinstance(value, float): return Float(value, Trivia(), str(value)) elif isinstance(value, dict): @@ -97,7 +103,7 @@ index 375b5f02..7035b69e 100644 + val = Table(Container(), Trivia(), False) for k, v in sorted(value.items(), key=lambda i: (isinstance(i[1], dict), i[0])): val[k] = item(v, _parent=val) - + diff --git a/pipenv/vendor/tomlkit/parser.py b/pipenv/vendor/tomlkit/parser.py index 7b948331..3f507bb4 100644 --- a/pipenv/vendor/tomlkit/parser.py @@ -105,7 +111,7 @@ index 7b948331..3f507bb4 100644 @@ -4,13 +4,6 @@ from __future__ import unicode_literals import re import string - + -from typing import Any -from typing import Generator -from typing import List @@ -122,11 +128,11 @@ index 1a96e058..dcfdafd0 100644 +++ b/pipenv/vendor/tomlkit/source.py @@ -4,8 +4,6 @@ from __future__ import unicode_literals import itertools - + from 
copy import copy -from typing import Optional -from typing import Tuple - + from ._compat import PY2 from ._compat import unicode diff --git a/pipenv/vendor/tomlkit/toml_file.py b/pipenv/vendor/tomlkit/toml_file.py @@ -135,10 +141,10 @@ index 3b416664..631e9959 100644 +++ b/pipenv/vendor/tomlkit/toml_file.py @@ -1,8 +1,5 @@ import io - + -from typing import Any -from typing import Dict - from .api import loads from .toml_document import TOMLDocument - + From 2b90c89d1f518a543f775cb811ee8b5565ee1366 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 16:30:42 -0500 Subject: [PATCH 20/71] Revendor requirementslib - Implement improvements and bugfixes in codebase - Remote archives will now resolve properly Signed-off-by: Dan Ryan --- pipenv/__init__.py | 6 +- pipenv/_compat.py | 2 +- pipenv/core.py | 15 +- pipenv/project.py | 105 ++--- pipenv/utils.py | 14 +- pipenv/vendor/requirementslib/__init__.py | 4 + pipenv/vendor/requirementslib/exceptions.py | 75 ++++ .../vendor/requirementslib/models/lockfile.py | 68 +++- .../vendor/requirementslib/models/pipfile.py | 44 +- .../requirementslib/models/requirements.py | 340 +++++++++++----- .../requirementslib/models/setup_info.py | 378 ++++++++++++++++++ pipenv/vendor/requirementslib/models/utils.py | 41 +- pipenv/vendor/requirementslib/models/vcs.py | 3 +- tests/integration/conftest.py | 13 + tests/integration/test_uninstall.py | 6 +- 15 files changed, 944 insertions(+), 170 deletions(-) create mode 100644 pipenv/vendor/requirementslib/models/setup_info.py diff --git a/pipenv/__init__.py b/pipenv/__init__.py index f8a1a8b3e1..ba4dd9c3e0 100644 --- a/pipenv/__init__.py +++ b/pipenv/__init__.py @@ -10,7 +10,7 @@ from .__version__ import __version__ -PIPENV_ROOT = os.path.dirname(os.path.realpath(__file__)) +PIPENV_ROOT = os.path.abspath(os.path.dirname(os.path.realpath(__file__))) PIPENV_VENDOR = os.sep.join([PIPENV_ROOT, "vendor"]) PIPENV_PATCHED = os.sep.join([PIPENV_ROOT, "patched"]) # Inject vendored directory 
into system path. @@ -27,11 +27,13 @@ if sys.version_info >= (3, 1) and sys.version_info <= (3, 6): if sys.stdout.isatty() and sys.stderr.isatty(): import io + import atexit sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8') + atexit.register(sys.stdout.close) sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf8') + atexit.register(sys.stdout.close) os.environ["PIP_DISABLE_PIP_VERSION_CHECK"] = fs_str("1") -os.environ["PIP_SHIMS_BASE_MODULE"] = fs_str("pipenv.patched.notpip") # Hack to make things work better. try: diff --git a/pipenv/_compat.py b/pipenv/_compat.py index 6e5ae6a7fb..fb2c01477f 100644 --- a/pipenv/_compat.py +++ b/pipenv/_compat.py @@ -382,7 +382,7 @@ def decode_output(output): except (AttributeError, UnicodeDecodeError, UnicodeEncodeError): if six.PY2: output = unicode.translate(vistir.misc.to_text(output), - UNICODE_TO_ASCII_TRANSLATION_MAP) + UNICODE_TO_ASCII_TRANSLATION_MAP) else: output = output.translate(UNICODE_TO_ASCII_TRANSLATION_MAP) output = output.encode(DEFAULT_ENCODING, "replace") diff --git a/pipenv/core.py b/pipenv/core.py index f9b8eaeec2..3cbd1645f1 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -40,8 +40,8 @@ clean_resolved_dep, parse_indexes, escape_cmd, - fix_venv_site, create_spinner, + get_canonical_names ) from . 
import environments, pep508checker, progress from .environments import ( @@ -1296,7 +1296,7 @@ def pip_install( pypi_mirror=None, trusted_hosts=None ): - from notpip._internal import logger as piplogger + from pipenv.patched.notpip._internal import logger as piplogger from .utils import Mapping from .vendor.urllib3.util import parse_url @@ -1746,11 +1746,11 @@ def do_install( if requirements or package_args or project.pipfile_exists: skip_requirements = True # Don't attempt to install develop and default packages if Pipfile is missing - if not project.pipfile_exists and not (packages or dev) and not code: - if not (skip_lock or deploy): - raise exceptions.PipfileNotFound(project.pipfile_location) - elif (skip_lock or deploy) and not project.lockfile_exists: - raise exceptions.LockfileNotFound(project.lockfile_location) + if not project.pipfile_exists and not (package_args or dev) and not code: + if not (ignore_pipfile or deploy): + raise exceptions.PipfileNotFound(project.path_to("Pipfile")) + elif ((skip_lock and deploy) or ignore_pipfile) and not project.lockfile_exists: + raise exceptions.LockfileNotFound(project.path_to("Pipfile.lock")) concurrent = not sequential # Ensure that virtualenv is available. ensure_project( @@ -2092,7 +2092,6 @@ def do_uninstall( ) ) package_names = develop - fix_venv_site(project.env_paths["lib"]) # Remove known "bad packages" from the list. 
bad_pkgs = set([canonicalize_name(pkg) for pkg in BAD_PACKAGES]) for bad_package in BAD_PACKAGES: diff --git a/pipenv/project.py b/pipenv/project.py index 26b4cf0ce6..d4713b89f2 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -804,7 +804,7 @@ def create_pipfile(self, python=None): .lstrip("\n") .split("\n") ) - sources = [DEFAULT_SOURCE] + sources = [DEFAULT_SOURCE,] for i, index in enumerate(indexes): if not index: continue @@ -831,55 +831,68 @@ def create_pipfile(self, python=None): version = python_version(required_python) or PIPENV_DEFAULT_PYTHON_VERSION if version and len(version) >= 3: data[u"requires"] = {"python_version": version[: len("2.7")]} - self.write_toml(data, "Pipfile") + self.write_toml(data) def get_or_create_lockfile(self): - from requirementslib.models.lockfile import Lockfile as Req_Lockfile + from pipenv.vendor.requirementslib.models.lockfile import Lockfile as Req_Lockfile lockfile = None - try: - lockfile = Req_Lockfile.load(self.lockfile_location) - except OSError: - lockfile = Req_Lockfile(self.lockfile_content) - return lockfile + if self.lockfile_exists: + try: + lockfile = Req_Lockfile.load(self.lockfile_location) + except OSError: + lockfile = Req_Lockfile.from_data(self.lockfile_location, self.lockfile_content) else: - if lockfile._lockfile is not None: - return lockfile - if self.lockfile_exists and self.lockfile_content: - from .vendor.plette.lockfiles import Lockfile - lockfile_dict = self.lockfile_content.copy() - sources = lockfile_dict["_meta"].get("sources", []) - if not sources: - sources = self.pipfile_sources - elif not isinstance(sources, list): - sources = [sources,] - lockfile_dict["_meta"]["sources"] = [ - { - "name": s["name"], - "url": s["url"], - "verify_ssl": ( - s["verify_ssl"] if isinstance(s["verify_ssl"], bool) else ( - True if s["verify_ssl"].lower() == "true" else False - ) + lockfile = Req_Lockfile.from_data(path=self.lockfile_location, data=self._lockfile, meta_from_project=False) + if 
lockfile._lockfile is not None: + return lockfile + if self.lockfile_exists and self.lockfile_content: + lockfile_dict = self.lockfile_content.copy() + sources = lockfile_dict.get("_meta", {}).get("sources", []) + if not sources: + sources = self.pipfile_sources + elif not isinstance(sources, list): + sources = [sources,] + lockfile_dict["_meta"]["sources"] = [ + { + "name": s["name"], + "url": s["url"], + "verify_ssl": ( + s["verify_ssl"] if isinstance(s["verify_ssl"], bool) else ( + True if s["verify_ssl"].lower() == "true" else False ) - } for s in sources - ] - _created_lockfile = Lockfile(lockfile_dict) - lockfile._lockfile = lockfile.projectfile.model = _created_lockfile - return lockfile - elif self.pipfile_exists: - from .vendor.plette.lockfiles import Lockfile, PIPFILE_SPEC_CURRENT - lockfile_dict = { - "_meta": { - "hash": {"sha256": self.calculate_pipfile_hash()}, - "pipfile-spec": PIPFILE_SPEC_CURRENT, - "sources": self.pipfile_sources, - "requires": self.parsed_pipfile.get("requires", {}) - }, - "default": self._lockfile["default"].copy(), - "develop": self._lockfile["develop"].copy() - } - lockfile._lockfile = Lockfile(lockfile_dict) - return lockfile + ) + } for s in sources + ] + _created_lockfile = Req_Lockfile.from_data( + path=self.lockfile_location, data=lockfile_dict, meta_from_project=False + ) + lockfile._lockfile = lockfile.projectfile.model = _created_lockfile + return lockfile + elif self.pipfile_exists: + lockfile_dict = { + "default": self._lockfile["default"].copy(), + "develop": self._lockfile["develop"].copy() + } + lockfile_dict.update({"_meta": self.get_lockfile_meta()}) + _created_lockfile = Req_Lockfile.from_data( + path=self.lockfile_location, data=lockfile_dict, meta_from_project=False + ) + lockfile._lockfile = _created_lockfile + return lockfile + + def get_lockfile_meta(self): + from .vendor.plette.lockfiles import PIPFILE_SPEC_CURRENT + sources = self.lockfile_content.get("_meta", {}).get("sources", []) + if not sources: + 
sources = self.pipfile_sources + elif not isinstance(sources, list): + sources = [sources,] + return { + "hash": {"sha256": self.calculate_pipfile_hash()}, + "pipfile-spec": PIPFILE_SPEC_CURRENT, + "sources": sources, + "requires": self.parsed_pipfile.get("requires", {}) + } def write_toml(self, data, path=None): """Writes the given data structure out as TOML.""" @@ -943,7 +956,7 @@ def pipfile_sources(self): @property def sources(self): if self.lockfile_exists and hasattr(self.lockfile_content, "keys"): - meta_ = self.lockfile_content["_meta"] + meta_ = self.lockfile_content.get("_meta", {}) sources_ = meta_.get("sources") if sources_: return sources_ diff --git a/pipenv/utils.py b/pipenv/utils.py index c9feeafdb8..3a9ef307d7 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -18,7 +18,8 @@ from vistir.misc import fs_str six.add_move(six.MovedAttribute("Mapping", "collections", "collections.abc")) -from six.moves import Mapping +six.add_move(six.MovedAttribute("Sequence", "collections", "collections.abc")) +from six.moves import Mapping, Sequence from vistir.compat import ResourceWarning @@ -1035,6 +1036,17 @@ def path_to_url(path): return Path(normalize_drive(os.path.abspath(path))).as_uri() +def get_canonical_names(packages): + """Canonicalize a list of packages and return a set of canonical names""" + from .vendor.packaging.utils import canonicalize_name + + if not isinstance(packages, Sequence): + if not isinstance(packages, six.string_types): + return packages + packages = [packages,] + return set([canonicalize_name(pkg) for pkg in packages if pkg]) + + def walk_up(bottom): """Mimic os.walk, but walk 'up' instead of down the directory tree. 
From: https://gist.github.com/zdavkeos/1098474 diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index edbab5bc8a..881e9ac9d9 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -2,6 +2,10 @@ __version__ = '1.2.6' import logging +import warnings + +warnings.filterwarnings("ignore", category=ResourceWarning) + logger = logging.getLogger(__name__) logger.addHandler(logging.NullHandler()) diff --git a/pipenv/vendor/requirementslib/exceptions.py b/pipenv/vendor/requirementslib/exceptions.py index de8bf8ef94..1a73f98e77 100644 --- a/pipenv/vendor/requirementslib/exceptions.py +++ b/pipenv/vendor/requirementslib/exceptions.py @@ -1,7 +1,12 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import import errno +import os import six +import sys + + +from vistir.compat import FileNotFoundError if six.PY2: @@ -15,3 +20,73 @@ def __init__(self, *args, **kwargs): class RequirementError(Exception): pass + + +class MissingParameter(Exception): + def __init__(self, param): + super(Exception, self).__init__() + print("Missing parameter: %s" % param, file=sys.stderr, flush=True) + + +class FileCorruptException(OSError): + def __init__(self, path, *args, **kwargs): + path = path + backup_path = kwargs.pop("backup_path", None) + if not backup_path and args: + args = reversed(args) + backup_path = args.pop() + if not isinstance(backup_path, six.string_types) or not os.path.exists(os.path.abspath(os.path.dirname(backup_path))): + args.append(backup_path) + backup_path = None + if args: + args = reversed(args) + self.path = path + self.backup_path = backup_path + self.show(self.path, self.backup_path) + super(OSError, self).__init__(path, *args, **kwargs) + + @classmethod + def show(cls, path, backup_path=None): + print("ERROR: Failed to load file at %s" % path, file=sys.stderr, flush=True) + if backup_path: + msg = "it will be backed up to %s and removed" % backup_path + else: 
+ msg = "it will be removed and replaced." + print("The file is corrupt, %s" % msg, file=sys.stderr, flush=True) + + +class LockfileCorruptException(FileCorruptException): + + @classmethod + def show(cls, path, backup_path=None): + print("ERROR: Failed to load lockfile at %s" % path, file=sys.stderr, flush=True) + if backup_path: + msg = "it will be backed up to %s and removed" % backup_path + else: + msg = "it will be removed and replaced on the next lock." + print("Your lockfile is corrupt, %s" % msg, file=sys.stderr, flush=True) + + +class PipfileCorruptException(FileCorruptException): + + @classmethod + def show(cls, path, backup_path=None): + print("ERROR: Failed to load Pipfile at %s" % path, file=sys.stderr, flush=True) + if backup_path: + msg = "it will be backed up to %s and removed" % backup_path + else: + msg = "it will be removed and replaced on the next lock." + print("Your Pipfile is corrupt, %s" % msg, file=sys.stderr, flush=True) + + +class PipfileNotFound(FileNotFoundError): + def __init__(self, path, *args, **kwargs): + self.errno = errno.ENOENT + self.path = path + self.show(path) + super(PipfileNotFound, self).__init__(*args, **kwargs) + + @classmethod + def show(cls, path): + print("ERROR: The file could not be found: %s" % path, file=sys.stderr, flush=True) + print("Aborting...", file=sys.stderr, flush=True) diff --git a/pipenv/vendor/requirementslib/models/lockfile.py b/pipenv/vendor/requirementslib/models/lockfile.py index 3e48281347..6f61f57ebc 100644 --- a/pipenv/vendor/requirementslib/models/lockfile.py +++ b/pipenv/vendor/requirementslib/models/lockfile.py @@ -9,12 +9,13 @@ import plette.lockfiles import six -from vistir.compat import Path, FileNotFoundError +from vistir.compat import Path, FileNotFoundError, JSONDecodeError from .project import ProjectFile from .requirements import Requirement from .utils import optional_instance_of +from ..exceptions import LockfileCorruptException, PipfileNotFound, MissingParameter from ..utils import 
is_vcs, is_editable, merge_items DEFAULT_NEWLINES = u"\n" @@ -134,7 +135,17 @@ def read_projectfile(cls, path): return pf @classmethod - def load_projectfile(cls, path, create=True): + def lockfile_from_pipfile(cls, pipfile_path): + from .pipfile import Pipfile + if os.path.isfile(pipfile_path): + if not os.path.isabs(pipfile_path): + pipfile_path = os.path.abspath(pipfile_path) + pipfile = Pipfile.load(os.path.dirname(pipfile_path)) + return plette.lockfiles.Lockfile.with_meta_from(pipfile._pipfile) + raise PipfileNotFound(pipfile_path) + + @classmethod + def load_projectfile(cls, path, create=True, data=None): """Given a path, load or create the necessary lockfile. :param str path: Path to the project root or lockfile @@ -155,8 +166,48 @@ def load_projectfile(cls, path, create=True): elif not lockfile_path.exists() and not create: raise FileNotFoundError("Lockfile does not exist: %s" % lockfile_path.as_posix()) projectfile = cls.read_projectfile(lockfile_path.as_posix()) + if not lockfile_path.exists(): + if not data: + lf = cls.lockfile_from_pipfile(project_path.joinpath("Pipfile")) + else: + lf = plette.lockfiles.Lockfile(data) + projectfile.model = lf return projectfile + @classmethod + def from_data(cls, path, data, meta_from_project=True): + """Create a new lockfile instance from a dictionary. + + :param str path: Path to the project root. + :param dict data: Data to load into the lockfile. + :param bool meta_from_project: Attempt to populate the meta section from the + project root, default True. 
+ """ + + if path is None: + raise MissingParameter("path") + if data is None: + raise MissingParameter("data") + if not isinstance(data, dict): + raise TypeError("Expecting a dictionary for parameter 'data'") + path = os.path.abspath(str(path)) + if os.path.isdir(path): + project_path = path + elif not os.path.isdir(path) and os.path.isdir(os.path.dirname(path)): + project_path = os.path.dirname(path) + pipfile_path = os.path.join(project_path, "Pipfile") + lockfile_path = os.path.join(project_path, "Pipfile.lock") + if meta_from_project: + lockfile = cls.lockfile_from_pipfile(pipfile_path) + lockfile.update(data) + else: + lockfile = plette.lockfiles.Lockfile(data) + projectfile = ProjectFile(line_ending=DEFAULT_NEWLINES, location=lockfile_path, model=lockfile) + return cls( + projectfile=projectfile, lockfile=lockfile, + newlines=projectfile.line_ending, path=Path(projectfile.location) + ) + @classmethod def load(cls, path, create=True): """Create a new lockfile instance. @@ -170,7 +221,18 @@ def load(cls, path, create=True): :rtype: :class:`~requirementslib.models.lockfile.Lockfile` """ - projectfile = cls.load_projectfile(path, create=create) + try: + projectfile = cls.load_projectfile(path, create=create) + except JSONDecodeError as e: + path = os.path.abspath(path) + if not os.path.isdir(path): + path = os.path.dirname(path) + path = Path(os.path.join(path, "Pipfile.lock")) + formatted_path = path.as_posix() + backup_path = "%.bak" % formatted_path + LockfileCorruptException.show(formatted_path, backup_path=backup_path) + path.rename(backup_path) + cls.load(formatted_path, create=True) lockfile_path = Path(projectfile.location) creation_args = { "projectfile": projectfile, diff --git a/pipenv/vendor/requirementslib/models/pipfile.py b/pipenv/vendor/requirementslib/models/pipfile.py index 58d540559e..dbb024be6b 100644 --- a/pipenv/vendor/requirementslib/models/pipfile.py +++ b/pipenv/vendor/requirementslib/models/pipfile.py @@ -23,13 +23,26 @@ is_projectfile 
= optional_instance_of(ProjectFile) +def reorder_source_keys(data): + for i, entry in enumerate(data["source"]): + table = tomlkit.table() + table["name"] = entry["name"] + table["url"] = entry["url"] + table["verify_ssl"] = entry["verify_ssl"] + data["source"][i] = table + return data + + class PipfileLoader(plette.pipfiles.Pipfile): @classmethod def validate(cls, data): for key, klass in plette.pipfiles.PIPFILE_SECTIONS.items(): if key not in data or key == "source": continue - klass.validate(data[key]) + try: + klass.validate(data[key]) + except Exception: + pass @classmethod def load(cls, f, encoding=None): @@ -37,19 +50,26 @@ def load(cls, f, encoding=None): if encoding is not None: content = content.decode(encoding) _data = tomlkit.loads(content) + _data["source"] = _data.get("source", []) + _data.get("sources", []) + _data = reorder_source_keys(_data) if "source" not in _data: - if "sources" in _data: - _data["source"] = _data["sources"] - content = tomlkit.dumps(_data) - else: - # HACK: There is no good way to prepend a section to an existing - # TOML document, but there's no good way to copy non-structural - # content from one TOML document to another either. Modify the - # TOML content directly, and load the new in-memory document. - sep = "" if content.startswith("\n") else "\n" - content = plette.pipfiles.DEFAULT_SOURCE_TOML + sep + content + # HACK: There is no good way to prepend a section to an existing + # TOML document, but there's no good way to copy non-structural + # content from one TOML document to another either. Modify the + # TOML content directly, and load the new in-memory document. 
+ sep = "" if content.startswith("\n") else "\n" + content = plette.pipfiles.DEFAULT_SOURCE_TOML + sep + content data = tomlkit.loads(content) - return cls(data) + data = reorder_source_keys(data) + instance = cls(data) + new_data = reorder_source_keys(instance._data) + instance._data = new_data + return instance + + def __getattribute__(self, key): + if key == "source": + return self._data[key] + return super(PipfileLoader, self).__getattribute__(key) @attr.s(slots=True) diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index a36e5ba46c..514114298c 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -36,6 +36,7 @@ add_ssh_scheme_to_git_uri, strip_ssh_from_git_uri, ) +from .setup_info import SetupInfo from .utils import ( HASH_STRING, build_vcs_link, @@ -46,7 +47,6 @@ init_requirement, is_pinned_requirement, make_install_requirement, - optional_instance_of, parse_extras, specs_to_string, split_markers_from_line, @@ -56,6 +56,7 @@ validate_vcs, normalize_name, create_link, + get_pyproject ) @@ -148,13 +149,19 @@ class FileRequirement(object): uri = attr.ib() #: Link object representing the package to clone link = attr.ib() + #: PyProject Requirements + pyproject_requires = attr.ib(default=attr.Factory(list)) + #: PyProject Build System + pyproject_backend = attr.ib(default=None) + #: PyProject Path + pyproject_path = attr.ib(default=None) _has_hashed_name = attr.ib(default=False) #: Package name name = attr.ib() #: A :class:`~pkg_resources.Requirement` isntance req = attr.ib() - #: Whether this is a direct url requirement - is_direct = attr.ib(default=False) + #: Setup metadata e.g. 
dependencies + setup_info = attr.ib(default=None) @classmethod def get_link_from_line(cls, line): @@ -258,107 +265,110 @@ def get_link_from_line(cls, line): return LinkInfo(vcs_type, prefer, relpath, path, uri, link) - def __attrs_post_init__(self): - if self.req and getattr(self.req, "url"): - self.uri = self.req.url + @property + def setup_py_dir(self): + if self.setup_path: + return os.path.dirname(os.path.abspath(self.setup_path)) + + @property + def dependencies(self): + build_deps = [] + setup_deps = [] + deps = {} + if self.setup_info: + setup_info = self.setup_info.as_dict() + deps.update(setup_info.get("requires", {})) + setup_deps.extend(setup_info.get("setup_requires", [])) + build_deps.extend(setup_info.get("build_requires", [])) + if self.pyproject_requires: + build_deps.extend(self.pyproject_requires) + return deps, setup_deps, build_deps @uri.default def get_uri(self): if self.path and not self.uri: self._uri_scheme = "path" - self.uri = pip_shims.shims.path_to_url(os.path.abspath(self.path)) + return pip_shims.shims.path_to_url(os.path.abspath(self.path)) elif self.req and getattr(self.req, "url"): - self.uri = self.req.url + return self.req.url @name.default def get_name(self): loc = self.path or self.uri - if loc: - self._uri_scheme = "path" if self.path else "uri" + if loc and not self._uri_scheme: + self._uri_scheme = "path" if self.path else "file" name = None - if getattr(self, "req", None) and getattr(self.req, "name"): - return self.req.name - if self.link and self.link.egg_fragment: + if getattr(self, "req", None) and getattr(self.req, "name") and self.req.name is not None: + if self.is_direct_url: + return self.req.name + if self.link and self.link.egg_fragment and not self._has_hashed_name: return self.link.egg_fragment elif self.link and self.link.is_wheel: from pip_shims import Wheel - + self._has_hashed_name = False return Wheel(self.link.filename).name - if ( - self._uri_scheme != "uri" - and self.path - and self.setup_path - and 
self.setup_path.exists() - ): - from setuptools.dist import distutils - - old_curdir = os.path.abspath(os.getcwd()) - try: - os.chdir(str(self.setup_path.parent)) - dist = distutils.core.run_setup(self.setup_path.as_posix()) - name = dist.get_name() - except (FileNotFoundError, IOError) as e: - dist = None - except Exception as e: - from pip_shims.shims import make_abstract_dist - - try: - if not isinstance(Path, self.path): - _path = Path(self.path) - else: - _path = self.path - if self.editable: - _ireq = pip_shims.shims.install_req_from_editable(_path.as_uri()) - else: - _ireq = pip_shims.shims.install_req_from_line(_path.as_posix()) - dist = make_abstract_dist(_ireq).get_dist() - name = dist.project_name - except (TypeError, ValueError, AttributeError) as e: - dist = None - finally: - os.chdir(old_curdir) + elif self.link and ((self.link.scheme == "file" or self.editable) or ( + self.path and self.setup_path and os.path.isfile(str(self.setup_path)) + )): + if self.editable: + line = pip_shims.shims.path_to_url(self.setup_py_dir) + _ireq = pip_shims.shims.install_req_from_editable(line) + else: + _ireq = pip_shims.shims.install_req_from_line(Path(self.setup_py_dir).as_posix()) + from .setup_info import SetupInfo + subdir = getattr(self, "subdirectory", None) + setupinfo = SetupInfo.from_ireq(_ireq, subdir=subdir) + if setupinfo: + self.setup_info = setupinfo + setupinfo_dict = setupinfo.as_dict() + setup_name = setupinfo_dict.get("name", None) + if setup_name: + name = setup_name + self._has_hashed_name = False + version = setupinfo_dict.get("version") + if version and not self.version: + self.version = version + build_requires = setupinfo_dict.get("build_requires") + build_backend = setupinfo_dict.get("build_backend") + if build_requires and not self.pyproject_requires: + self.pyproject_requires = build_requires + if build_backend and not self.pyproject_backend: + self.pyproject_backend = build_backend hashed_loc = 
hashlib.sha256(loc.encode("utf-8")).hexdigest() hashed_name = hashed_loc[-7:] - if not name or name == "UNKNOWN": + if not name or name.lower() == "unknown": self._has_hashed_name = True name = hashed_name - if self.link and not self._has_hashed_name: + else: + self._has_hashed_name = False + name_in_link = getattr(self.link, "egg_fragment", "") if self.link else "" + if not self._has_hashed_name and name_in_link != name: self.link = create_link("{0}#egg={1}".format(self.link.url, name)) return name @link.default def get_link(self): target = "{0}".format(self.uri) - if hasattr(self, "name"): + if hasattr(self, "name") and not self._has_hashed_name: target = "{0}#egg={1}".format(target, self.name) link = create_link(target) return link @req.default def get_requirement(self): - if self.link.is_artifact and not self.editable: - if self._uri_scheme == "uri": - if self.name: - req_str = "{0} @ {1}".format(self.name, self.link.url_without_fragment) - else: - req_str = "{0}".format(self.link.url_without_fragment) - req = init_requirement(req_str) - req.line = req_str - else: - req = init_requirement(normalize_name(self.name)) - else: - req = init_requirement(normalize_name(self.name)) - req.editable = False - req.line = self.link.url_without_fragment - if self.path and self.link and self.link.scheme.startswith("file"): - req.local_file = True - req.path = self.path + req = init_requirement(normalize_name(self.name)) + req.editable = False + req.line = self.link.url_without_fragment + if self.path and self.link and self.link.scheme.startswith("file"): + req.local_file = True + req.path = self.path + if self.editable: req.url = None - self._uri_scheme = "file" else: - req.local_file = False - req.path = None - if not getattr(req, "url", None): + req.url = self.link.url_without_fragment + else: + req.local_file = False + req.path = None req.url = self.link.url_without_fragment if self.editable: req.editable = True @@ -389,6 +399,99 @@ def formatted_path(self): return 
path.as_posix() return + @classmethod + def create( + cls, path=None, uri=None, editable=False, extras=None, link=None, vcs_type=None, + name=None, req=None, line=None, uri_scheme=None, setup_path=None, relpath=None + ): + import pip_shims.shims + if relpath and not path: + path = relpath + if not path and uri and link.scheme == "file": + path = os.path.abspath(pip_shims.shims.url_to_path(unquote(uri))) + try: + path = get_converted_relative_path(path) + except ValueError: # Vistir raises a ValueError if it can't make a relpath + path = path + if line and not (uri_scheme and uri and link): + vcs_type, uri_scheme, relpath, path, uri, link = cls.get_link_from_line(line) + if not uri_scheme: + uri_scheme = "path" if path else "file" + if path and not uri: + uri = unquote(pip_shims.shims.path_to_url(os.path.abspath(path))) + if not link: + link = create_link(uri) + if not uri: + uri = unquote(link.url_without_fragment) + if not extras: + extras = [] + pyproject_path = None + if path is not None: + pyproject_requires = get_pyproject(os.path.abspath(path)) + pyproject_backend = None + pyproject_requires = None + if pyproject_requires is not None: + pyproject_requires, pyproject_backend = pyproject_requires + if path: + pyproject_path = Path(path).joinpath("pyproject.toml") + if not pyproject_path.exists(): + pyproject_path = None + if not setup_path and path is not None: + setup_path = Path(path).joinpath("setup.py") + if setup_path and isinstance(setup_path, Path): + setup_path = setup_path.as_posix() + creation_kwargs = { + "editable": editable, + "extras": extras, + "pyproject_path": pyproject_path, + "setup_path": setup_path if setup_path else None, + "uri_scheme": uri_scheme, + "link": link, + "uri": uri, + "pyproject_requires": pyproject_requires, + "pyproject_backend": pyproject_backend + } + if vcs_type: + creation_kwargs["vcs_type"] = vcs_type + _line = None + if not name: + import pip_shims.shims + _line = unquote(link.url_without_fragment) if link.url else uri 
+ if editable: + ireq = pip_shims.shims.install_req_from_editable(_line) + else: + _line = path if (uri_scheme and uri_scheme == "path") else _line + ireq = pip_shims.shims.install_req_from_line(_line) + setup_info = SetupInfo.from_ireq(ireq) + setupinfo_dict = setup_info.as_dict() + setup_name = setupinfo_dict.get("name", None) + if setup_name: + name = setup_name + build_requires = setupinfo_dict.get("build_requires", []) + build_backend = setupinfo_dict.get("build_backend", []) + if not creation_kwargs.get("pyproject_requires") and build_requires: + creation_kwargs["pyproject_requires"] = build_requires + if not creation_kwargs.get("pyproject_backend") and build_backend: + creation_kwargs["pyproject_backend"] = build_backend + creation_kwargs["setup_info"] = setup_info + if path or relpath: + creation_kwargs["path"] = relpath if relpath else path + if req: + creation_kwargs["req"] = req + if creation_kwargs.get("req") and line and not getattr(creation_kwargs["req"], "line", None): + creation_kwargs["req"].line = line + if name: + creation_kwargs["name"] = name + cls_inst = cls(**creation_kwargs) + if not _line: + if editable and uri_scheme == "path": + _line = relpath if relpath else path + else: + _line = unquote(cls_inst.link.url_without_fragment) or cls_inst.uri + _line = "{0}#egg={1}".format(line, cls_inst.name) if not cls_inst._has_hashed_name else _line + cls_inst.req.line = line if line else _line + return cls_inst + @classmethod def from_line(cls, line): line = line.strip('"').strip("'") @@ -410,7 +513,6 @@ def from_line(cls, line): name = getattr(req, "name", None) line = getattr(req, "url", None) vcs_type, prefer, relpath, path, uri, link = cls.get_link_from_line(line) - setup_path = Path(path) / "setup.py" if path else None arg_dict = { "path": relpath if relpath else path, "uri": unquote(link.url_without_fragment), @@ -418,6 +520,7 @@ def from_line(cls, line): "editable": editable, "setup_path": setup_path, "uri_scheme": prefer, + "line": line } if 
link and link.is_wheel: from pip_shims import Wheel @@ -427,10 +530,7 @@ def from_line(cls, line): arg_dict["name"] = name elif link.egg_fragment: arg_dict["name"] = link.egg_fragment - if req: - arg_dict["req"] = req - created = cls(**arg_dict) - return created + return cls.create(**arg_dict) @classmethod def from_pipfile(cls, name, pipfile): @@ -466,9 +566,6 @@ def from_pipfile(cls, name, pipfile): if not uri: uri = pip_shims.shims.path_to_url(path) link = create_link(uri) - req = None - if link.is_artifact and not link.is_wheel and not link.scheme.startswith("file"): - req = init_requirement("{0}@{1}".format(name, uri)) arg_dict = { "name": name, "path": path, @@ -477,13 +574,14 @@ def from_pipfile(cls, name, pipfile): "link": link, "uri_scheme": uri_scheme, } - if req: - arg_dict["req"] = req - return cls(**arg_dict) + if link.scheme != "file" and not pipfile.get("editable", False): + arg_dict["line"] = "{0}@ {1}".format(name, link.url_without_fragment) + return cls.create(**arg_dict) @property def line_part(self): if self._uri_scheme and self._uri_scheme == "path": + # We may need any one of these for passing to pip seed = self.path or unquote(self.link.url_without_fragment) or self.uri elif (self._uri_scheme and self._uri_scheme == "file") or ( (self.link.is_artifact or self.link.is_wheel) and self.link.url @@ -491,16 +589,16 @@ def line_part(self): seed = unquote(self.link.url_without_fragment) or self.uri # add egg fragments to remote artifacts (valid urls only) if not self._has_hashed_name and self.is_remote_artifact: - if not self.link.is_wheel and self.link.is_artifact: - seed = "{0}@{1}".format(self.name, seed) - else: - seed += "#egg={0}".format(self.name) + seed += "#egg={0}".format(self.name) editable = "-e " if self.editable else "" return "{0}{1}".format(editable, seed) @property def pipfile_part(self): - excludes = ["_base_line", "_has_hashed_name", "setup_path"] + excludes = [ + "_base_line", "_has_hashed_name", "setup_path", "pyproject_path", + 
"pyproject_requires", "pyproject_backend", "setup_info" + ] filter_func = lambda k, v: bool(v) is True and k.name not in excludes pipfile_dict = attr.asdict(self, filter=filter_func).copy() name = pipfile_dict.pop("name") @@ -687,10 +785,19 @@ def get_vcs_repo(self, src_dir=None): ) if not self.is_local: vcsrepo.obtain() + pyproject_info = None if self.subdirectory: self.setup_path = os.path.join(checkout_dir, self.subdirectory, "setup.py") + self.pyproject_path = os.path.join(checkout_dir, self.subdirectory, "pyproject.toml") + pyproject_info = get_pyproject(os.path.join(checkout_dir, self.subdirectory)) else: self.setup_path = os.path.join(checkout_dir, "setup.py") + self.pyproject_path = os.path.join(checkout_dir, "pyproject.toml") + pyproject_info = get_pyproject(checkout_dir) + if pyproject_info is not None: + pyproject_requires, pyproject_backend = pyproject_info + self.pyproject_requires = pyproject_requires + self.pyproject_backend = pyproject_backend return vcsrepo def get_commit_hash(self): @@ -846,7 +953,10 @@ def _choose_vcs_source(pipfile): @property def pipfile_part(self): - excludes = ["_repo", "_base_line", "setup_path", "_has_hashed_name"] + excludes = [ + "_repo", "_base_line", "setup_path", "_has_hashed_name", "pyproject_path", + "pyproject_requires", "pyproject_backend", "setup_info" + ] filter_func = lambda k, v: bool(v) is True and k.name not in excludes pipfile_dict = attr.asdict(self, filter=filter_func).copy() if "vcs" in pipfile_dict: @@ -952,7 +1062,6 @@ def from_line(cls, line): line = line.split(" ", 1)[1] if editable else line line, markers = split_markers_from_line(line) line, extras = pip_shims.shims._strip_extras(line) - specifiers = "" if extras: extras = parse_extras(extras) line = line.strip('"').strip("'").strip() @@ -984,7 +1093,6 @@ def from_line(cls, line): spec_idx = min((line.index(match) for match in spec_matches)) name = line[:spec_idx] version = line[spec_idx:] - specifiers = version if not extras: name, extras = 
pip_shims.shims._strip_extras(name) if extras: @@ -995,7 +1103,7 @@ def from_line(cls, line): req_markers = None if markers: req_markers = PackagingRequirement("fakepkg; {0}".format(markers)) - r.req.marker = getattr(req_markers, "marker", None) + r.req.marker = getattr(req_markers, "marker", None) if req_markers else None r.req.local_file = getattr(r.req, "local_file", False) name = getattr(r.req, "name", None) if not name: @@ -1021,7 +1129,15 @@ def from_line(cls, line): args["extras"] = sorted(dedup([extra.lower() for extra in r.extras])) if hashes: args["hashes"] = hashes - return cls(**args) + cls_inst = cls(**args) + if not cls_inst.is_named and (not cls_inst.editable or cls_inst.req._has_hashed_name): + old_name = cls_inst.req.req.name or cls_inst.req.name + info_dict = cls_inst.run_requires() + calced_name = info_dict.get("name", old_name) + if old_name != calced_name: + cls_inst.req.req.line.replace(old_name, calced_name) + cls_inst.name = cls_inst.req.name = calced_name + return cls_inst @classmethod def from_ireq(cls, ireq): @@ -1074,6 +1190,22 @@ def from_pipfile(cls, name, pipfile): cls_inst = cls(**args) if cls_inst.is_named: cls_inst.req.req.line = cls_inst.as_line() + old_name = cls_inst.req.req.name or cls_inst.req.name + if not cls_inst.is_named and not cls_inst.editable and not name: + if cls_inst.is_vcs: + import pip_shims.shims + ireq = pip_shims.shims.install_req_from_req(cls_inst.as_line(include_hashes=False)) + info = SetupInfo.from_ireq(ireq) + if info is not None: + info_dict = info.as_dict() + cls_inst.req.setup_info = info + else: + info_dict = {} + else: + info_dict = cls_inst.run_requires() + found_name = info_dict.get("name", old_name) + if old_name != found_name: + cls_inst.req.req.line.replace(old_name, found_name) return cls_inst def as_line( @@ -1159,6 +1291,10 @@ def get_requirement(self): def constraint_line(self): return self.as_line() + @property + def is_direct_url(self): + return self.is_file_or_url and 
self.req.is_direct_url + def as_pipfile(self): good_keys = ( "hashes", @@ -1294,6 +1430,26 @@ def find_all_matches(self, sources=None, finder=None): finder = get_finder(sources=sources) return find_all_matches(finder, self.as_ireq()) + def run_requires(self, sources=None, finder=None): + if self.req and self.req.setup_info is not None: + info_dict = self.req.setup_info.as_dict() + else: + from .setup_info import SetupInfo + if not finder: + from .dependencies import get_finder + finder = get_finder(sources=sources) + info = SetupInfo.from_requirement(self, finder=finder) + if info is None: + return {} + info_dict = info.get_info() + if self.req and not self.req.setup_info: + self.req.setup_info = info + if self.req._has_hashed_name and info_dict.get("name"): + self.req.name = self.name = info_dict["name"] + if self.req.req.name != info_dict["name"]: + self.req.req.name = info_dict["name"] + return info_dict + def merge_markers(self, markers): if not isinstance(markers, Marker): markers = Marker(markers) diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py new file mode 100644 index 0000000000..319dd6bdcb --- /dev/null +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -0,0 +1,378 @@ +# -*- coding=utf-8 -*- +import configparser +import contextlib +import os + +import attr +import packaging.version +import packaging.specifiers +import packaging.utils + +try: + from setuptools.dist import distutils +except ImportError: + import distutils + +from appdirs import user_cache_dir +from six.moves.urllib.parse import unquote +from vistir.compat import Path +from vistir.contextmanagers import cd +from vistir.path import create_tracked_tempdir, ensure_mkdir_p, mkdir_p + +from .utils import init_requirement, get_pyproject + +try: + from os import scandir +except ImportError: + from scandir import scandir + + +CACHE_DIR = os.environ.get("PIPENV_CACHE_DIR", user_cache_dir("pipenv")) + + 
+@contextlib.contextmanager +def _suppress_distutils_logs(): + """Hack to hide noise generated by `setup.py develop`. + + There isn't a good way to suppress them now, so let's monky-patch. + See https://bugs.python.org/issue25392. + """ + + f = distutils.log.Log._log + + def _log(log, level, msg, args): + if level >= distutils.log.ERROR: + f(log, level, msg, args) + + distutils.log.Log._log = _log + yield + distutils.log.Log._log = f + + +@ensure_mkdir_p(mode=0o775) +def _get_src_dir(): + src = os.environ.get("PIP_SRC") + if src: + return src + virtual_env = os.environ.get("VIRTUAL_ENV") + if virtual_env: + return os.path.join(virtual_env, "src") + return os.path.join(os.getcwd(), "src") # Match pip's behavior. + + +def _prepare_wheel_building_kwargs(ireq): + download_dir = os.path.join(CACHE_DIR, "pkgs") + mkdir_p(download_dir) + + wheel_download_dir = os.path.join(CACHE_DIR, "wheels") + mkdir_p(wheel_download_dir) + + if ireq.source_dir is not None: + src_dir = ireq.source_dir + elif ireq.editable: + src_dir = _get_src_dir() + else: + src_dir = create_tracked_tempdir(prefix="reqlib-src") + + # This logic matches pip's behavior, although I don't fully understand the + # intention. I guess the idea is to build editables in-place, otherwise out + # of the source tree? 
+ if ireq.editable: + build_dir = src_dir + else: + build_dir = create_tracked_tempdir(prefix="reqlib-build") + + return { + "build_dir": build_dir, + "src_dir": src_dir, + "download_dir": download_dir, + "wheel_download_dir": wheel_download_dir, + } + + +def iter_egginfos(path, pkg_name=None): + for entry in scandir(path): + if entry.is_dir(): + if not entry.name.endswith("egg-info"): + for dir_entry in iter_egginfos(entry.path, pkg_name=pkg_name): + yield dir_entry + elif pkg_name is None or entry.name.startswith(pkg_name): + yield entry + + +def find_egginfo(target, pkg_name=None): + egg_dirs = (egg_dir for egg_dir in iter_egginfos(target, pkg_name=pkg_name)) + if pkg_name: + return next(iter(egg_dirs), None) + else: + for egg_dir in egg_dirs: + yield egg_dir + + +def get_metadata(path, pkg_name=None): + if pkg_name: + pkg_name = packaging.utils.canonicalize_name(pkg_name) + egg_dir = next(iter(find_egginfo(path, pkg_name=pkg_name)), None) + if egg_dir is not None: + import pkg_resources + + egg_dir = os.path.abspath(egg_dir) + base_dir = os.path.dirname(egg_dir) + path_metadata = pkg_resources.PathMetadata(base_dir, egg_dir) + dist = next( + iter(pkg_resources.distributions_from_metadata(path_metadata.egg_info)), + None, + ) + if dist: + requires = dist.requires() + dep_map = dist._build_dep_map() + deps = [] + for k in dep_map.keys(): + if k is None: + deps.extend(dep_map.get(k)) + continue + else: + _deps = dep_map.get(k) + k = k.replace(":", "; ") + _deps = [ + pkg_resources.Requirement.parse("{0}{1}".format(str(req), k)) + for req in _deps + ] + deps.extend(_deps) + return { + "name": dist.project_name, + "version": dist.version, + "requires": requires, + } + + +@attr.s(slots=True) +class SetupInfo(object): + name = attr.ib(type=str, default=None) + base_dir = attr.ib(type=Path, default=None) + version = attr.ib(type=packaging.version.Version, default=None) + extras = attr.ib(type=list, default=attr.Factory(list)) + requires = attr.ib(type=dict, 
default=attr.Factory(dict)) + build_requires = attr.ib(type=list, default=attr.Factory(list)) + build_backend = attr.ib(type=list, default=attr.Factory(list)) + setup_requires = attr.ib(type=dict, default=attr.Factory(list)) + python_requires = attr.ib(type=packaging.specifiers.SpecifierSet, default=None) + extras = attr.ib(type=dict, default=attr.Factory(dict)) + setup_cfg = attr.ib(type=Path, default=None) + setup_py = attr.ib(type=Path, default=None) + pyproject = attr.ib(type=Path, default=None) + ireq = attr.ib(default=None) + extra_kwargs = attr.ib(default=attr.Factory(dict), type=dict) + + def parse_setup_cfg(self): + if self.setup_cfg is not None and self.setup_cfg.exists(): + default_opts = { + "metadata": {"name": "", "version": ""}, + "options": { + "install_requires": "", + "python_requires": "", + "build_requires": "", + "setup_requires": "", + "extras": "", + }, + } + parser = configparser.ConfigParser(default_opts) + parser.read(self.setup_cfg.as_posix()) + if parser.has_option("metadata", "name"): + name = parser.get("metadata", "name") + if not self.name and name is not None: + self.name = name + if parser.has_option("metadata", "version"): + version = parser.get("metadata", "version") + if not self.version and version is not None: + self.version = version + if parser.has_option("options", "install_requires"): + self.requires.update( + { + dep.strip(): init_requirement(dep.strip()) + for dep in parser.get("options", "install_requires").split("\n") + if dep + } + ) + if parser.has_option("options", "python_requires"): + python_requires = parser.get("options", "python_requires") + if python_requires and not self.python_requires: + self.python_requires = python_requires + if parser.has_option("options", "extras_require"): + self.extras.update( + { + section: [ + dep.strip() + for dep in parser.get( + "options.extras_require", section + ).split("\n") + if dep + ] + for section in parser.options("options.extras_require") + } + ) + + def run_setup(self): 
+ if self.setup_py is not None and self.setup_py.exists(): + with cd(self.setup_py.parent), _suppress_distutils_logs(): + from setuptools.dist import distutils + + dist = distutils.core.run_setup( + self.setup_py.as_posix(), ["egg_info", "--egg-base", self.base_dir] + ) + name = dist.get_name() + if name: + self.name = name + if dist.python_requires and not self.python_requires: + self.python_requires = packaging.specifiers.SpecifierSet( + dist.python_requires + ) + if dist.extras_require and not self.extras: + self.extras = dist.extras_require + install_requires = dist.get_requires() + if not install_requires: + install_requires = dist.install_requires + if install_requires and not self.requires: + requirements = [init_requirement(req) for req in install_requires] + self.requires.update({req.key: req for req in requirements}) + if dist.setup_requires and not self.setup_requires: + self.setup_requires = dist.setup_requires + if not self.version: + self.version = dist.get_version() + + def get_egg_metadata(self): + if self.setup_py is not None and self.setup_py.exists(): + metadata = get_metadata(self.setup_py.parent.as_posix(), pkg_name=self.name) + if metadata: + if not self.name: + self.name = metadata.get("name", self.name) + if not self.version: + self.version = metadata.get("version", self.version) + self.requires.update( + {req.key: req for req in metadata.get("requires", {})} + ) + + def run_pyproject(self): + if self.pyproject and self.pyproject.exists(): + result = get_pyproject(self.pyproject.parent) + if result is not None: + requires, backend = result + if backend: + self.build_backend = backend + if requires and not self.build_requires: + self.build_requires = requires + + def get_info(self): + if self.setup_cfg and self.setup_cfg.exists(): + self.parse_setup_cfg() + if self.setup_py and self.setup_py.exists(): + if not self.requires or not self.name: + try: + self.run_setup() + except Exception as e: + self.get_egg_metadata() + if not self.requires or 
not self.name: + self.get_egg_metadata() + + if self.pyproject and self.pyproject.exists(): + self.run_pyproject() + return self.as_dict() + + def as_dict(self): + prop_dict = { + "name": self.name, + "version": self.version, + "base_dir": self.base_dir, + "ireq": self.ireq, + "build_backend": self.build_backend, + "build_requires": self.build_requires, + "requires": self.requires, + "setup_requires": self.setup_requires, + "python_requires": self.python_requires, + "extras": self.extras, + "extra_kwargs": self.extra_kwargs, + "setup_cfg": self.setup_cfg, + "setup_py": self.setup_py, + "pyproject": self.pyproject, + } + return {k: v for k, v in prop_dict.items() if v} + + @classmethod + def from_requirement(cls, requirement, finder=None): + ireq = requirement.as_ireq() + subdir = getattr(requirement.req, "subdirectory", None) + return cls.from_ireq(ireq, subdir=subdir, finder=finder) + + @classmethod + def from_ireq(cls, ireq, subdir=None, finder=None): + import pip_shims.shims + + if ireq.link.is_wheel: + return + if not finder: + from .dependencies import get_finder + + finder = get_finder() + kwargs = _prepare_wheel_building_kwargs(ireq) + ireq.populate_link(finder, False, False) + ireq.ensure_has_source_dir(kwargs["build_dir"]) + if not ( + ireq.editable + and pip_shims.shims.is_file_url(ireq.link) + and not ireq.link.is_artifact + ): + if ireq.is_wheel: + only_download = True + download_dir = kwargs["wheel_download_dir"] + else: + only_download = False + download_dir = kwargs["download_dir"] + ireq_src_dir = None + if ireq.link.scheme == "file": + path = pip_shims.shims.url_to_path(unquote(ireq.link.url_without_fragment)) + if pip_shims.shims.is_installable_dir(path): + ireq_src_dir = path + if not ireq.editable or not (pip_shims.is_file_url(ireq.link) and ireq_src_dir): + pip_shims.shims.unpack_url( + ireq.link, + ireq.source_dir, + download_dir, + only_download=only_download, + session=finder.session, + hashes=ireq.hashes(False), + progress_bar="off", + ) + 
if ireq.editable: + created = cls.create( + ireq.source_dir, subdirectory=subdir, ireq=ireq, kwargs=kwargs + ) + else: + build_dir = ireq.build_location(kwargs["build_dir"]) + ireq._temp_build_dir.path = kwargs["build_dir"] + created = cls.create( + build_dir, subdirectory=subdir, ireq=ireq, kwargs=kwargs + ) + created.get_info() + return created + + @classmethod + def create(cls, base_dir, subdirectory=None, ireq=None, kwargs=None): + if not base_dir or base_dir is None: + return + + creation_kwargs = {"extra_kwargs": kwargs} + if not isinstance(base_dir, Path): + base_dir = Path(base_dir) + creation_kwargs["base_dir"] = base_dir.as_posix() + pyproject = base_dir.joinpath("pyproject.toml") + + if subdirectory is not None: + base_dir = base_dir.joinpath(subdirectory) + setup_py = base_dir.joinpath("setup.py") + setup_cfg = base_dir.joinpath("setup.cfg") + creation_kwargs["pyproject"] = pyproject + creation_kwargs["setup_py"] = setup_py + creation_kwargs["setup_cfg"] = setup_cfg + if ireq: + creation_kwargs["ireq"] = ireq + return cls(**creation_kwargs) diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index aa7ffd681c..2b47ee9bb0 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import +import io import os import sys @@ -9,6 +10,7 @@ from operator import attrgetter import six +import tomlkit from attr import validators from first import first @@ -17,7 +19,7 @@ from vistir.misc import dedup -from ..utils import SCHEME_LIST, VCS_LIST, is_star, strip_ssh_from_git_uri, add_ssh_scheme_to_git_uri +from ..utils import SCHEME_LIST, VCS_LIST, is_star, add_ssh_scheme_to_git_uri HASH_STRING = " --hash={0}" @@ -93,6 +95,7 @@ def build_vcs_link(vcs, uri, name=None, ref=None, subdirectory=None, extras=None if extras: extras = extras_to_string(extras) uri = "{0}{1}".format(uri, extras) + # if 
subdirectory: if subdirectory: uri = "{0}&subdirectory={1}".format(uri, subdirectory) return create_link(uri) @@ -112,6 +115,42 @@ def get_version(pipfile_entry): return "" +def get_pyproject(path): + from vistir.compat import Path + if not path: + return + if not isinstance(path, Path): + path = Path(path) + if not path.is_dir(): + path = path.parent + pp_toml = path.joinpath("pyproject.toml") + setup_py = path.joinpath("setup.py") + if not pp_toml.exists(): + if setup_py.exists(): + return None + else: + pyproject_data = {} + with io.open(pp_toml.as_posix(), encoding="utf-8") as fh: + pyproject_data = tomlkit.loads(fh.read()) + build_system = pyproject_data.get("build-system", None) + if build_system is None: + if setup_py.exists(): + requires = ["setuptools", "wheel"] + backend = "setuptools.build_meta" + else: + requires = ["setuptools>=38.2.5", "wheel"] + backend = "setuptools.build_meta" + build_system = { + "requires": requires, + "build-backend": backend + } + pyproject_data["build_system"] = build_system + else: + requires = build_system.get("requires") + backend = build_system.get("build-backend") + return (requires, backend) + + def split_markers_from_line(line): """Split markers from a dependency""" if not any(line.startswith(uri_prefix) for uri_prefix in SCHEME_LIST): diff --git a/pipenv/vendor/requirementslib/models/vcs.py b/pipenv/vendor/requirementslib/models/vcs.py index dd8cc3a449..6a15db3f85 100644 --- a/pipenv/vendor/requirementslib/models/vcs.py +++ b/pipenv/vendor/requirementslib/models/vcs.py @@ -4,7 +4,6 @@ import pip_shims - @attr.s class VCSRepository(object): url = attr.ib() @@ -32,7 +31,7 @@ def is_local(self): def obtain(self): if (os.path.exists(self.checkout_directory) and not - self.repo_instance.is_repository_directory(self.checkout_directory)): + self.repo_instance.is_repository_directory(self.checkout_directory)): self.repo_instance.unpack(self.checkout_directory) elif not os.path.exists(self.checkout_directory): 
self.repo_instance.obtain(self.checkout_directory) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 0ab0ab22ed..6ed95b3e7c 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -56,8 +56,17 @@ def check_github_ssh(): return res +def check_for_mercurial(): + c = delegator.run("hg --help") + if c.return_code != 0: + return False + else: + return True + + TESTS_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) PYPI_VENDOR_DIR = os.path.join(TESTS_ROOT, 'pypi') +WE_HAVE_HG = check_for_mercurial() prepare_pypi_packages(PYPI_VENDOR_DIR) @@ -66,6 +75,8 @@ def pytest_runtest_setup(item): pytest.skip('requires internet') if item.get_marker('needs_github_ssh') is not None and not WE_HAVE_GITHUB_SSH_KEYS: pytest.skip('requires github ssh') + if item.get_marker('needs_hg') is not None and not WE_HAVE_HG: + pytest.skip('requires mercurial') @pytest.fixture @@ -100,6 +111,8 @@ def isolate(pathlib_tmpdir): os.environ["GIT_AUTHOR_EMAIL"] = fs_str("pipenv@pipenv.org") mkdir_p(os.path.join(home_dir, ".virtualenvs")) os.environ["WORKON_HOME"] = fs_str(os.path.join(home_dir, ".virtualenvs")) + global WE_HAVE_GITHUB_SSH_KEYS + WE_HAVE_GITHUB_SSH_KEYS = check_github_ssh() WE_HAVE_INTERNET = check_internet() diff --git a/tests/integration/test_uninstall.py b/tests/integration/test_uninstall.py index e19a140027..5f493cac9b 100644 --- a/tests/integration/test_uninstall.py +++ b/tests/integration/test_uninstall.py @@ -84,7 +84,7 @@ def test_uninstall_all_local_files(PipenvInstance, testsroot): # Not sure where travis/appveyor run tests from source_path = os.path.abspath(os.path.join(testsroot, "test_artifacts", file_name)) - with PipenvInstance() as p: + with PipenvInstance(chdir=True) as p: shutil.copy(source_path, os.path.join(p.path, file_name)) os.mkdir(os.path.join(p.path, "requests")) c = p.pipenv("install {}".format(file_name)) @@ -92,7 +92,9 @@ def test_uninstall_all_local_files(PipenvInstance, testsroot): 
c = p.pipenv("uninstall --all") assert c.return_code == 0 assert "requests" in c.out - assert "requests" not in p.pipfile["packages"] + # Uninstall --all is not supposed to remove things from the pipfile + # Note that it didn't before, but that instead local filenames showed as hashes + assert "requests" in p.pipfile["packages"] @pytest.mark.run From dec7be54d716d078390232c5ac5a78991ca3e0b1 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 16:39:23 -0500 Subject: [PATCH 21/71] Introduce `pipenv.environments.Environment` - Specific construct for isolationg operations Signed-off-by: Dan Ryan --- pipenv/core.py | 107 ++++---- pipenv/environment.py | 618 ++++++++++++++++++++++++++++++++++++++++++ pipenv/project.py | 176 +++--------- 3 files changed, 713 insertions(+), 188 deletions(-) create mode 100644 pipenv/environment.py diff --git a/pipenv/core.py b/pipenv/core.py index 3cbd1645f1..021b0f3ca1 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -915,7 +915,15 @@ def do_create_virtualenv(python=None, site_packages=False, pypi_mirror=None): project_file_name = os.path.join(project.virtualenv_location, ".project") with open(project_file_name, "w") as f: f.write(vistir.misc.fs_str(project.project_directory)) - fix_venv_site(project.env_paths["lib"]) + from .environment import Environment + sources = project.pipfile_sources + project._environment = Environment( + prefix=project.get_location_for_virtualenv(), + is_venv=True, + sources=sources, + pipfile=project.parsed_pipfile + ) + project._environment.add_dist("pipenv") # Say where the virtualenv is. do_where(virtualenv=True, bare=False) @@ -1129,7 +1137,7 @@ def do_purge(bare=False, downloads=False, allow_global=False): # Remove comments from the output, if any. 
installed = set([ - pep423_name(pkg.project_name) for pkg in project.get_installed_packages() + pep423_name(pkg.project_name) for pkg in project.environment.get_installed_packages() ]) bad_pkgs = set([pep423_name(pkg) for pkg in BAD_PACKAGES]) # Remove setuptools, pip, etc from targets for removal @@ -1662,7 +1670,7 @@ def do_outdated(pypi_mirror=None): packages = {} package_info = namedtuple("PackageInfo", ["name", "installed", "available"]) - installed_packages = project.get_installed_packages() + installed_packages = project.environment.get_installed_packages() outdated_packages = { canonicalize_name(pkg.project_name): package_info (pkg.project_name, pkg.parsed_version, pkg.latest_version) @@ -1916,7 +1924,14 @@ def do_install( # make a tuple of (display_name, entry) pkg_list = packages + ["-e {0}".format(pkg) for pkg in editable_packages] - + if not system and not project.virtualenv_exists: + with create_spinner("Creating virtualenv...") as sp: + try: + do_create_virtualenv(pypi_mirror=pypi_mirror) + except KeyboardInterrupt: + cleanup_virtualenv(bare=(not environments.is_verbose())) + sys.exit(1) + sp.write_err("Ok...") for pkg_line in pkg_list: click.echo( crayons.normal( @@ -1925,8 +1940,7 @@ def do_install( ) ) # pip install: - with vistir.contextmanagers.temp_environ(), \ - create_spinner("Installing...") as sp: + with vistir.contextmanagers.temp_environ(), create_spinner("Installing...") as sp: os.environ["PIP_USER"] = vistir.compat.fs_str("0") try: pkg_requirement = Requirement.from_line(pkg_line) @@ -2055,30 +2069,17 @@ def do_uninstall( package_map = { canonicalize_name(p): p for p in packages if p } - installed_package_names = set([ - canonicalize_name(pkg.project_name) for pkg in project.get_installed_packages() - ]) + installed_package_names = project.installed_package_names # Intelligently detect if --dev should be used or not. 
lockfile_packages = set() if project.lockfile_exists: - develop = set( - [canonicalize_name(k) for k in project.lockfile_content["develop"].keys()] - ) - default = set( - [canonicalize_name(k) for k in project.lockfile_content["default"].keys()] - ) - lockfile_packages |= develop | default + project_pkg_names = project.lockfile_package_names else: - develop = set( - [canonicalize_name(k) for k in project.dev_packages.keys()] - ) - default = set( - [canonicalize_name(k) for k in project.packages.keys()] - ) + project_pkg_names = project.pipfile_package_names pipfile_remove = True # Uninstall [dev-packages], if --dev was provided. if all_dev: - if "dev-packages" not in project.parsed_pipfile and not develop: + if "dev-packages" not in project.parsed_pipfile and not project_pkg_names["dev"]: click.echo( crayons.normal( "No {0} to uninstall.".format(crayons.red("[dev-packages]")), @@ -2091,28 +2092,33 @@ def do_uninstall( fix_utf8("Un-installing {0}…".format(crayons.red("[dev-packages]"))), bold=True ) ) - package_names = develop + package_names = project_pkg_names["dev"] + # Remove known "bad packages" from the list. 
- bad_pkgs = set([canonicalize_name(pkg) for pkg in BAD_PACKAGES]) - for bad_package in BAD_PACKAGES: - normalized_bad_pkg = canonicalize_name(bad_package) - if normalized_bad_pkg in package_map: - if environments.is_verbose(): - click.echo("Ignoring {0}.".format(bad_package), err=True) - pkg_name_index = package_names.index(package_map[normalized_bad_pkg]) - del package_names[pkg_name_index] - used_packages = develop | default & installed_package_names + bad_pkgs = get_canonical_names(BAD_PACKAGES) + ignored_packages = bad_pkgs & set(list(package_map.keys())) + for ignored_pkg in ignored_packages: + if environments.is_verbose(): + click.echo("Ignoring {0}.".format(ignored_pkg), err=True) + pkg_name_index = package_names.index(package_map[ignored_pkg]) + del package_names[pkg_name_index] + + used_packages = project_pkg_names["combined"] & installed_package_names failure = False packages_to_remove = set() if all: - package_names = develop | default click.echo( - crayons.normal(fix_utf8("Un-installing all packages from virtualenv…"), bold=True) + crayons.normal( + fix_utf8("Un-installing all {0} and {1}…".format( + crayons.red("[dev-packages]"), + crayons.red("[packages]"), + )), bold=True + ) ) - do_purge(allow_global=system) - return + do_purge(bare=False, allow_global=system) + sys.exit(0) if all_dev: - package_names = develop + package_names = project_pkg_names["dev"] else: package_names = set([pkg_name for pkg_name in package_names]) selected_pkg_map = { @@ -2120,7 +2126,7 @@ def do_uninstall( } packages_to_remove = [ p for normalized, p in selected_pkg_map.items() - if (normalized in used_packages and normalized not in bad_pkgs) + if normalized in (used_packages - bad_pkgs) ] for normalized, package_name in selected_pkg_map.items(): click.echo( @@ -2130,15 +2136,16 @@ def do_uninstall( ) # Uninstall the package. 
if package_name in packages_to_remove: - cmd = "{0} uninstall {1} -y".format( - escape_grouped_arguments(which_pip(allow_global=system)), package_name, - ) - if environments.is_verbose(): - click.echo("$ {0}".format(cmd)) - c = delegator.run(cmd) - click.echo(crayons.blue(c.out)) - if c.return_code != 0: - failure = True + with project.environment.activated(): + cmd = "{0} uninstall {1} -y".format( + escape_grouped_arguments(which_pip(allow_global=system)), package_name, + ) + if environments.is_verbose(): + click.echo("$ {0}".format(cmd)) + c = delegator.run(cmd) + click.echo(crayons.blue(c.out)) + if c.return_code != 0: + failure = True if not failure and pipfile_remove: in_packages = project.get_package_name_in_pipfile(package_name, dev=False) in_dev_packages = project.get_package_name_in_pipfile( @@ -2646,9 +2653,9 @@ def do_clean(ctx, three=None, python=None, dry_run=False, bare=False, pypi_mirro ensure_lockfile(pypi_mirror=pypi_mirror) # Make sure that the virtualenv's site packages are configured correctly # otherwise we may end up removing from the global site packages directory - fix_venv_site(project.env_paths["lib"]) installed_package_names = [ - canonicalize_name(pkg.project_name) for pkg in project.get_installed_packages() + canonicalize_name(pkg.project_name) for pkg + in project.environment.get_installed_packages() ] # Remove known "bad packages" from the list. 
for bad_package in BAD_PACKAGES: diff --git a/pipenv/environment.py b/pipenv/environment.py new file mode 100644 index 0000000000..db0e22aa40 --- /dev/null +++ b/pipenv/environment.py @@ -0,0 +1,618 @@ +# -*- coding=utf-8 -*- + +import contextlib +import importlib +import json +import os +import sys +import operator +import pkg_resources +import six + +from distutils.sysconfig import get_python_lib +from sysconfig import get_paths + +from cached_property import cached_property + +import vistir +import pipenv + +BASE_WORKING_SET = pkg_resources.WorkingSet(sys.path) + + +class Environment(object): + def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=None, sources=None): + super(Environment, self).__init__() + self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} + self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET + self.is_venv = not os.path.samefile(os.path.abspath(prefix), sys.prefix) + if not sources: + sources = [] + self.sources = sources + self.extra_dists = [] + prefix = prefix if prefix else sys.prefix + self.prefix = vistir.compat.Path(prefix) + + def safe_import(self, name): + """Helper utility for reimporting previously imported modules while inside the env""" + module = None + if name not in self._modules: + self._modules[name] = importlib.import_module(name) + module = self._modules[name] + if not module: + dist = next(iter( + dist for dist in self.base_working_set if dist.project_name == name + ), None) + if dist: + dist.activate() + module = importlib.import_module(name) + if name in sys.modules: + try: + six.moves.reload_module(module) + six.moves.reload_module(sys.modules[name]) + except TypeError: + del sys.modules[name] + sys.modules[name] = self._modules[name] + return module + + @classmethod + def resolve_dist(cls, dist, working_set): + """Given a local distribution and a working set, returns all dependencies from the set. 
+ + :param dist: A single distribution to find the dependencies of + :type dist: :class:`pkg_resources.Distribution` + :param working_set: A working set to search for all packages + :type working_set: :class:`pkg_resources.WorkingSet` + :return: A set of distributions which the package depends on, including the package + :rtype: set(:class:`pkg_resources.Distribution`) + """ + + deps = set() + deps.add(dist) + try: + reqs = dist.requires() + except AttributeError: + return deps + for req in reqs: + dist = working_set.find(req) + deps |= cls.resolve_dist(dist, working_set) + return deps + + def add_dist(self, dist_name): + dist = pkg_resources.get_distribution(pkg_resources.Requirement(dist_name)) + extras = self.resolve_dist(dist, self.base_working_set) + if extras: + self.extra_dists.extend(extras) + + @cached_property + def python_version(self): + with self.activated(): + from sysconfig import get_python_version + py_version = get_python_version() + return py_version + + @property + def python_info(self): + include_dir = self.prefix / "include" + python_path = next(iter(list(include_dir.iterdir())), None) + if python_path and python_path.name.startswith("python"): + python_version = python_path.name.replace("python", "") + py_version_short, abiflags = python_version[:3], python_version[3:] + return {"py_version_short": py_version_short, "abiflags": abiflags} + return {} + + @cached_property + def base_paths(self): + """ + Returns the context appropriate paths for the environment. + + :return: A dictionary of environment specific paths to be used for installation operations + :rtype: dict + + .. note:: The implementation of this is borrowed from a combination of pip and + virtualenv and is likely to change at some point in the future. 
+ + >>> from pipenv.core import project + >>> from pipenv.environment import Environment + >>> env = Environment(prefix=project.virtualenv_location, is_venv=True, sources=project.sources) + >>> import pprint + >>> pprint.pprint(env.base_paths) + {'PATH': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW/bin::/bin:/usr/bin', + 'PYTHONPATH': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW/lib/python3.7/site-packages', + 'data': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW', + 'include': '/home/hawk/.pyenv/versions/3.7.1/include/python3.7m', + 'libdir': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW/lib/python3.7/site-packages', + 'platinclude': '/home/hawk/.pyenv/versions/3.7.1/include/python3.7m', + 'platlib': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW/lib/python3.7/site-packages', + 'platstdlib': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW/lib/python3.7', + 'prefix': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW', + 'purelib': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW/lib/python3.7/site-packages', + 'scripts': '/home/hawk/.virtualenvs/pipenv-MfOPs1lW/bin', + 'stdlib': '/home/hawk/.pyenv/versions/3.7.1/lib/python3.7'} + """ + + prefix = self.prefix.as_posix() + install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' + paths = get_paths(install_scheme, vars={ + 'base': prefix, + 'platbase': prefix, + }) + paths["PATH"] = paths["scripts"] + os.pathsep + os.defpath + if "prefix" not in paths: + paths["prefix"] = prefix + purelib = get_python_lib(plat_specific=0, prefix=prefix) + platlib = get_python_lib(plat_specific=1, prefix=prefix) + if purelib == platlib: + lib_dirs = purelib + else: + lib_dirs = purelib + os.pathsep + platlib + paths["libdir"] = purelib + paths["purelib"] = purelib + paths["platlib"] = platlib + paths['PYTHONPATH'] = lib_dirs + paths["libdirs"] = lib_dirs + return paths + + @cached_property + def script_basedir(self): + """Path to the environment scripts dir""" + script_dir = self.base_paths["scripts"] + return script_dir + + @property + def python(self): + """Path to the 
environment python""" + py = vistir.compat.Path(self.base_paths["scripts"]).joinpath("python").as_posix() + if not py: + return vistir.compat.Path(sys.executable).as_posix() + return py + + @cached_property + def sys_path(self): + """The system path inside the environment + + :return: The :data:`sys.path` from the environment + :rtype: list + """ + + current_executable = vistir.compat.Path(sys.executable).as_posix() + if not self.python or self.python == current_executable: + return sys.path + elif any([sys.prefix == self.prefix, not self.is_venv]): + return sys.path + cmd_args = [self.python, "-c", "import json, sys; print(json.dumps(sys.path))"] + path, _ = vistir.misc.run(cmd_args, return_object=False, nospin=True, block=True, combine_stderr=False) + path = json.loads(path.strip()) + return path + + @cached_property + def system_paths(self): + paths = {} + paths = get_paths() + return paths + + @cached_property + def sys_prefix(self): + """The prefix run inside the context of the environment + + :return: The python prefix inside the environment + :rtype: :data:`sys.prefix` + """ + + command = [self.python, "-c" "import sys; print(sys.prefix)"] + c = vistir.misc.run(command, return_object=True, block=True, nospin=True) + sys_prefix = vistir.compat.Path(vistir.misc.to_text(c.out).strip()).as_posix() + return sys_prefix + + @cached_property + def paths(self): + paths = {} + with vistir.contextmanagers.temp_environ(), vistir.contextmanagers.temp_path(): + os.environ["PYTHONIOENCODING"] = vistir.compat.fs_str("utf-8") + os.environ["PYTHONDONTWRITEBYTECODE"] = vistir.compat.fs_str("1") + paths = self.base_paths + os.environ["PATH"] = paths["PATH"] + os.environ["PYTHONPATH"] = paths["PYTHONPATH"] + if "headers" not in paths: + paths["headers"] = paths["include"] + return paths + + @property + def scripts_dir(self): + return self.paths["scripts"] + + @property + def libdir(self): + purelib = self.paths.get("purelib", None) + if purelib and os.path.exists(purelib): + 
return "purelib", purelib + return "platlib", self.paths["platlib"] + + def get_distributions(self): + """Retrives the distributions installed on the library path of the environment + + :return: A set of distributions found on the library path + :rtype: iterator + """ + + pkg_resources = self.safe_import("pkg_resources") + return pkg_resources.find_distributions(self.paths["PYTHONPATH"]) + + def find_egg(self, egg_dist): + import site + site_packages = get_python_lib() + search_filename = "{0}.egg-link".format(egg_dist.project_name) + try: + user_site = site.getusersitepackages() + except AttributeError: + user_site = site.USER_SITE + search_locations = [site_packages, user_site] + for site_directory in search_locations: + egg = os.path.join(site_directory, search_filename) + if os.path.isfile(egg): + return egg + + def locate_dist(self, dist): + location = self.find_egg(dist) + if not location: + return dist.location + + def dist_is_in_project(self, dist): + from .project import _normalized + prefix = _normalized(self.base_paths["prefix"]) + location = self.locate_dist(dist) + if not location: + return False + return _normalized(location).startswith(prefix) + + def get_installed_packages(self): + workingset = self.get_working_set() + packages = [pkg for pkg in workingset if self.dist_is_in_project(pkg)] + return packages + + def get_finder(self): + from .vendor.pip_shims import Command, cmdoptions, index_group, PackageFinder + from .environments import PIPENV_CACHE_DIR + index_urls = [source.get("url") for source in self.sources] + + class PipCommand(Command): + name = "PipCommand" + + pip_command = PipCommand() + index_opts = cmdoptions.make_option_group( + index_group, pip_command.parser + ) + cmd_opts = pip_command.cmd_opts + pip_command.parser.insert_option_group(0, index_opts) + pip_command.parser.insert_option_group(0, cmd_opts) + pip_args = self._modules["pipenv"].utils.prepare_pip_source_args(self.sources, []) + pip_options, _ = 
pip_command.parser.parse_args(pip_args) + pip_options.cache_dir = PIPENV_CACHE_DIR + pip_options.pre = self.pipfile.get("pre", False) + with pip_command._build_session(pip_options) as session: + finder = PackageFinder( + find_links=pip_options.find_links, + index_urls=index_urls, allow_all_prereleases=pip_options.pre, + trusted_hosts=pip_options.trusted_hosts, + process_dependency_links=pip_options.process_dependency_links, + session=session + ) + yield finder + + def get_package_info(self): + dependency_links = [] + packages = self.get_installed_packages() + # This code is borrowed from pip's current implementation + for dist in packages: + if dist.has_metadata('dependency_links.txt'): + dependency_links.extend(dist.get_metadata_lines('dependency_links.txt')) + + with self.get_finder() as finder: + finder.add_dependency_links(dependency_links) + + for dist in packages: + typ = 'unknown' + all_candidates = finder.find_all_candidates(dist.key) + if not finder.pip_options.pre: + # Remove prereleases + all_candidates = [ + candidate for candidate in all_candidates + if not candidate.version.is_prerelease + ] + + if not all_candidates: + continue + best_candidate = max(all_candidates, key=finder._candidate_sort_key) + remote_version = best_candidate.version + if best_candidate.location.is_wheel: + typ = 'wheel' + else: + typ = 'sdist' + # This is dirty but makes the rest of the code much cleaner + dist.latest_version = remote_version + dist.latest_filetype = typ + yield dist + + def get_outdated_packages(self): + return [ + pkg for pkg in self.get_package_info() + if pkg.latest_version._version > pkg.parsed_version._version + ] + + def get_package_requirements(self): + from .vendor.pipdeptree import flatten, sorted_tree, build_dist_index, construct_tree + dist_index = build_dist_index(self.get_installed_packages()) + tree = sorted_tree(construct_tree(dist_index)) + branch_keys = set(r.key for r in flatten(tree.values())) + nodes = [p for p in tree.keys() if p.key not 
in branch_keys] + key_tree = dict((k.key, v) for k, v in tree.items()) + get_children = lambda n: key_tree.get(n.key, []) + + def aux(node, parent=None, chain=None): + if chain is None: + chain = [node.project_name] + + d = node.as_dict() + if parent: + d['required_version'] = node.version_spec if node.version_spec else 'Any' + else: + d['required_version'] = d['installed_version'] + + d['dependencies'] = [ + aux(c, parent=node, chain=chain+[c.project_name]) + for c in get_children(node) + if c.project_name not in chain + ] + + return d + return [aux(p) for p in nodes] + + def get_working_set(self): + """Retrieve the working set of installed packages for the environment. + + :return: The working set for the environment + :rtype: :class:`pkg_resources.WorkingSet` + """ + + working_set = pkg_resources.WorkingSet(self.sys_path) + return working_set + + def is_installed(self, pkgname): + """Given a package name, returns whether it is installed in the environment + + :param str pkgname: The name of a package + :return: Whether the supplied package is installed in the environment + :rtype: bool + """ + + return any(d for d in self.get_distributions() if d.project_name == pkgname) + + def run(self, cmd, cwd=os.curdir): + """Run a command with :class:`~subprocess.Popen` in the context of the environment + + :param cmd: A command to run in the environment + :type cmd: str or list + :param str cwd: The working directory in which to execute the command, defaults to :data:`os.curdir` + :return: A finished command object + :rtype: :class:`~subprocess.Popen` + """ + + c = None + with self.activated(): + script = vistir.cmdparse.Script.parse(cmd) + c = vistir.misc.run(script._parts, return_object=True, nospin=True, cwd=cwd) + return c + + def run_py(self, cmd, cwd=os.curdir): + """Run a python command in the enviornment context. 
+ + :param cmd: A command to run in the environment - runs with `python -c` + :type cmd: str or list + :param str cwd: The working directory in which to execute the command, defaults to :data:`os.curdir` + :return: A finished command object + :rtype: :class:`~subprocess.Popen` + """ + + c = None + if isinstance(cmd, six.string_types): + script = vistir.cmdparse.Script.parse("{0} -c {1}".format(self.python, cmd)) + else: + script = vistir.cmdparse.Script.parse([self.python, "-c"] + list(cmd)) + with self.activated(): + c = vistir.misc.run(script._parts, return_object=True, nospin=True, cwd=cwd) + return c + + def run_activate_this(self): + """Runs the environment's inline activation script""" + if self.is_venv: + activate_this = os.path.join(self.scripts_dir, "activate_this.py") + if not os.path.isfile(activate_this): + raise OSError("No such file: {0!s}".format(activate_this)) + with open(activate_this, "r") as f: + code = compile(f.read(), activate_this, "exec") + exec(code, dict(__file__=activate_this)) + + @contextlib.contextmanager + def activated(self, include_extras=True, extra_dists=None): + """Helper context manager to activate the environment. + + This context manager will set the following variables for the duration + of its activation: + + * sys.prefix + * sys.path + * os.environ["VIRTUAL_ENV"] + * os.environ["PATH"] + + In addition, it will make any distributions passed into `extra_dists` available + on `sys.path` while inside the context manager, as well as making `passa` itself + available. + + The environment's `prefix` as well as `scripts_dir` properties are both prepended + to `os.environ["PATH"]` to ensure that calls to `~Environment.run()` use the + environment's path preferentially. 
+ """ + + if not extra_dists: + extra_dists = [] + original_path = sys.path + original_prefix = sys.prefix + parent_path = vistir.compat.Path(__file__).absolute().parent + vendor_dir = parent_path.joinpath("vendor").as_posix() + patched_dir = parent_path.joinpath("patched").as_posix() + parent_path = parent_path.as_posix() + prefix = self.prefix.as_posix() + with vistir.contextmanagers.temp_environ(), vistir.contextmanagers.temp_path(): + os.environ["PATH"] = os.pathsep.join([ + vistir.compat.fs_str(self.scripts_dir), + vistir.compat.fs_str(self.prefix.as_posix()), + os.environ.get("PATH", "") + ]) + os.environ["PYTHONIOENCODING"] = vistir.compat.fs_str("utf-8") + os.environ["PYTHONDONTWRITEBYTECODE"] = vistir.compat.fs_str("1") + os.environ["PATH"] = self.base_paths["PATH"] + os.environ["PYTHONPATH"] = self.base_paths["PYTHONPATH"] + if self.is_venv: + os.environ["VIRTUAL_ENV"] = vistir.compat.fs_str(prefix) + sys.path = self.sys_path + sys.prefix = self.sys_prefix + site = self.safe_import("site") + site.addsitedir(self.base_paths["purelib"]) + if include_extras: + site.addsitedir(parent_path) + sys.path.extend([parent_path, patched_dir, vendor_dir]) + extra_dists = list(self.extra_dists) + extra_dists + for extra_dist in extra_dists: + if extra_dist not in self.get_working_set(): + extra_dist.activate(self.sys_path) + try: + yield + finally: + sys.path = original_path + sys.prefix = original_prefix + six.moves.reload_module(pkg_resources) + + @cached_property + def finders(self): + from pipenv.vendor.pythonfinder import Finder + finders = [ + Finder(path=self.base_paths["scripts"], global_search=gs, system=False) + for gs in (False, True) + ] + return finders + + @property + def finder(self): + return next(iter(self.finders), None) + + def which(self, search, as_path=True): + find = operator.methodcaller("which", search) + result = next(iter(filter(None, (find(finder) for finder in self.finders))), None) + if not result: + result = self._which(search) + else: + 
if as_path: + result = str(result.path) + return result + + def get_install_args(self, editable=False, setup_path=None): + install_arg = "install" if not editable else "develop" + install_keys = ["headers", "purelib", "platlib", "scripts", "data"] + install_args = [ + self.environment.python, "-u", "-c", SETUPTOOLS_SHIM % setup_path, + install_arg, "--single-version-externally-managed", "--no-deps", + "--prefix={0}".format(self.base_paths["prefix"]), "--no-warn-script-location" + ] + for key in install_keys: + install_args.append( + "--install-{0}={1}".format(key, self.base_paths[key]) + ) + return install_args + + def install(self, requirements): + if not isinstance(requirements, (tuple, list)): + requirements = [requirements,] + with self.get_finder() as finder: + args = [] + for format_control in ('no_binary', 'only_binary'): + formats = getattr(finder.format_control, format_control) + args.extend(('--' + format_control.replace('_', '-'), + ','.join(sorted(formats or {':none:'})))) + if finder.index_urls: + args.extend(['-i', finder.index_urls[0]]) + for extra_index in finder.index_urls[1:]: + args.extend(['--extra-index-url', extra_index]) + else: + args.append('--no-index') + for link in finder.find_links: + args.extend(['--find-links', link]) + for _, host, _ in finder.secure_origins: + args.extend(['--trusted-host', host]) + if finder.allow_all_prereleases: + args.append('--pre') + if finder.process_dependency_links: + args.append('--process-dependency-links') + args.append('--') + args.extend(requirements) + out, _ = vistir.misc.run(args, return_object=False, nospin=True, block=True, + combine_stderr=False) + + @contextlib.contextmanager + def uninstall(self, pkgname, *args, **kwargs): + """A context manager which allows uninstallation of packages from the environment + + :param str pkgname: The name of a package to uninstall + + >>> env = Environment("/path/to/env/root") + >>> with env.uninstall("pytz", auto_confirm=True, verbose=False) as uninstaller: + 
cleaned = uninstaller.paths + >>> if cleaned: + print("uninstalled packages: %s" % cleaned) + """ + + auto_confirm = kwargs.pop("auto_confirm", True) + verbose = kwargs.pop("verbose", False) + with self.activated(): + monkey_patch = next(iter( + dist for dist in self.base_working_set + if dist.project_name == "recursive-monkey-patch" + ), None) + if monkey_patch: + monkey_patch.activate() + pip_shims = self.safe_import("pip_shims") + pathset_base = pip_shims.UninstallPathSet + import recursive_monkey_patch + recursive_monkey_patch.monkey_patch( + PatchedUninstaller, pathset_base + ) + dist = next( + iter(filter(lambda d: d.project_name == pkgname, self.get_working_set())), + None + ) + pathset = pathset_base.from_dist(dist) + if pathset is not None: + pathset.remove(auto_confirm=auto_confirm, verbose=verbose) + try: + yield pathset + except Exception as e: + if pathset is not None: + pathset.rollback() + else: + if pathset is not None: + pathset.commit() + if pathset is None: + return + + +class PatchedUninstaller(object): + def _permitted(self, path): + return True + + +SETUPTOOLS_SHIM = ( + "import setuptools, tokenize;__file__=%r;" + "f=getattr(tokenize, 'open', open)(__file__);" + "code=f.read().replace('\\r\\n', '\\n');" + "f.close();" + "exec(compile(code, __file__, 'exec'))" +) diff --git a/pipenv/project.py b/pipenv/project.py index d4713b89f2..d3b56be6d7 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -19,6 +19,7 @@ import toml import tomlkit +from .environment import Environment from .cmdparse import Script from .utils import ( pep423_name, @@ -35,7 +36,7 @@ get_workon_home, is_virtual_environment, looks_like_dir, - sys_version + get_canonical_names ) from .environments import ( PIPENV_MAX_DEPTH, @@ -45,7 +46,6 @@ PIPENV_TEST_INDEX, PIPENV_PYTHON, PIPENV_DEFAULT_PYTHON_VERSION, - PIPENV_CACHE_DIR ) @@ -154,6 +154,7 @@ def __init__(self, which=None, python_version=None, chdir=True): self._lockfile_newlines = DEFAULT_NEWLINES 
self._requirements_location = None self._original_dir = os.path.abspath(os.curdir) + self._environment = None self._which = which self._build_system = { "requires": ["setuptools", "wheel"] @@ -316,105 +317,48 @@ def working_set(self): import pkg_resources return pkg_resources.WorkingSet(sys_path) - def find_egg(self, egg_dist): - import site - from distutils import sysconfig as distutils_sysconfig - site_packages = distutils_sysconfig.get_python_lib() - search_filename = "{0}.egg-link".format(egg_dist.project_name) - try: - user_site = site.getusersitepackages() - except AttributeError: - user_site = site.USER_SITE - search_locations = [site_packages, user_site] - for site_directory in search_locations: - egg = os.path.join(site_directory, search_filename) - if os.path.isfile(egg): - return egg - - def locate_dist(self, dist): - location = self.find_egg(dist) - if not location: - return dist.location - - def dist_is_in_project(self, dist): - prefix = _normalized(self.env_paths["prefix"]) - location = self.locate_dist(dist) - if not location: - return False - return _normalized(location).startswith(prefix) - - def get_installed_packages(self): - workingset = self.working_set - if self.virtualenv_exists: - packages = [pkg for pkg in workingset if self.dist_is_in_project(pkg)] - else: - packages = [pkg for pkg in packages] - return packages + @property + def installed_packages(self): + return self.environment.get_installed_packages() - def get_package_info(self): - from .utils import prepare_pip_source_args - from .vendor.pip_shims import Command, cmdoptions, index_group, PackageFinder - index_urls = [source.get("url") for source in self.sources] + @property + def installed_package_names(self): + return get_canonical_names([pkg.key for pkg in self.installed_packages]) - class PipCommand(Command): - name = "PipCommand" + @property + def lockfile_package_names(self): + dev_keys = get_canonical_names(self.lockfile_content["develop"].keys()) + default_keys = 
get_canonical_names(self.lockfile_content["default"].keys()) + return { + "dev": dev_keys, + "default": default_keys, + "combined": dev_keys | default_keys + } - dependency_links = [] - packages = self.get_installed_packages() - # This code is borrowed from pip's current implementation - for dist in packages: - if dist.has_metadata('dependency_links.txt'): - dependency_links.extend(dist.get_metadata_lines('dependency_links.txt')) + @property + def pipfile_package_names(self): + dev_keys = get_canonical_names(self.dev_packages.keys()) + default_keys = get_canonical_names(self.packages.keys()) + return { + "dev": dev_keys, + "default": default_keys, + "combined": dev_keys | default_keys + } - pip_command = PipCommand() - index_opts = cmdoptions.make_option_group( - index_group, pip_command.parser - ) - cmd_opts = pip_command.cmd_opts - pip_command.parser.insert_option_group(0, index_opts) - pip_command.parser.insert_option_group(0, cmd_opts) - pip_args = prepare_pip_source_args(self.sources, []) - pip_options, _ = pip_command.parser.parse_args(pip_args) - pip_options.cache_dir = PIPENV_CACHE_DIR - pip_options.pre = self.settings.get("pre", False) - with pip_command._build_session(pip_options) as session: - finder = PackageFinder( - find_links=pip_options.find_links, - index_urls=index_urls, allow_all_prereleases=pip_options.pre, - trusted_hosts=pip_options.trusted_hosts, - process_dependency_links=pip_options.process_dependency_links, - session=session + @property + def environment(self): + if not self._environment: + prefix = self.get_location_for_virtualenv() + is_venv = prefix == sys.prefix + sources = self.sources.copy() if self.sources else [DEFAULT_SOURCE,] + self._environment = Environment( + prefix=prefix, is_venv=is_venv, sources=sources, pipfile=self.parsed_pipfile ) - finder.add_dependency_links(dependency_links) - - for dist in packages: - typ = 'unknown' - all_candidates = finder.find_all_candidates(dist.key) - if not pip_options.pre: - # Remove 
prereleases - all_candidates = [ - candidate for candidate in all_candidates - if not candidate.version.is_prerelease - ] - - if not all_candidates: - continue - best_candidate = max(all_candidates, key=finder._candidate_sort_key) - remote_version = best_candidate.version - if best_candidate.location.is_wheel: - typ = 'wheel' - else: - typ = 'sdist' - # This is dirty but makes the rest of the code much cleaner - dist.latest_version = remote_version - dist.latest_filetype = typ - yield dist + self._environment.add_dist("pipenv") + return self._environment def get_outdated_packages(self): - return [ - pkg for pkg in self.get_package_info() - if pkg.latest_version._version > pkg.parsed_version._version - ] + return self.environment.get_outdated_packages() @classmethod def _sanitize(cls, name): @@ -588,7 +532,6 @@ def dump_dict(dictionary, write_to, inline=False): :return: A new toml hierarchical document """ - def gen_table(inline=False): if inline: return tomlkit.inline_table() @@ -1159,49 +1102,6 @@ def proper_case_section(self, section): # Return whether or not values have been changed. 
return changed_values - @property - def py_version(self): - py_path = self.which("python") - version = python_version(py_path) - return version - - @property - def _pyversion(self): - include_dir = vistir.compat.Path(self.virtualenv_location) / "include" - python_path = next((x for x in include_dir.iterdir() if x.name.startswith("python")), None) - if python_path: - py_version = python_path.name.replace("python", "") - py_version_short, abiflags = py_version[:3], py_version[3:] - return {"py_version_short": py_version_short, "abiflags": abiflags} - return {} - - @property - def env_paths(self): - location = self.virtualenv_location if self.virtualenv_location else sys.prefix - prefix = vistir.compat.Path(location) - import importlib - py_version = tuple([int(v) for v in self.py_version.split(".")]) - py_version_short = ".".join([str(v) for v in py_version[:2]]) - running_version = ".".join([str(v) for v in sys.version_info[:2]]) - try: - _virtualenv = importlib.import_module("virtualenv") - except (ImportError, AttributeError): - with vistir.contextmanagers.temp_path(): - sys.path = vistir.misc.load_path(self.which("python")) - six.moves.reload_module(importlib) - _virtualenv = importlib.import_module("virtualenv") - with sys_version(py_version): - home, lib, inc, bin_ = _virtualenv.path_locations(prefix.absolute().as_posix()) - paths = { - "lib": lib.replace(running_version, py_version_short), - "include": inc.replace(running_version, py_version_short), - "scripts": bin_, - "purelib": lib.replace(running_version, py_version_short), - "prefix": home, - "base": home - } - return paths - @cached_property def finders(self): from .vendor.pythonfinder import Finder From 642b6f94b55924ba5e3af199d6fd679e369a9ee6 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 17:41:16 -0500 Subject: [PATCH 22/71] Update vistir and requirementslib Signed-off-by: Dan Ryan --- .../requirementslib/models/requirements.py | 25 ++++++---- .../requirementslib/models/setup_info.py | 
2 +- pipenv/vendor/vistir/compat.py | 50 +++++++++++++++++-- pipenv/vendor/vistir/misc.py | 7 ++- pipenv/vendor/vistir/path.py | 31 +++++++----- pipenv/vendor/vistir/spin.py | 2 +- 6 files changed, 86 insertions(+), 31 deletions(-) diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index 514114298c..aafb059b97 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -1050,9 +1050,9 @@ def copy(self): @classmethod def from_line(cls, line): - from pip_shims import InstallRequirement + import pip_shims.shims - if isinstance(line, InstallRequirement): + if isinstance(line, pip_shims.shims.InstallRequirement): line = format_requirement(line) hashes = None if "--hash=" in line: @@ -1130,13 +1130,20 @@ def from_line(cls, line): if hashes: args["hashes"] = hashes cls_inst = cls(**args) - if not cls_inst.is_named and (not cls_inst.editable or cls_inst.req._has_hashed_name): - old_name = cls_inst.req.req.name or cls_inst.req.name - info_dict = cls_inst.run_requires() - calced_name = info_dict.get("name", old_name) - if old_name != calced_name: - cls_inst.req.req.line.replace(old_name, calced_name) - cls_inst.name = cls_inst.req.name = calced_name + if not cls_inst.is_named and not cls_inst.editable and not name: + if cls_inst.is_vcs: + ireq = pip_shims.shims.install_req_from_req(cls_inst.as_line(include_hashes=False)) + info = SetupInfo.from_ireq(ireq) + if info is not None: + info_dict = info.as_dict() + cls_inst.req.setup_info = info + else: + info_dict = {} + else: + info_dict = cls_inst.run_requires() + found_name = info_dict.get("name", old_name) + if old_name != found_name: + cls_inst.req.req.line.replace(old_name, found_name) return cls_inst @classmethod diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 319dd6bdcb..f2a1ee7929 100644 --- 
a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -103,7 +103,7 @@ def iter_egginfos(path, pkg_name=None): def find_egginfo(target, pkg_name=None): egg_dirs = (egg_dir for egg_dir in iter_egginfos(target, pkg_name=pkg_name)) if pkg_name: - return next(iter(egg_dirs), None) + yield next(iter(egg_dirs), None) else: for egg_dir in egg_dirs: yield egg_dir diff --git a/pipenv/vendor/vistir/compat.py b/pipenv/vendor/vistir/compat.py index d6e8578a18..8322648101 100644 --- a/pipenv/vendor/vistir/compat.py +++ b/pipenv/vendor/vistir/compat.py @@ -20,6 +20,8 @@ "FileNotFoundError", "ResourceWarning", "FileNotFoundError", + "PermissionError", + "IsADirectoryError", "fs_str", "lru_cache", "TemporaryDirectory", @@ -69,8 +71,17 @@ def __init__(self, *args, **kwargs): self.errno = errno.ENOENT super(FileNotFoundError, self).__init__(*args, **kwargs) + class PermissionError(OSError): + def __init__(self, *args, **kwargs): + self.errno = errno.EACCES + super(PermissionError, self).__init__(*args, **kwargs) + + class IsADirectoryError(OSError): + """The command does not work on directories""" + pass + else: - from builtins import ResourceWarning, FileNotFoundError + from builtins import ResourceWarning, FileNotFoundError, PermissionError, IsADirectoryError if not sys.warnoptions: @@ -111,9 +122,39 @@ def __init__(self, suffix="", prefix=None, dir=None): ) @classmethod - def _cleanup(cls, name, warn_message): + def _rmtree(cls, name): from .path import rmtree - rmtree(name) + + def onerror(func, path, exc_info): + if issubclass(exc_info[0], (PermissionError, OSError)): + try: + try: + if path != name: + os.chflags(os.path.dirname(path), 0) + os.chflags(path, 0) + except AttributeError: + pass + if path != name: + os.chmod(os.path.dirname(path), 0o70) + os.chmod(path, 0o700) + + try: + os.unlink(path) + # PermissionError is raised on FreeBSD for directories + except (IsADirectoryError, PermissionError, OSError): + 
cls._rmtree(path) + except FileNotFoundError: + pass + elif issubclass(exc_info[0], FileNotFoundError): + pass + else: + raise + + rmtree(name, onerror=onerror) + + @classmethod + def _cleanup(cls, name, warn_message): + cls._rmtree(name) warnings.warn(warn_message, ResourceWarning) def __repr__(self): @@ -126,9 +167,8 @@ def __exit__(self, exc, value, tb): self.cleanup() def cleanup(self): - from .path import rmtree if self._finalizer.detach(): - rmtree(self.name) + self._rmtree(self.name) def fs_str(string): diff --git a/pipenv/vendor/vistir/misc.py b/pipenv/vendor/vistir/misc.py index 7342bc97de..a9a127d81a 100644 --- a/pipenv/vendor/vistir/misc.py +++ b/pipenv/vendor/vistir/misc.py @@ -35,7 +35,9 @@ class WindowsError(OSError): "locale_encoding", "chunked", "take", - "divide" + "divide", + "getpreferredencoding", + "decode_for_output", ] @@ -492,7 +494,8 @@ def chunked(n, iterable): def getpreferredencoding(): - import locale + """Determine the proper output encoding for terminal rendering""" + # Borrowed from Invoke # (see https://github.com/pyinvoke/invoke/blob/93af29d/invoke/runners.py#L881) _encoding = locale.getpreferredencoding(False) diff --git a/pipenv/vendor/vistir/path.py b/pipenv/vendor/vistir/path.py index b1236884e9..23ae025205 100644 --- a/pipenv/vendor/vistir/path.py +++ b/pipenv/vendor/vistir/path.py @@ -183,10 +183,9 @@ def mkdir_p(newdir, mode=0o777): :raises: OSError if a file is encountered along the way """ # http://code.activestate.com/recipes/82465-a-friendly-mkdir/ - from .misc import to_text - from .compat import to_native_string + from .misc import to_bytes, to_text - newdir = to_native_string(newdir) + newdir = to_bytes(newdir, "utf-8") if os.path.exists(newdir): if not os.path.isdir(newdir): raise OSError( @@ -195,9 +194,9 @@ def mkdir_p(newdir, mode=0o777): ) ) else: - head, tail = os.path.split(newdir) + head, tail = os.path.split(to_bytes(newdir, encoding="utf-8")) # Make sure the tail doesn't point to the asame place as the head 
- curdir = to_native_string(".") + curdir = to_bytes(".", encoding="utf-8") tail_and_head_match = ( os.path.relpath(tail, start=os.path.basename(head)) == curdir ) @@ -205,8 +204,9 @@ def mkdir_p(newdir, mode=0o777): target = os.path.join(head, tail) if os.path.exists(target) and os.path.isfile(target): raise OSError( - "A file with the same name as the desired dir, '{0}', " - "already exists.".format(to_text(newdir, encoding="utf-8")) + "A file with the same name as the desired dir, '{0}', already exists.".format( + to_text(newdir, encoding="utf-8") + ) ) os.makedirs(os.path.join(head, tail), mode) @@ -277,13 +277,13 @@ def set_write_bit(fn): if not os.path.isdir(fn): return for root, dirs, files in os.walk(fn, topdown=False): - for dir_ in [os.path.join(root,d) for d in dirs]: + for dir_ in [os.path.join(root, d) for d in dirs]: set_write_bit(dir_) for file_ in [os.path.join(root, f) for f in files]: set_write_bit(file_) -def rmtree(directory, ignore_errors=False): +def rmtree(directory, ignore_errors=False, onerror=None): """Stand-in for :func:`~shutil.rmtree` with additional error-handling. This version of `rmtree` handles read-only paths, especially in the case of index @@ -291,6 +291,7 @@ def rmtree(directory, ignore_errors=False): :param str directory: The target directory to remove :param bool ignore_errors: Whether to ignore errors, defaults to False + :param func onerror: An error handling function, defaults to :func:`handle_remove_readonly` .. 
note:: @@ -300,9 +301,11 @@ def rmtree(directory, ignore_errors=False): from .compat import to_native_string directory = to_native_string(directory) + if onerror is None: + onerror = handle_remove_readonly try: shutil.rmtree( - directory, ignore_errors=ignore_errors, onerror=handle_remove_readonly + directory, ignore_errors=ignore_errors, onerror=onerror ) except (IOError, OSError, FileNotFoundError) as exc: # Ignore removal failures where the file doesn't exist @@ -325,7 +328,9 @@ def handle_remove_readonly(func, path, exc): :func:`set_write_bit` on the target path and try again. """ # Check for read-only attribute - from .compat import ResourceWarning, FileNotFoundError, to_native_string + from .compat import ( + ResourceWarning, FileNotFoundError, PermissionError, to_native_string + ) PERM_ERRORS = (errno.EACCES, errno.EPERM, errno.ENOENT) default_warning_message = ( @@ -339,7 +344,7 @@ def handle_remove_readonly(func, path, exc): set_write_bit(path) try: func(path) - except (OSError, IOError, FileNotFoundError) as e: + except (OSError, IOError, FileNotFoundError, PermissionError) as e: if e.errno == errno.ENOENT: return elif e.errno in PERM_ERRORS: @@ -350,7 +355,7 @@ def handle_remove_readonly(func, path, exc): set_write_bit(path) try: func(path) - except (OSError, IOError, FileNotFoundError) as e: + except (OSError, IOError, FileNotFoundError, PermissionError) as e: if e.errno in PERM_ERRORS: warnings.warn(default_warning_message.format(path), ResourceWarning) pass diff --git a/pipenv/vendor/vistir/spin.py b/pipenv/vendor/vistir/spin.py index f0d9e77ffd..09ecbacea8 100644 --- a/pipenv/vendor/vistir/spin.py +++ b/pipenv/vendor/vistir/spin.py @@ -292,6 +292,6 @@ def _clear_line(): def create_spinner(*args, **kwargs): nospin = kwargs.pop("nospin", False) use_yaspin = kwargs.pop("use_yaspin", nospin) - if nospin: + if nospin or not use_yaspin: return DummySpinner(*args, **kwargs) return VistirSpinner(*args, **kwargs) From aedb41c65d1a9a1e369d567f034827b9da1c9a3f 
Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 17:44:35 -0500 Subject: [PATCH 23/71] Fix stdout and stderr wrappers Signed-off-by: Dan Ryan --- pipenv/__init__.py | 10 ++++++---- pipenv/resolver.py | 11 ++++++++--- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/pipenv/__init__.py b/pipenv/__init__.py index ba4dd9c3e0..4d137e7f6c 100644 --- a/pipenv/__init__.py +++ b/pipenv/__init__.py @@ -28,10 +28,12 @@ if sys.stdout.isatty() and sys.stderr.isatty(): import io import atexit - sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8') - atexit.register(sys.stdout.close) - sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf8') - atexit.register(sys.stdout.close) + stdout_wrapper = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8') + atexit.register(stdout_wrapper.close) + stderr_wrapper = io.TextIOWrapper(sys.stderr.buffer, encoding='utf8') + atexit.register(stderr_wrapper.close) + sys.stdout = stdout_wrapper + sys.stderr = stderr_wrapper os.environ["PIP_DISABLE_PIP_VERSION_CHECK"] = fs_str("1") diff --git a/pipenv/resolver.py b/pipenv/resolver.py index 9ef46878c9..e87f324350 100644 --- a/pipenv/resolver.py +++ b/pipenv/resolver.py @@ -99,8 +99,13 @@ def main(): import io import six if six.PY3: - sys.stdout = io.TextIOWrapper(sys.stdout.buffer,encoding='utf8') - sys.stderr = io.TextIOWrapper(sys.stderr.buffer,encoding='utf8') + import atexit + stdout_wrapper = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8') + atexit.register(stdout_wrapper.close) + stderr_wrapper = io.TextIOWrapper(sys.stderr.buffer, encoding='utf8') + atexit.register(stderr_wrapper.close) + sys.stdout = stdout_wrapper + sys.stderr = stderr_wrapper else: from pipenv._compat import force_encoding force_encoding() @@ -111,7 +116,7 @@ def main(): # sys.argv = remaining parsed = handle_parsed_args(parsed) _main(parsed.pre, parsed.clear, parsed.verbose, parsed.system, - parsed.requirements_dir, parsed.packages) + parsed.requirements_dir, 
parsed.packages) if __name__ == "__main__": From 45100b8a46f10955e9cf89a6932b695219048695 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 17:44:35 -0500 Subject: [PATCH 24/71] Fix stdout and stderr wrappers Signed-off-by: Dan Ryan --- news/3196.vendor.rst | 1 + pipenv/__init__.py | 10 ++++++---- pipenv/resolver.py | 11 ++++++++--- 3 files changed, 15 insertions(+), 7 deletions(-) create mode 100644 news/3196.vendor.rst diff --git a/news/3196.vendor.rst b/news/3196.vendor.rst new file mode 100644 index 0000000000..19351e2e73 --- /dev/null +++ b/news/3196.vendor.rst @@ -0,0 +1 @@ +Updated ``requirementslib`` to aid in resolution of local and remote archives. diff --git a/pipenv/__init__.py b/pipenv/__init__.py index ba4dd9c3e0..4d137e7f6c 100644 --- a/pipenv/__init__.py +++ b/pipenv/__init__.py @@ -28,10 +28,12 @@ if sys.stdout.isatty() and sys.stderr.isatty(): import io import atexit - sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8') - atexit.register(sys.stdout.close) - sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf8') - atexit.register(sys.stdout.close) + stdout_wrapper = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8') + atexit.register(stdout_wrapper.close) + stderr_wrapper = io.TextIOWrapper(sys.stderr.buffer, encoding='utf8') + atexit.register(stderr_wrapper.close) + sys.stdout = stdout_wrapper + sys.stderr = stderr_wrapper os.environ["PIP_DISABLE_PIP_VERSION_CHECK"] = fs_str("1") diff --git a/pipenv/resolver.py b/pipenv/resolver.py index 9ef46878c9..e87f324350 100644 --- a/pipenv/resolver.py +++ b/pipenv/resolver.py @@ -99,8 +99,13 @@ def main(): import io import six if six.PY3: - sys.stdout = io.TextIOWrapper(sys.stdout.buffer,encoding='utf8') - sys.stderr = io.TextIOWrapper(sys.stderr.buffer,encoding='utf8') + import atexit + stdout_wrapper = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8') + atexit.register(stdout_wrapper.close) + stderr_wrapper = io.TextIOWrapper(sys.stderr.buffer, encoding='utf8') + 
atexit.register(stderr_wrapper.close) + sys.stdout = stdout_wrapper + sys.stderr = stderr_wrapper else: from pipenv._compat import force_encoding force_encoding() @@ -111,7 +116,7 @@ def main(): # sys.argv = remaining parsed = handle_parsed_args(parsed) _main(parsed.pre, parsed.clear, parsed.verbose, parsed.system, - parsed.requirements_dir, parsed.packages) + parsed.requirements_dir, parsed.packages) if __name__ == "__main__": From 118c9d3fe69239150f54161b10d444e76f43f140 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 17:48:04 -0500 Subject: [PATCH 25/71] Add dramatically improved queued installation - Use queues and parallelized installation - Better UI/UX -- progress bar moves gradually as items are moved on and off the queue - Queue is handled by item instead of in massive batches - TODO: Call out when task is done from the install function? Signed-off-by: Dan Ryan --- ...90-3e70-40ba-8242-1e6ed18fc2fe.feature.rst | 1 + pipenv/core.py | 241 +++++++++--------- 2 files changed, 124 insertions(+), 118 deletions(-) create mode 100644 news/d65e7c90-3e70-40ba-8242-1e6ed18fc2fe.feature.rst diff --git a/news/d65e7c90-3e70-40ba-8242-1e6ed18fc2fe.feature.rst b/news/d65e7c90-3e70-40ba-8242-1e6ed18fc2fe.feature.rst new file mode 100644 index 0000000000..f868fd95d8 --- /dev/null +++ b/news/d65e7c90-3e70-40ba-8242-1e6ed18fc2fe.feature.rst @@ -0,0 +1 @@ +Improved asynchronous installation and error handling via queued subprocess parallelization.
diff --git a/pipenv/core.py b/pipenv/core.py index 021b0f3ca1..ef5439dd2e 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -654,6 +654,100 @@ def do_where(virtualenv=False, bare=True): click.echo(location) +def _cleanup_procs(procs, concurrent, failed_deps_queue, retry=True): + while not procs.empty(): + c = procs.get() + # if concurrent: + c.block() + failed = False + if c.return_code != 0: + failed = True + if "Ignoring" in c.out: + click.echo(crayons.yellow(c.out.strip())) + elif environments.is_verbose(): + click.echo(crayons.blue(c.out.strip() or c.err.strip())) + # The Installation failed… + if failed: + if not retry: + # The Installation failed… + # We echo both c.out and c.err because pip returns error details on out. + err = c.err.strip().splitlines() if c.err else [] + out = c.out.strip().splitlines() if c.out else [] + err_lines = [line for line in [out, err]] + # Return the subprocess' return code. + raise exceptions.InstallError(c.dep.name, extra=err_lines) + # Save the Failed Dependency for later. + dep = c.dep.copy() + failed_deps_queue.put(dep) + # Alert the user. + click.echo( + "{0} {1}! Will try again.".format( + crayons.red("An error occurred while installing"), + crayons.green(dep.as_line()), + ), err=True + ) + + +def batch_install(deps_list, procs, failed_deps_queue, + requirements_dir, no_deps=False, ignore_hashes=False, + allow_global=False, blocking=False, pypi_mirror=None, + nprocs=PIPENV_MAX_SUBPROCESS, retry=True): + + failed = (not retry) + if not failed: + label = INSTALL_LABEL if os.name != "nt" else "" + else: + label = INSTALL_LABEL2 + + deps_list_bar = progress.bar( + deps_list, width=32, + label=label + ) + indexes = [] + trusted_hosts = [] + # Install these because + for dep in deps_list_bar: + index = None + if dep.index: + index = project.find_source(dep.index) + indexes.append(index) + if not index.get("verify_ssl", False): + trusted_hosts.append(urllib3_util.parse_url(index.get("url")).host) + # Install the module. 
+ is_artifact = False + if dep.is_file_or_url and (dep.is_direct_url or any( + dep.req.uri.endswith(ext) for ext in ["zip", "tar.gz"] + )): + is_artifact = True + + extra_indexes = [] + if not index and indexes: + index = next(iter(indexes)) + if len(indexes) > 1: + extra_indexes = indexes[1:] + + with vistir.contextmanagers.temp_environ(): + os.environ["PIP_USER"] = vistir.compat.fs_str("0") + c = pip_install( + dep, + ignore_hashes=any([ignore_hashes, dep.editable, dep.is_vcs]), + allow_global=allow_global, + no_deps=False if is_artifact else no_deps, + block=any([dep.is_vcs, blocking]), + index=index, + requirements_dir=requirements_dir, + pypi_mirror=pypi_mirror, + trusted_hosts=trusted_hosts, + extra_indexes=extra_indexes + ) + if procs.qsize() < nprocs: + c.dep = dep + procs.put(c) + + if procs.full() or procs.qsize() == len(deps_list): + _cleanup_procs(procs, not blocking, failed_deps_queue, retry=retry) + + def do_install_dependencies( dev=False, only=False, @@ -670,33 +764,8 @@ def do_install_dependencies( If requirements is True, simply spits out a requirements format to stdout. """ - from six.moves import queue - - def cleanup_procs(procs, concurrent): - while not procs.empty(): - c = procs.get() - # if concurrent: - c.block() - failed = False - if c.return_code != 0: - failed = True - if "Ignoring" in c.out: - click.echo(crayons.yellow(c.out.strip())) - elif environments.is_verbose(): - click.echo(crayons.blue(c.out.strip() or c.err.strip())) - # The Installation failed… - if failed: - # Save the Failed Dependency for later. - dep = c.dep.copy() - failed_deps_list.append(dep) - # Alert the user. - click.echo( - "{0} {1}! Will try again.".format( - crayons.red("An error occurred while installing"), - crayons.green(dep.as_line()), - ), err=True - ) + from six.moves import queue if requirements: bare = True blocking = not concurrent @@ -720,7 +789,6 @@ def cleanup_procs(procs, concurrent): ) # Allow pip to resolve dependencies when in skip-lock mode. 
no_deps = not skip_lock - failed_deps_list = [] deps_list = list(lockfile.get_requirements(dev=dev, only=True)) if requirements: index_args = prepare_pip_source_args(project.sources) @@ -736,106 +804,43 @@ def cleanup_procs(procs, concurrent): sys.exit(0) procs = queue.Queue(maxsize=PIPENV_MAX_SUBPROCESS) - trusted_hosts = [] + failed_deps_queue = queue.Queue() - deps_list_bar = progress.bar( - deps_list, width=32, - label=INSTALL_LABEL if os.name != "nt" else "", - ) - indexes = [] - for dep in deps_list_bar: - index = None - if dep.index: - index = project.find_source(dep.index) - indexes.append(index) - if not index.get("verify_ssl", False): - trusted_hosts.append(urllib3_util.parse_url(index.get("url")).host) - # Install the module. - is_artifact = False - if dep.is_file_or_url and any( - dep.req.uri.endswith(ext) for ext in ["zip", "tar.gz"] - ): - is_artifact = True + install_kwargs = { + "no_deps": no_deps, "ignore_hashes": ignore_hashes, "allow_global": allow_global, + "blocking": blocking, "pypi_mirror": pypi_mirror + } + if concurrent: + install_kwargs["nprocs"] = PIPENV_MAX_SUBPROCESS + else: + install_kwargs["nprocs"] = 1 - extra_indexes = [] - if not index and indexes: - index = next(iter(indexes)) - if len(indexes) > 1: - extra_indexes = indexes[1:] - with vistir.contextmanagers.temp_environ(): - os.environ["PIP_USER"] = vistir.compat.fs_str("0") - c = pip_install( - dep, - ignore_hashes=any([ignore_hashes, dep.editable, dep.is_vcs]), - allow_global=allow_global, - no_deps=False if is_artifact else no_deps, - block=any([dep.editable, blocking]), - index=index, - requirements_dir=requirements_dir, - pypi_mirror=pypi_mirror, - trusted_hosts=trusted_hosts, - extra_indexes=extra_indexes - ) - if procs.qsize() < PIPENV_MAX_SUBPROCESS: - c.dep = dep - procs.put(c) + batch_install( + deps_list, procs, failed_deps_queue, requirements_dir, **install_kwargs + ) - if procs.full() or procs.qsize() == len(deps_list): - cleanup_procs(procs, concurrent) if not 
procs.empty(): - cleanup_procs(procs, concurrent) + _cleanup_procs(procs, concurrent, failed_deps_queue) # Iterate over the hopefully-poorly-packaged dependencies… - if failed_deps_list: + if not failed_deps_queue.empty(): click.echo( crayons.normal(fix_utf8("Installing initially failed dependencies…"), bold=True) ) - for dep in progress.bar(failed_deps_list, label=INSTALL_LABEL2): - # Use a specific index, if specified. - # Install the module. - is_artifact = False - index = None - if dep.index: - index = project.find_source(dep.index) - if dep.is_file_or_url and any( - dep.req.uri.endswith(ext) for ext in ["zip", "tar.gz"] - ): - is_artifact = True - extra_indexes = [] - if not index and indexes: - index = next(iter(indexes)) - if len(indexes) > 1: - extra_indexes = indexes[1:] - with vistir.contextmanagers.temp_environ(): - os.environ["PIP_USER"] = vistir.compat.fs_str("0") - c = pip_install( - dep, - ignore_hashes=any([ignore_hashes, dep.editable, dep.is_vcs]), - allow_global=allow_global, - no_deps=True if is_artifact else no_deps, - index=index, - requirements_dir=requirements_dir, - pypi_mirror=pypi_mirror, - trusted_hosts=trusted_hosts, - extra_indexes=extra_indexes, - block=True - ) - # The Installation failed… - if c.return_code != 0: - # We echo both c.out and c.err because pip returns error details on out. - click.echo(crayons.blue(format_pip_output(c.out))) - click.echo(crayons.blue(format_pip_error(c.err)), err=True) - # Return the subprocess' return code. 
- sys.exit(c.return_code) - else: - if environments.is_verbose(): - click.echo( - "{0} {1}{2}".format( - crayons.green("Success installing"), - crayons.green(dep.as_line(include_hashes=False)), - crayons.green("!"), - ), - ) + retry_list = [] + while not failed_deps_queue.empty(): + failed_dep = failed_deps_queue.get() + retry_list.append(failed_dep) + install_kwargs.update({ + "nprocs": 1, + "retry": False, + "blocking": True, + }) + batch_install( + retry_list, procs, failed_deps_queue, requirements_dir, **install_kwargs + ) + if not procs.empty(): + _cleanup_procs(procs, False, failed_deps_queue, retry=False) def convert_three_to_python(three, python): From 8643a733c2af08bab269aabe99cc6f80c3d42e7f Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 18:05:25 -0500 Subject: [PATCH 26/71] Fix configparser import Signed-off-by: Dan Ryan --- pipenv/vendor/requirementslib/models/setup_info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index f2a1ee7929..481f0494c5 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -1,5 +1,4 @@ # -*- coding=utf-8 -*- -import configparser import contextlib import os @@ -14,6 +13,7 @@ import distutils from appdirs import user_cache_dir +from six.moves import configparser from six.moves.urllib.parse import unquote from vistir.compat import Path from vistir.contextmanagers import cd From 0caf7a013280d173354260e62ab17f3843ecd85d Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 18:05:25 -0500 Subject: [PATCH 27/71] Fix configparser import Signed-off-by: Dan Ryan --- pipenv/vendor/requirementslib/models/setup_info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index f2a1ee7929..481f0494c5 100644 --- 
a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -1,5 +1,4 @@ # -*- coding=utf-8 -*- -import configparser import contextlib import os @@ -14,6 +13,7 @@ import distutils from appdirs import user_cache_dir +from six.moves import configparser from six.moves.urllib.parse import unquote from vistir.compat import Path from vistir.contextmanagers import cd From 650cc32fe676619a3478b7712e48b511a6f2ac4b Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 18:49:44 -0500 Subject: [PATCH 28/71] Fix resource errors Signed-off-by: Dan Ryan --- pipenv/environment.py | 2 +- pipenv/vendor/requirementslib/__init__.py | 5 ++--- pipenv/vendor/requirementslib/models/cache.py | 2 ++ pipenv/vendor/requirementslib/models/dependencies.py | 3 +++ 4 files changed, 8 insertions(+), 4 deletions(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index db0e22aa40..5e86737491 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -25,7 +25,7 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No super(Environment, self).__init__() self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET - self.is_venv = not os.path.samefile(os.path.abspath(prefix), sys.prefix) + self.is_venv = not os.samefile(os.path.abspath(prefix), sys.prefix) if not sources: sources = [] self.sources = sources diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index 881e9ac9d9..05fd19438c 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -3,12 +3,11 @@ import logging import warnings - -warnings.filterwarnings("ignore", category=ResourceWarning) - +from vistir.compat import ResourceWarning logger = logging.getLogger(__name__) logger.addHandler(logging.NullHandler()) +warnings.filterwarnings("ignore", 
category=ResourceWarning) from .models.requirements import Requirement from .models.lockfile import Lockfile diff --git a/pipenv/vendor/requirementslib/models/cache.py b/pipenv/vendor/requirementslib/models/cache.py index 0b8c47b19d..f1639ea228 100644 --- a/pipenv/vendor/requirementslib/models/cache.py +++ b/pipenv/vendor/requirementslib/models/cache.py @@ -1,6 +1,7 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import, print_function, unicode_literals +import atexit import copy import hashlib import json @@ -197,6 +198,7 @@ def __init__(self, *args, **kwargs): if not session: import requests session = requests.session() + atexit.register(session.close) cache_dir = kwargs.pop('cache_dir', CACHE_DIR) self.session = session kwargs.setdefault('directory', os.path.join(cache_dir, 'hash-cache')) diff --git a/pipenv/vendor/requirementslib/models/dependencies.py b/pipenv/vendor/requirementslib/models/dependencies.py index 48e84d0fc3..f87fd585e9 100644 --- a/pipenv/vendor/requirementslib/models/dependencies.py +++ b/pipenv/vendor/requirementslib/models/dependencies.py @@ -1,5 +1,6 @@ # -*- coding=utf-8 -*- +import atexit import contextlib import copy import functools @@ -361,6 +362,7 @@ def get_dependencies_from_json(ireq): return session = requests.session() + atexit.register(session.close) version = str(ireq.req.specifier).lstrip("=") def gen(ireq): @@ -575,6 +577,7 @@ def get_finder(sources=None, pip_command=None, pip_options=None): if not pip_options: pip_options = get_pip_options(sources=sources, pip_command=pip_command) session = pip_command._build_session(pip_options) + atexit.register(session.close) finder = pip_shims.shims.PackageFinder( find_links=[], index_urls=[s.get("url") for s in sources], From fe9d996f89aa98e047d795ae6cea7957986ccc26 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 18:49:44 -0500 Subject: [PATCH 29/71] Fix resource errors Signed-off-by: Dan Ryan --- pipenv/environment.py | 2 +- 
pipenv/vendor/requirementslib/__init__.py | 5 ++--- pipenv/vendor/requirementslib/models/cache.py | 2 ++ pipenv/vendor/requirementslib/models/dependencies.py | 3 +++ 4 files changed, 8 insertions(+), 4 deletions(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index db0e22aa40..5e86737491 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -25,7 +25,7 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No super(Environment, self).__init__() self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET - self.is_venv = not os.path.samefile(os.path.abspath(prefix), sys.prefix) + self.is_venv = not os.samefile(os.path.abspath(prefix), sys.prefix) if not sources: sources = [] self.sources = sources diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index 881e9ac9d9..05fd19438c 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -3,12 +3,11 @@ import logging import warnings - -warnings.filterwarnings("ignore", category=ResourceWarning) - +from vistir.compat import ResourceWarning logger = logging.getLogger(__name__) logger.addHandler(logging.NullHandler()) +warnings.filterwarnings("ignore", category=ResourceWarning) from .models.requirements import Requirement from .models.lockfile import Lockfile diff --git a/pipenv/vendor/requirementslib/models/cache.py b/pipenv/vendor/requirementslib/models/cache.py index 0b8c47b19d..f1639ea228 100644 --- a/pipenv/vendor/requirementslib/models/cache.py +++ b/pipenv/vendor/requirementslib/models/cache.py @@ -1,6 +1,7 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import, print_function, unicode_literals +import atexit import copy import hashlib import json @@ -197,6 +198,7 @@ def __init__(self, *args, **kwargs): if not session: import requests session = requests.session() + 
atexit.register(session.close) cache_dir = kwargs.pop('cache_dir', CACHE_DIR) self.session = session kwargs.setdefault('directory', os.path.join(cache_dir, 'hash-cache')) diff --git a/pipenv/vendor/requirementslib/models/dependencies.py b/pipenv/vendor/requirementslib/models/dependencies.py index 48e84d0fc3..f87fd585e9 100644 --- a/pipenv/vendor/requirementslib/models/dependencies.py +++ b/pipenv/vendor/requirementslib/models/dependencies.py @@ -1,5 +1,6 @@ # -*- coding=utf-8 -*- +import atexit import contextlib import copy import functools @@ -361,6 +362,7 @@ def get_dependencies_from_json(ireq): return session = requests.session() + atexit.register(session.close) version = str(ireq.req.specifier).lstrip("=") def gen(ireq): @@ -575,6 +577,7 @@ def get_finder(sources=None, pip_command=None, pip_options=None): if not pip_options: pip_options = get_pip_options(sources=sources, pip_command=pip_command) session = pip_command._build_session(pip_options) + atexit.register(session.close) finder = pip_shims.shims.PackageFinder( find_links=[], index_urls=[s.get("url") for s in sources], From e5be2ac50ac1e26f65bad5844bb05db74c53ce36 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 19:03:55 -0500 Subject: [PATCH 30/71] Fix python 2.7 installations Signed-off-by: Dan Ryan --- pipenv/core.py | 20 ++++++++++++-------- pipenv/environment.py | 2 +- pipenv/vendor/requirementslib/exceptions.py | 2 +- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/pipenv/core.py b/pipenv/core.py index 40fc87c464..99184c3d13 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -1852,7 +1852,7 @@ def do_install( # Install all dependencies, if none was provided. # This basically ensures that we have a pipfile and lockfile, then it locks and # installs from the lockfile - if packages is False and editable_packages is False: + if not packages and not editable_packages: # Update project settings with pre preference. 
if pre: project.update_settings({"allow_prereleases": pre}) @@ -1877,13 +1877,17 @@ def do_install( # make a tuple of (display_name, entry) pkg_list = packages + ["-e {0}".format(pkg) for pkg in editable_packages] if not system and not project.virtualenv_exists: - with create_spinner("Creating virtualenv...") as sp: - try: - do_create_virtualenv(pypi_mirror=pypi_mirror) - except KeyboardInterrupt: - cleanup_virtualenv(bare=(not environments.is_verbose())) - sys.exit(1) - sp.write_err("Ok...") + do_init( + dev=dev, + system=system, + allow_global=system, + concurrent=concurrent, + keep_outdated=keep_outdated, + requirements_dir=requirements_directory, + deploy=deploy, + pypi_mirror=pypi_mirror, + skip_lock=skip_lock, + ) for pkg_line in pkg_list: click.echo( crayons.normal( diff --git a/pipenv/environment.py b/pipenv/environment.py index 5e86737491..db0e22aa40 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -25,7 +25,7 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No super(Environment, self).__init__() self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET - self.is_venv = not os.samefile(os.path.abspath(prefix), sys.prefix) + self.is_venv = not os.path.samefile(os.path.abspath(prefix), sys.prefix) if not sources: sources = [] self.sources = sources diff --git a/pipenv/vendor/requirementslib/exceptions.py b/pipenv/vendor/requirementslib/exceptions.py index 1a73f98e77..23bc5e5024 100644 --- a/pipenv/vendor/requirementslib/exceptions.py +++ b/pipenv/vendor/requirementslib/exceptions.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -from __future__ import absolute_import +from __future__ import absolute_import, print_function import errno import os import six From 382be38bbabb3b65425148548bd51fddc1fb28e1 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 19:03:55 -0500 Subject: [PATCH 31/71] Fix python 2.7 
installations Signed-off-by: Dan Ryan --- pipenv/core.py | 20 ++++++++++++-------- pipenv/environment.py | 2 +- pipenv/vendor/requirementslib/exceptions.py | 2 +- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/pipenv/core.py b/pipenv/core.py index c5891a5726..3dd126bffe 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -1847,7 +1847,7 @@ def do_install( # Install all dependencies, if none was provided. # This basically ensures that we have a pipfile and lockfile, then it locks and # installs from the lockfile - if packages is False and editable_packages is False: + if not packages and not editable_packages: # Update project settings with pre preference. if pre: project.update_settings({"allow_prereleases": pre}) @@ -1872,13 +1872,17 @@ def do_install( # make a tuple of (display_name, entry) pkg_list = packages + ["-e {0}".format(pkg) for pkg in editable_packages] if not system and not project.virtualenv_exists: - with create_spinner("Creating virtualenv...") as sp: - try: - do_create_virtualenv(pypi_mirror=pypi_mirror) - except KeyboardInterrupt: - cleanup_virtualenv(bare=(not environments.is_verbose())) - sys.exit(1) - sp.write_err("Ok...") + do_init( + dev=dev, + system=system, + allow_global=system, + concurrent=concurrent, + keep_outdated=keep_outdated, + requirements_dir=requirements_directory, + deploy=deploy, + pypi_mirror=pypi_mirror, + skip_lock=skip_lock, + ) for pkg_line in pkg_list: click.echo( crayons.normal( diff --git a/pipenv/environment.py b/pipenv/environment.py index 5e86737491..db0e22aa40 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -25,7 +25,7 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No super(Environment, self).__init__() self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET - self.is_venv = not os.samefile(os.path.abspath(prefix), sys.prefix) + self.is_venv = not 
os.path.samefile(os.path.abspath(prefix), sys.prefix) if not sources: sources = [] self.sources = sources diff --git a/pipenv/vendor/requirementslib/exceptions.py b/pipenv/vendor/requirementslib/exceptions.py index 1a73f98e77..23bc5e5024 100644 --- a/pipenv/vendor/requirementslib/exceptions.py +++ b/pipenv/vendor/requirementslib/exceptions.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -from __future__ import absolute_import +from __future__ import absolute_import, print_function import errno import os import six From 489e534c9ae0d912390a2681b0887839d492bbf2 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 23:38:09 -0500 Subject: [PATCH 32/71] Fix various bugs with python 2.7 and vendored deps Signed-off-by: Dan Ryan --- pipenv/project.py | 2 +- .../requirementslib/models/setup_info.py | 30 ++++++++++++++++--- pipenv/vendor/vistir/contextmanagers.py | 8 +++-- pipenv/vendor/vistir/spin.py | 2 +- 4 files changed, 34 insertions(+), 8 deletions(-) diff --git a/pipenv/project.py b/pipenv/project.py index fbc18f1970..0eafff8e79 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -350,7 +350,7 @@ def environment(self): if not self._environment: prefix = self.get_location_for_virtualenv() is_venv = prefix == sys.prefix - sources = self.sources.copy() if self.sources else [DEFAULT_SOURCE,] + sources = self.sources if self.sources else [DEFAULT_SOURCE,] self._environment = Environment( prefix=prefix, is_venv=is_venv, sources=sources, pipfile=self.parsed_pipfile ) diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 481f0494c5..247d63f2ef 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -1,6 +1,7 @@ # -*- coding=utf-8 -*- import contextlib import os +import sys import attr import packaging.version @@ -29,6 +30,11 @@ CACHE_DIR = os.environ.get("PIPENV_CACHE_DIR", user_cache_dir("pipenv")) +# The following are necessary 
for people who like to use "if __name__" conditionals +# in their setup.py scripts +_setup_stop_after = None +_setup_distribution = None + @contextlib.contextmanager def _suppress_distutils_logs(): @@ -116,7 +122,7 @@ def get_metadata(path, pkg_name=None): if egg_dir is not None: import pkg_resources - egg_dir = os.path.abspath(egg_dir) + egg_dir = os.path.abspath(egg_dir.path) base_dir = os.path.dirname(egg_dir) path_metadata = pkg_resources.PathMetadata(base_dir, egg_dir) dist = next( @@ -216,10 +222,26 @@ def run_setup(self): if self.setup_py is not None and self.setup_py.exists(): with cd(self.setup_py.parent), _suppress_distutils_logs(): from setuptools.dist import distutils + save_argv = sys.argv.copy() + try: + # This is for you, Hynek + # see https://github.com/hynek/environ_config/blob/69b1c8a/setup.py + global _setup_distribution, _setup_stop_after + _setup_stop_after = "run" + script_name = self.setup_py.as_posix() + g = {"__file__": script_name, "__name__": "__main__"} + sys.argv[0] = script_name + sys.argv[1:] = ["egg_info", "--egg-base", self.base_dir] + with open(script_name, 'rb') as f: + exec(f.read(), g) + finally: + _setup_stop_after = None + sys.argv = save_argv + dist = _setup_distribution + if not dist: + self.get_egg_metadata() + return - dist = distutils.core.run_setup( - self.setup_py.as_posix(), ["egg_info", "--egg-base", self.base_dir] - ) name = dist.get_name() if name: self.name = name diff --git a/pipenv/vendor/vistir/contextmanagers.py b/pipenv/vendor/vistir/contextmanagers.py index 59b97ca0e3..3f19112087 100644 --- a/pipenv/vendor/vistir/contextmanagers.py +++ b/pipenv/vendor/vistir/contextmanagers.py @@ -118,10 +118,11 @@ def spinner(spinner_name=None, start_text=None, handler_map=None, nospin=False): """ from .spin import create_spinner - has_yaspin = False + has_yaspin = None try: import yaspin except ImportError: + has_yaspin = False if not nospin: raise RuntimeError( "Failed to import spinner! 
Reinstall vistir with command:" @@ -132,6 +133,9 @@ def spinner(spinner_name=None, start_text=None, handler_map=None, nospin=False): else: has_yaspin = True spinner_name = "" + use_yaspin = (has_yaspin is False) or (nospin is True) + if has_yaspin is None or has_yaspin is True and not nospin: + use_yaspin = True if not start_text and nospin is False: start_text = "Running..." with create_spinner( @@ -139,7 +143,7 @@ def spinner(spinner_name=None, start_text=None, handler_map=None, nospin=False): text=start_text, handler_map=handler_map, nospin=nospin, - use_yaspin=has_yaspin + use_yaspin=use_yaspin ) as _spinner: yield _spinner diff --git a/pipenv/vendor/vistir/spin.py b/pipenv/vendor/vistir/spin.py index 09ecbacea8..e7311555f7 100644 --- a/pipenv/vendor/vistir/spin.py +++ b/pipenv/vendor/vistir/spin.py @@ -291,7 +291,7 @@ def _clear_line(): def create_spinner(*args, **kwargs): nospin = kwargs.pop("nospin", False) - use_yaspin = kwargs.pop("use_yaspin", nospin) + use_yaspin = kwargs.pop("use_yaspin", not nospin) if nospin or not use_yaspin: return DummySpinner(*args, **kwargs) return VistirSpinner(*args, **kwargs) From 398463245332633d80ba87dcf664a645b0ed233d Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 23:38:09 -0500 Subject: [PATCH 33/71] Fix various bugs with python 2.7 and vendored deps Signed-off-by: Dan Ryan --- pipenv/project.py | 2 +- .../requirementslib/models/setup_info.py | 30 ++++++++++++++++--- pipenv/vendor/vistir/contextmanagers.py | 8 +++-- pipenv/vendor/vistir/spin.py | 2 +- 4 files changed, 34 insertions(+), 8 deletions(-) diff --git a/pipenv/project.py b/pipenv/project.py index fbc18f1970..0eafff8e79 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -350,7 +350,7 @@ def environment(self): if not self._environment: prefix = self.get_location_for_virtualenv() is_venv = prefix == sys.prefix - sources = self.sources.copy() if self.sources else [DEFAULT_SOURCE,] + sources = self.sources if self.sources else [DEFAULT_SOURCE,] 
self._environment = Environment( prefix=prefix, is_venv=is_venv, sources=sources, pipfile=self.parsed_pipfile ) diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 481f0494c5..247d63f2ef 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -1,6 +1,7 @@ # -*- coding=utf-8 -*- import contextlib import os +import sys import attr import packaging.version @@ -29,6 +30,11 @@ CACHE_DIR = os.environ.get("PIPENV_CACHE_DIR", user_cache_dir("pipenv")) +# The following are necessary for people who like to use "if __name__" conditionals +# in their setup.py scripts +_setup_stop_after = None +_setup_distribution = None + @contextlib.contextmanager def _suppress_distutils_logs(): @@ -116,7 +122,7 @@ def get_metadata(path, pkg_name=None): if egg_dir is not None: import pkg_resources - egg_dir = os.path.abspath(egg_dir) + egg_dir = os.path.abspath(egg_dir.path) base_dir = os.path.dirname(egg_dir) path_metadata = pkg_resources.PathMetadata(base_dir, egg_dir) dist = next( @@ -216,10 +222,26 @@ def run_setup(self): if self.setup_py is not None and self.setup_py.exists(): with cd(self.setup_py.parent), _suppress_distutils_logs(): from setuptools.dist import distutils + save_argv = sys.argv.copy() + try: + # This is for you, Hynek + # see https://github.com/hynek/environ_config/blob/69b1c8a/setup.py + global _setup_distribution, _setup_stop_after + _setup_stop_after = "run" + script_name = self.setup_py.as_posix() + g = {"__file__": script_name, "__name__": "__main__"} + sys.argv[0] = script_name + sys.argv[1:] = ["egg_info", "--egg-base", self.base_dir] + with open(script_name, 'rb') as f: + exec(f.read(), g) + finally: + _setup_stop_after = None + sys.argv = save_argv + dist = _setup_distribution + if not dist: + self.get_egg_metadata() + return - dist = distutils.core.run_setup( - self.setup_py.as_posix(), ["egg_info", "--egg-base", 
self.base_dir] - ) name = dist.get_name() if name: self.name = name diff --git a/pipenv/vendor/vistir/contextmanagers.py b/pipenv/vendor/vistir/contextmanagers.py index 59b97ca0e3..3f19112087 100644 --- a/pipenv/vendor/vistir/contextmanagers.py +++ b/pipenv/vendor/vistir/contextmanagers.py @@ -118,10 +118,11 @@ def spinner(spinner_name=None, start_text=None, handler_map=None, nospin=False): """ from .spin import create_spinner - has_yaspin = False + has_yaspin = None try: import yaspin except ImportError: + has_yaspin = False if not nospin: raise RuntimeError( "Failed to import spinner! Reinstall vistir with command:" @@ -132,6 +133,9 @@ def spinner(spinner_name=None, start_text=None, handler_map=None, nospin=False): else: has_yaspin = True spinner_name = "" + use_yaspin = (has_yaspin is False) or (nospin is True) + if has_yaspin is None or has_yaspin is True and not nospin: + use_yaspin = True if not start_text and nospin is False: start_text = "Running..." with create_spinner( @@ -139,7 +143,7 @@ def spinner(spinner_name=None, start_text=None, handler_map=None, nospin=False): text=start_text, handler_map=handler_map, nospin=nospin, - use_yaspin=has_yaspin + use_yaspin=use_yaspin ) as _spinner: yield _spinner diff --git a/pipenv/vendor/vistir/spin.py b/pipenv/vendor/vistir/spin.py index 09ecbacea8..e7311555f7 100644 --- a/pipenv/vendor/vistir/spin.py +++ b/pipenv/vendor/vistir/spin.py @@ -291,7 +291,7 @@ def _clear_line(): def create_spinner(*args, **kwargs): nospin = kwargs.pop("nospin", False) - use_yaspin = kwargs.pop("use_yaspin", nospin) + use_yaspin = kwargs.pop("use_yaspin", not nospin) if nospin or not use_yaspin: return DummySpinner(*args, **kwargs) return VistirSpinner(*args, **kwargs) From 32a6dd38a56e40b408317dc922b0e9f703b9be71 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 23:58:52 -0500 Subject: [PATCH 34/71] Support python 2 parsing Signed-off-by: Dan Ryan --- pipenv/vendor/requirementslib/models/setup_info.py | 11 +++++++---- 1 
file changed, 7 insertions(+), 4 deletions(-) diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 247d63f2ef..561ba15610 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -221,19 +221,22 @@ def parse_setup_cfg(self): def run_setup(self): if self.setup_py is not None and self.setup_py.exists(): with cd(self.setup_py.parent), _suppress_distutils_logs(): - from setuptools.dist import distutils - save_argv = sys.argv.copy() - try: + if sys.version_info < (3, 5): + save_argv = sys.argv[:] + else: + save_argv = sys.argv.copy() # This is for you, Hynek # see https://github.com/hynek/environ_config/blob/69b1c8a/setup.py + try: global _setup_distribution, _setup_stop_after _setup_stop_after = "run" script_name = self.setup_py.as_posix() g = {"__file__": script_name, "__name__": "__main__"} + l = {} sys.argv[0] = script_name sys.argv[1:] = ["egg_info", "--egg-base", self.base_dir] with open(script_name, 'rb') as f: - exec(f.read(), g) + exec(f.read(), g, l) finally: _setup_stop_after = None sys.argv = save_argv From 4009198340db2086dca831f445177ba9e1e26146 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 00:08:28 -0500 Subject: [PATCH 35/71] Fix environment site import Signed-off-by: Dan Ryan --- pipenv/environment.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index db0e22aa40..c3c33dda24 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -7,6 +7,7 @@ import sys import operator import pkg_resources +import site import six from distutils.sysconfig import get_python_lib @@ -239,7 +240,6 @@ def get_distributions(self): return pkg_resources.find_distributions(self.paths["PYTHONPATH"]) def find_egg(self, egg_dist): - import site site_packages = get_python_lib() search_filename = "{0}.egg-link".format(egg_dist.project_name) try: @@ -476,7 
+476,6 @@ def activated(self, include_extras=True, extra_dists=None): os.environ["VIRTUAL_ENV"] = vistir.compat.fs_str(prefix) sys.path = self.sys_path sys.prefix = self.sys_prefix - site = self.safe_import("site") site.addsitedir(self.base_paths["purelib"]) if include_extras: site.addsitedir(parent_path) From 32b1113fbb6bad61f138f1aa1c47d1a686f35baa Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 11 Nov 2018 23:58:52 -0500 Subject: [PATCH 36/71] Support python 2 parsing Signed-off-by: Dan Ryan --- pipenv/vendor/requirementslib/models/setup_info.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 247d63f2ef..561ba15610 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -221,19 +221,22 @@ def parse_setup_cfg(self): def run_setup(self): if self.setup_py is not None and self.setup_py.exists(): with cd(self.setup_py.parent), _suppress_distutils_logs(): - from setuptools.dist import distutils - save_argv = sys.argv.copy() - try: + if sys.version_info < (3, 5): + save_argv = sys.argv[:] + else: + save_argv = sys.argv.copy() # This is for you, Hynek # see https://github.com/hynek/environ_config/blob/69b1c8a/setup.py + try: global _setup_distribution, _setup_stop_after _setup_stop_after = "run" script_name = self.setup_py.as_posix() g = {"__file__": script_name, "__name__": "__main__"} + l = {} sys.argv[0] = script_name sys.argv[1:] = ["egg_info", "--egg-base", self.base_dir] with open(script_name, 'rb') as f: - exec(f.read(), g) + exec(f.read(), g, l) finally: _setup_stop_after = None sys.argv = save_argv From 1216ae0c8a898ce81a3261762cf759c1d9fe5c4f Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 00:08:28 -0500 Subject: [PATCH 37/71] Fix environment site import Signed-off-by: Dan Ryan --- pipenv/environment.py | 3 +-- 1 file changed, 1 
insertion(+), 2 deletions(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index db0e22aa40..c3c33dda24 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -7,6 +7,7 @@ import sys import operator import pkg_resources +import site import six from distutils.sysconfig import get_python_lib @@ -239,7 +240,6 @@ def get_distributions(self): return pkg_resources.find_distributions(self.paths["PYTHONPATH"]) def find_egg(self, egg_dist): - import site site_packages = get_python_lib() search_filename = "{0}.egg-link".format(egg_dist.project_name) try: @@ -476,7 +476,6 @@ def activated(self, include_extras=True, extra_dists=None): os.environ["VIRTUAL_ENV"] = vistir.compat.fs_str(prefix) sys.path = self.sys_path sys.prefix = self.sys_prefix - site = self.safe_import("site") site.addsitedir(self.base_paths["purelib"]) if include_extras: site.addsitedir(parent_path) From 70fc92b08d213209a7f4363c616fe490cef9dc66 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 10:05:21 -0500 Subject: [PATCH 38/71] Fix import errors on setup parsing Signed-off-by: Dan Ryan --- .../requirementslib/models/setup_info.py | 26 ++++++++++++++----- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 561ba15610..6107a24075 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -18,6 +18,7 @@ from six.moves.urllib.parse import unquote from vistir.compat import Path from vistir.contextmanagers import cd +from vistir.misc import run from vistir.path import create_tracked_tempdir, ensure_mkdir_p, mkdir_p from .utils import init_requirement, get_pyproject @@ -220,9 +221,15 @@ def parse_setup_cfg(self): def run_setup(self): if self.setup_py is not None and self.setup_py.exists(): - with cd(self.setup_py.parent), _suppress_distutils_logs(): + target_cwd = self.setup_py.parent.as_posix() 
+ with cd(target_cwd), _suppress_distutils_logs(): + from setuptools.dist import distutils + script_name = self.setup_py.as_posix() + args = ["egg_info", "--egg-base", self.base_dir] + g = {"__file__": script_name, "__name__": "__main__"} + local_dict = {} if sys.version_info < (3, 5): - save_argv = sys.argv[:] + save_argv = sys.argv else: save_argv = sys.argv.copy() # This is for you, Hynek @@ -230,13 +237,18 @@ def run_setup(self): try: global _setup_distribution, _setup_stop_after _setup_stop_after = "run" - script_name = self.setup_py.as_posix() - g = {"__file__": script_name, "__name__": "__main__"} - l = {} sys.argv[0] = script_name - sys.argv[1:] = ["egg_info", "--egg-base", self.base_dir] + sys.argv[1:] = args with open(script_name, 'rb') as f: - exec(f.read(), g, l) + if sys.version_info < (3, 5): + exec(f.read(), g, local_dict) + else: + exec(f.read(), g) + # We couldn't import everything needed to run setup + except NameError: + python = os.environ.get('PIP_PYTHON_PATH', sys.executable) + out, _ = run([python, "setup.py"] + args, cwd=target_cwd, block=True, + combine_stderr=False, return_object=False, nospin=True) finally: _setup_stop_after = None sys.argv = save_argv From 7e139ad981c44c65c1f2e18d9caed64fc874496f Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 10:05:21 -0500 Subject: [PATCH 39/71] Fix import errors on setup parsing Signed-off-by: Dan Ryan --- .../requirementslib/models/setup_info.py | 26 ++++++++++++++----- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 561ba15610..6107a24075 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -18,6 +18,7 @@ from six.moves.urllib.parse import unquote from vistir.compat import Path from vistir.contextmanagers import cd +from vistir.misc import run from vistir.path import create_tracked_tempdir, 
ensure_mkdir_p, mkdir_p from .utils import init_requirement, get_pyproject @@ -220,9 +221,15 @@ def parse_setup_cfg(self): def run_setup(self): if self.setup_py is not None and self.setup_py.exists(): - with cd(self.setup_py.parent), _suppress_distutils_logs(): + target_cwd = self.setup_py.parent.as_posix() + with cd(target_cwd), _suppress_distutils_logs(): + from setuptools.dist import distutils + script_name = self.setup_py.as_posix() + args = ["egg_info", "--egg-base", self.base_dir] + g = {"__file__": script_name, "__name__": "__main__"} + local_dict = {} if sys.version_info < (3, 5): - save_argv = sys.argv[:] + save_argv = sys.argv else: save_argv = sys.argv.copy() # This is for you, Hynek @@ -230,13 +237,18 @@ def run_setup(self): try: global _setup_distribution, _setup_stop_after _setup_stop_after = "run" - script_name = self.setup_py.as_posix() - g = {"__file__": script_name, "__name__": "__main__"} - l = {} sys.argv[0] = script_name - sys.argv[1:] = ["egg_info", "--egg-base", self.base_dir] + sys.argv[1:] = args with open(script_name, 'rb') as f: - exec(f.read(), g, l) + if sys.version_info < (3, 5): + exec(f.read(), g, local_dict) + else: + exec(f.read(), g) + # We couldn't import everything needed to run setup + except NameError: + python = os.environ.get('PIP_PYTHON_PATH', sys.executable) + out, _ = run([python, "setup.py"] + args, cwd=target_cwd, block=True, + combine_stderr=False, return_object=False, nospin=True) finally: _setup_stop_after = None sys.argv = save_argv From 8502ac96ece321f75b2c10e641d8d2ffba9c9089 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 12:04:58 -0500 Subject: [PATCH 40/71] Revendor Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/models/path.py | 1 - pipenv/vendor/pythonfinder/pythonfinder.py | 4 ++-- pipenv/vendor/pythonfinder/utils.py | 2 +- pipenv/vendor/requirementslib/models/lockfile.py | 4 ++-- .../vendor/requirementslib/models/requirements.py | 15 +++++---------- 
pipenv/vendor/requirementslib/models/utils.py | 1 - pipenv/vendor/vendor.txt | 6 +++--- 7 files changed, 13 insertions(+), 20 deletions(-) diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index 33b4ab58cb..3d01e7cf14 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -448,7 +448,6 @@ def get_py_version(self): if self.is_dir: return None if self.is_python: - from .python import PythonVersion try: py_version = PythonVersion.from_path(path=self, name=self.name) except InvalidPythonVersion: diff --git a/pipenv/vendor/pythonfinder/pythonfinder.py b/pipenv/vendor/pythonfinder/pythonfinder.py index 854cc8e7cf..b3bad57042 100644 --- a/pipenv/vendor/pythonfinder/pythonfinder.py +++ b/pipenv/vendor/pythonfinder/pythonfinder.py @@ -65,7 +65,7 @@ def windows_finder(self): def which(self, exe): return self.system_path.which(exe) - @lru_cache(maxsize=128) + @lru_cache(maxsize=1024) def find_python_version( self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None, name=None ): @@ -113,7 +113,7 @@ def find_python_version( major=major, minor=minor, patch=patch, pre=pre, dev=dev, arch=arch, name=name ) - @lru_cache(maxsize=128) + @lru_cache(maxsize=1024) def find_all_python_versions( self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None, name=None ): diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index ca07b42f76..42a63e54f3 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -90,7 +90,7 @@ def looks_like_python(name): return any(fnmatch(name, rule) for rule in MATCH_RULES) -@lru_cache(maxsize=128) +@lru_cache(maxsize=1024) def path_is_python(path): return path_is_executable(path) and looks_like_python(path.name) diff --git a/pipenv/vendor/requirementslib/models/lockfile.py b/pipenv/vendor/requirementslib/models/lockfile.py index 6f61f57ebc..9d19edaf7c 100644 --- 
a/pipenv/vendor/requirementslib/models/lockfile.py +++ b/pipenv/vendor/requirementslib/models/lockfile.py @@ -223,13 +223,13 @@ def load(cls, path, create=True): try: projectfile = cls.load_projectfile(path, create=create) - except JSONDecodeError as e: + except JSONDecodeError: path = os.path.abspath(path) if not os.path.isdir(path): path = os.path.dirname(path) path = Path(os.path.join(path, "Pipfile.lock")) formatted_path = path.as_posix() - backup_path = "%.bak" % formatted_path + backup_path = "%s.bak" % formatted_path LockfileCorruptException.show(formatted_path, backup_path=backup_path) path.rename(backup_path) cls.load(formatted_path, create=True) diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index aafb059b97..d5330b48f4 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -14,7 +14,7 @@ from first import first from packaging.markers import Marker from packaging.requirements import Requirement as PackagingRequirement -from packaging.specifiers import Specifier, SpecifierSet +from packaging.specifiers import Specifier, SpecifierSet, LegacySpecifier, InvalidSpecifier from packaging.utils import canonicalize_name from six.moves.urllib import parse as urllib_parse from six.moves.urllib.parse import unquote @@ -325,9 +325,6 @@ def get_name(self): if setup_name: name = setup_name self._has_hashed_name = False - version = setupinfo_dict.get("version") - if version and not self.version: - self.version = version build_requires = setupinfo_dict.get("build_requires") build_backend = setupinfo_dict.get("build_backend") if build_requires and not self.pyproject_requires: @@ -404,7 +401,6 @@ def create( cls, path=None, uri=None, editable=False, extras=None, link=None, vcs_type=None, name=None, req=None, line=None, uri_scheme=None, setup_path=None, relpath=None ): - import pip_shims.shims if relpath and not path: path = 
relpath if not path and uri and link.scheme == "file": @@ -455,7 +451,6 @@ def create( creation_kwargs["vcs_type"] = vcs_type _line = None if not name: - import pip_shims.shims _line = unquote(link.url_without_fragment) if link.url else uri if editable: ireq = pip_shims.shims.install_req_from_editable(_line) @@ -1050,8 +1045,6 @@ def copy(self): @classmethod def from_line(cls, line): - import pip_shims.shims - if isinstance(line, pip_shims.shims.InstallRequirement): line = format_requirement(line) hashes = None @@ -1200,7 +1193,6 @@ def from_pipfile(cls, name, pipfile): old_name = cls_inst.req.req.name or cls_inst.req.name if not cls_inst.is_named and not cls_inst.editable and not name: if cls_inst.is_vcs: - import pip_shims.shims ireq = pip_shims.shims.install_req_from_req(cls_inst.as_line(include_hashes=False)) info = SetupInfo.from_ireq(ireq) if info is not None: @@ -1276,7 +1268,10 @@ def get_markers(self): return markers def get_specifier(self): - return Specifier(self.specifiers) + try: + return Specifier(self.specifiers) + except InvalidSpecifier: + return LegacySpecifier(self.specifiers) def get_version(self): return pip_shims.shims.parse_version(self.get_specifier().version) diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index 2b47ee9bb0..0fac2aa3f8 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -95,7 +95,6 @@ def build_vcs_link(vcs, uri, name=None, ref=None, subdirectory=None, extras=None if extras: extras = extras_to_string(extras) uri = "{0}{1}".format(uri, extras) - # if subdirectory: if subdirectory: uri = "{0}&subdirectory={1}".format(uri, subdirectory) return create_link(uri) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 45ff0384c0..c106a59c56 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -21,20 +21,20 @@ pipdeptree==0.13.0 pipreqs==0.4.9 docopt==0.6.2 yarg==0.1.9 
-pythonfinder==1.1.7 +pythonfinder==1.1.8 requests==2.20.0 chardet==3.0.4 idna==2.7 urllib3==1.24 certifi==2018.10.15 -requirementslib==1.2.5 +requirementslib==1.3.0 attrs==18.2.0 distlib==0.2.8 packaging==18.0 pyparsing==2.2.2 pytoml==0.1.19 plette==0.2.2 - tomlkit==0.4.6 + tomlkit==0.5.2 shellingham==1.2.7 six==1.11.0 semver==2.8.1 From 013e3d0ec22607769ac749544c1e5f20ade261c2 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 12:04:58 -0500 Subject: [PATCH 41/71] Revendor Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/models/path.py | 1 - pipenv/vendor/pythonfinder/pythonfinder.py | 4 ++-- pipenv/vendor/pythonfinder/utils.py | 2 +- pipenv/vendor/requirementslib/models/lockfile.py | 4 ++-- .../vendor/requirementslib/models/requirements.py | 15 +++++---------- pipenv/vendor/requirementslib/models/utils.py | 1 - pipenv/vendor/vendor.txt | 6 +++--- 7 files changed, 13 insertions(+), 20 deletions(-) diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index 33b4ab58cb..3d01e7cf14 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -448,7 +448,6 @@ def get_py_version(self): if self.is_dir: return None if self.is_python: - from .python import PythonVersion try: py_version = PythonVersion.from_path(path=self, name=self.name) except InvalidPythonVersion: diff --git a/pipenv/vendor/pythonfinder/pythonfinder.py b/pipenv/vendor/pythonfinder/pythonfinder.py index 854cc8e7cf..b3bad57042 100644 --- a/pipenv/vendor/pythonfinder/pythonfinder.py +++ b/pipenv/vendor/pythonfinder/pythonfinder.py @@ -65,7 +65,7 @@ def windows_finder(self): def which(self, exe): return self.system_path.which(exe) - @lru_cache(maxsize=128) + @lru_cache(maxsize=1024) def find_python_version( self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None, name=None ): @@ -113,7 +113,7 @@ def find_python_version( major=major, minor=minor, patch=patch, pre=pre, dev=dev, 
arch=arch, name=name ) - @lru_cache(maxsize=128) + @lru_cache(maxsize=1024) def find_all_python_versions( self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None, name=None ): diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index ca07b42f76..42a63e54f3 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -90,7 +90,7 @@ def looks_like_python(name): return any(fnmatch(name, rule) for rule in MATCH_RULES) -@lru_cache(maxsize=128) +@lru_cache(maxsize=1024) def path_is_python(path): return path_is_executable(path) and looks_like_python(path.name) diff --git a/pipenv/vendor/requirementslib/models/lockfile.py b/pipenv/vendor/requirementslib/models/lockfile.py index 6f61f57ebc..9d19edaf7c 100644 --- a/pipenv/vendor/requirementslib/models/lockfile.py +++ b/pipenv/vendor/requirementslib/models/lockfile.py @@ -223,13 +223,13 @@ def load(cls, path, create=True): try: projectfile = cls.load_projectfile(path, create=create) - except JSONDecodeError as e: + except JSONDecodeError: path = os.path.abspath(path) if not os.path.isdir(path): path = os.path.dirname(path) path = Path(os.path.join(path, "Pipfile.lock")) formatted_path = path.as_posix() - backup_path = "%.bak" % formatted_path + backup_path = "%s.bak" % formatted_path LockfileCorruptException.show(formatted_path, backup_path=backup_path) path.rename(backup_path) cls.load(formatted_path, create=True) diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index aafb059b97..d5330b48f4 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -14,7 +14,7 @@ from first import first from packaging.markers import Marker from packaging.requirements import Requirement as PackagingRequirement -from packaging.specifiers import Specifier, SpecifierSet +from packaging.specifiers import Specifier, 
SpecifierSet, LegacySpecifier, InvalidSpecifier from packaging.utils import canonicalize_name from six.moves.urllib import parse as urllib_parse from six.moves.urllib.parse import unquote @@ -325,9 +325,6 @@ def get_name(self): if setup_name: name = setup_name self._has_hashed_name = False - version = setupinfo_dict.get("version") - if version and not self.version: - self.version = version build_requires = setupinfo_dict.get("build_requires") build_backend = setupinfo_dict.get("build_backend") if build_requires and not self.pyproject_requires: @@ -404,7 +401,6 @@ def create( cls, path=None, uri=None, editable=False, extras=None, link=None, vcs_type=None, name=None, req=None, line=None, uri_scheme=None, setup_path=None, relpath=None ): - import pip_shims.shims if relpath and not path: path = relpath if not path and uri and link.scheme == "file": @@ -455,7 +451,6 @@ def create( creation_kwargs["vcs_type"] = vcs_type _line = None if not name: - import pip_shims.shims _line = unquote(link.url_without_fragment) if link.url else uri if editable: ireq = pip_shims.shims.install_req_from_editable(_line) @@ -1050,8 +1045,6 @@ def copy(self): @classmethod def from_line(cls, line): - import pip_shims.shims - if isinstance(line, pip_shims.shims.InstallRequirement): line = format_requirement(line) hashes = None @@ -1200,7 +1193,6 @@ def from_pipfile(cls, name, pipfile): old_name = cls_inst.req.req.name or cls_inst.req.name if not cls_inst.is_named and not cls_inst.editable and not name: if cls_inst.is_vcs: - import pip_shims.shims ireq = pip_shims.shims.install_req_from_req(cls_inst.as_line(include_hashes=False)) info = SetupInfo.from_ireq(ireq) if info is not None: @@ -1276,7 +1268,10 @@ def get_markers(self): return markers def get_specifier(self): - return Specifier(self.specifiers) + try: + return Specifier(self.specifiers) + except InvalidSpecifier: + return LegacySpecifier(self.specifiers) def get_version(self): return 
pip_shims.shims.parse_version(self.get_specifier().version) diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index 2b47ee9bb0..0fac2aa3f8 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -95,7 +95,6 @@ def build_vcs_link(vcs, uri, name=None, ref=None, subdirectory=None, extras=None if extras: extras = extras_to_string(extras) uri = "{0}{1}".format(uri, extras) - # if subdirectory: if subdirectory: uri = "{0}&subdirectory={1}".format(uri, subdirectory) return create_link(uri) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 45ff0384c0..c106a59c56 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -21,20 +21,20 @@ pipdeptree==0.13.0 pipreqs==0.4.9 docopt==0.6.2 yarg==0.1.9 -pythonfinder==1.1.7 +pythonfinder==1.1.8 requests==2.20.0 chardet==3.0.4 idna==2.7 urllib3==1.24 certifi==2018.10.15 -requirementslib==1.2.5 +requirementslib==1.3.0 attrs==18.2.0 distlib==0.2.8 packaging==18.0 pyparsing==2.2.2 pytoml==0.1.19 plette==0.2.2 - tomlkit==0.4.6 + tomlkit==0.5.2 shellingham==1.2.7 six==1.11.0 semver==2.8.1 From 96cbd58d84a04d319c422e3e2fcc96c184e48efb Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 13:36:58 -0500 Subject: [PATCH 42/71] Fix prefix comparison for py2 Signed-off-by: Dan Ryan --- pipenv/environment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index c3c33dda24..b96f2fbe26 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -26,7 +26,7 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No super(Environment, self).__init__() self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET - self.is_venv = not os.path.samefile(os.path.abspath(prefix), sys.prefix) + self.is_venv = not 
os.path.samefile(os.path.abspath(str(prefix)), sys.prefix) if not sources: sources = [] self.sources = sources From 0c7f287ec389b4253d25a53d4bc36973ea47d92f Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 13:36:58 -0500 Subject: [PATCH 43/71] Fix prefix comparison for py2 Signed-off-by: Dan Ryan --- pipenv/environment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index c3c33dda24..b96f2fbe26 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -26,7 +26,7 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No super(Environment, self).__init__() self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET - self.is_venv = not os.path.samefile(os.path.abspath(prefix), sys.prefix) + self.is_venv = not os.path.samefile(os.path.abspath(str(prefix)), sys.prefix) if not sources: sources = [] self.sources = sources From d7d50ef3b745c822fd4b3aff509718fc3b57f40f Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 16:31:53 -0500 Subject: [PATCH 44/71] no samefile for windows python2.7 Signed-off-by: Dan Ryan --- pipenv/environment.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index b96f2fbe26..9315447c30 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -26,7 +26,8 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No super(Environment, self).__init__() self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET - self.is_venv = not os.path.samefile(os.path.abspath(str(prefix)), sys.prefix) + prefix = os.path.normcase(os.path.normpath(os.path.abspath(str(prefix)))) + self.is_venv = not prefix == os.path.normcase(os.path.normpath(sys.prefix)) if not 
sources: sources = [] self.sources = sources From 9eabde0bbfa3dc61d71fc1efbc741047dc417663 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 16:31:53 -0500 Subject: [PATCH 45/71] no samefile for windows python2.7 Signed-off-by: Dan Ryan --- pipenv/environment.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index b96f2fbe26..9315447c30 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -26,7 +26,8 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No super(Environment, self).__init__() self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET - self.is_venv = not os.path.samefile(os.path.abspath(str(prefix)), sys.prefix) + prefix = os.path.normcase(os.path.normpath(os.path.abspath(str(prefix)))) + self.is_venv = not prefix == os.path.normcase(os.path.normpath(sys.prefix)) if not sources: sources = [] self.sources = sources From ef59d1520e9b84bd611587a2fd79680df19d53d9 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 22:27:38 -0500 Subject: [PATCH 46/71] Fix bugs in environment implementation - Fix virtualenv - Update pythonfinder Signed-off-by: Dan Ryan --- pipenv/core.py | 5 +- pipenv/environment.py | 33 +++++----- pipenv/project.py | 5 +- pipenv/vendor/pythonfinder/environment.py | 4 ++ pipenv/vendor/pythonfinder/models/asdf.py | 9 +++ pipenv/vendor/pythonfinder/models/path.py | 69 +++++++++++++++------ pipenv/vendor/pythonfinder/models/pyenv.py | 7 ++- pipenv/vendor/pythonfinder/models/python.py | 2 +- pipenv/vendor/pythonfinder/utils.py | 6 +- 9 files changed, 96 insertions(+), 44 deletions(-) create mode 100644 pipenv/vendor/pythonfinder/models/asdf.py diff --git a/pipenv/core.py b/pipenv/core.py index c2df7b78a7..6576648ff4 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -921,7 +921,8 @@ def do_create_virtualenv(python=None, 
site_packages=False, pypi_mirror=None): prefix=project.get_location_for_virtualenv(), is_venv=True, sources=sources, - pipfile=project.parsed_pipfile + pipfile=project.parsed_pipfile, + project=project ) project._environment.add_dist("pipenv") # Say where the virtualenv is. @@ -1621,7 +1622,7 @@ def do_outdated(pypi_mirror=None): outdated_packages = { canonicalize_name(pkg.project_name): package_info (pkg.project_name, pkg.parsed_version, pkg.latest_version) - for pkg in project.get_outdated_packages() + for pkg in project.environment.get_outdated_packages() } for result in installed_packages: dep = Requirement.from_line(str(result.as_requirement())) diff --git a/pipenv/environment.py b/pipenv/environment.py index 9315447c30..8548c38f52 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -22,7 +22,8 @@ class Environment(object): - def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=None, sources=None): + def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=None, + sources=None, project=None): super(Environment, self).__init__() self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET @@ -30,10 +31,17 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No self.is_venv = not prefix == os.path.normcase(os.path.normpath(sys.prefix)) if not sources: sources = [] + self.project = project + if project and not sources: + sources = project.sources self.sources = sources + if project and not pipfile: + pipfile = project.pipfile + self.pipfile = pipfile self.extra_dists = [] prefix = prefix if prefix else sys.prefix self.prefix = vistir.compat.Path(prefix) + self.sys_paths = get_paths() def safe_import(self, name): """Helper utility for reimporting previously imported modules while inside the env""" @@ -73,7 +81,7 @@ def resolve_dist(cls, dist, working_set): deps.add(dist) try: reqs = 
dist.requires() - except AttributeError: + except (AttributeError, OSError): # The METADATA file can't be found return deps for req in reqs: dist = working_set.find(req) @@ -187,12 +195,6 @@ def sys_path(self): path = json.loads(path.strip()) return path - @cached_property - def system_paths(self): - paths = {} - paths = get_paths() - return paths - @cached_property def sys_prefix(self): """The prefix run inside the context of the environment @@ -271,7 +273,8 @@ def get_installed_packages(self): packages = [pkg for pkg in workingset if self.dist_is_in_project(pkg)] return packages - def get_finder(self): + @contextlib.contextmanager + def get_finder(self, pre=False): from .vendor.pip_shims import Command, cmdoptions, index_group, PackageFinder from .environments import PIPENV_CACHE_DIR index_urls = [source.get("url") for source in self.sources] @@ -286,10 +289,10 @@ class PipCommand(Command): cmd_opts = pip_command.cmd_opts pip_command.parser.insert_option_group(0, index_opts) pip_command.parser.insert_option_group(0, cmd_opts) - pip_args = self._modules["pipenv"].utils.prepare_pip_source_args(self.sources, []) + pip_args = self._modules["pipenv"].utils.prepare_pip_source_args(self.sources) pip_options, _ = pip_command.parser.parse_args(pip_args) pip_options.cache_dir = PIPENV_CACHE_DIR - pip_options.pre = self.pipfile.get("pre", False) + pip_options.pre = self.pipfile.get("pre", pre) with pip_command._build_session(pip_options) as session: finder = PackageFinder( find_links=pip_options.find_links, @@ -300,7 +303,7 @@ class PipCommand(Command): ) yield finder - def get_package_info(self): + def get_package_info(self, pre=False): dependency_links = [] packages = self.get_installed_packages() # This code is borrowed from pip's current implementation @@ -314,7 +317,7 @@ def get_package_info(self): for dist in packages: typ = 'unknown' all_candidates = finder.find_all_candidates(dist.key) - if not finder.pip_options.pre: + if not self.pipfile.get("pre", 
finder.allow_all_prereleases): # Remove prereleases all_candidates = [ candidate for candidate in all_candidates @@ -334,9 +337,9 @@ def get_package_info(self): dist.latest_filetype = typ yield dist - def get_outdated_packages(self): + def get_outdated_packages(self, pre=False): return [ - pkg for pkg in self.get_package_info() + pkg for pkg in self.get_package_info(pre=pre) if pkg.latest_version._version > pkg.parsed_version._version ] diff --git a/pipenv/project.py b/pipenv/project.py index 0eafff8e79..7857b25a82 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -352,13 +352,14 @@ def environment(self): is_venv = prefix == sys.prefix sources = self.sources if self.sources else [DEFAULT_SOURCE,] self._environment = Environment( - prefix=prefix, is_venv=is_venv, sources=sources, pipfile=self.parsed_pipfile + prefix=prefix, is_venv=is_venv, sources=sources, pipfile=self.parsed_pipfile, + project=self ) self._environment.add_dist("pipenv") return self._environment def get_outdated_packages(self): - return self.environment.get_outdated_packages() + return self.environment.get_outdated_packages(pre=self.pipfile.get("pre", False)) @classmethod def _sanitize(cls, name): diff --git a/pipenv/vendor/pythonfinder/environment.py b/pipenv/vendor/pythonfinder/environment.py index 27a5b3fc99..ec4a760fac 100644 --- a/pipenv/vendor/pythonfinder/environment.py +++ b/pipenv/vendor/pythonfinder/environment.py @@ -7,9 +7,13 @@ PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool( os.environ.get("PYENV_ROOT") ) +ASDF_INSTALLED = bool(os.environ.get("ASDF_DATA_DIR")) PYENV_ROOT = os.path.expanduser( os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv")) ) +ASDF_DATA_DIR = os.path.expanduser( + os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf")) +) IS_64BIT_OS = None SYSTEM_ARCH = platform.architecture()[0] diff --git a/pipenv/vendor/pythonfinder/models/asdf.py b/pipenv/vendor/pythonfinder/models/asdf.py new file mode 100644 index 0000000000..3ba6e4faba 
--- /dev/null +++ b/pipenv/vendor/pythonfinder/models/asdf.py @@ -0,0 +1,9 @@ +# -*- coding=utf-8 -*- +import attr + +from .pyenv import PyenvFinder + + +@attr.s +class AsdfFinder(PyenvFinder): + version_root = attr.ib(default="installs/python/*") diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index 3d01e7cf14..9c96e5f82f 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -17,7 +17,7 @@ from vistir.compat import Path, fs_str from .mixins import BasePath -from ..environment import PYENV_INSTALLED, PYENV_ROOT +from ..environment import PYENV_INSTALLED, PYENV_ROOT, ASDF_INSTALLED, ASDF_DATA_DIR from ..exceptions import InvalidPythonVersion from ..utils import ( ensure_path, @@ -40,6 +40,7 @@ class SystemPath(object): python_version_dict = attr.ib(default=attr.Factory(defaultdict)) only_python = attr.ib(default=False) pyenv_finder = attr.ib(default=None, validator=optional_instance_of("PyenvPath")) + asdf_finder = attr.ib(default=None) system = attr.ib(default=False) _version_dict = attr.ib(default=attr.Factory(defaultdict)) ignore_unsupported = attr.ib(default=False) @@ -105,6 +106,8 @@ def __attrs_post_init__(self): self._setup_windows() if PYENV_INSTALLED: self._setup_pyenv() + if ASDF_INSTALLED: + self._setup_asdf() venv = os.environ.get("VIRTUAL_ENV") if os.name == "nt": bin_dir = "Scripts" @@ -124,32 +127,62 @@ def __attrs_post_init__(self): path=syspath_bin, is_root=True, only_python=False ) - def _setup_pyenv(self): - from .pyenv import PyenvFinder - - last_pyenv = next( - (p for p in reversed(self.path_order) if PYENV_ROOT.lower() in p.lower()), + def _get_last_instance(self, path): + last_instance = next(iter( + (p for p in reversed(self.path_order) if path.lower() in p.lower())), None, ) try: - pyenv_index = self.path_order.index(last_pyenv) + path_index = self.path_order.index(last_instance) except ValueError: return + return path_index + + def 
_slice_in_paths(self, start_idx, paths): + before_path = self.path_order[: start_idx + 1] + after_path = self.path_order[start_idx + 2 :] + self.path_order = ( + before_path + [p.as_posix() for p in paths] + after_path + ) + + def _remove_path(self, path): + path_copy = reversed(self.path_order[:]) + new_order = [] + target = os.path.normcase(os.path.normpath(os.path.abspath(path))) + path_map = { + os.path.normcase(os.path.normpath(os.path.abspath(pth))): pth + for pth in self.paths.keys() + } + if target in path_map: + del self.paths[path_map.get(target)] + for current_path in path_copy: + normalized = os.path.normcase(os.path.normpath(os.path.abspath(current_path))) + if normalized != target: + new_order.append(normalized) + new_order = reversed(new_order) + self.path_order = new_order + + def _setup_asdf(self): + from .asdf import AsdfFinder + asdf_index = self._get_last_instance(ASDF_DATA_DIR) + self.asdf_finder = AsdfFinder.create(root=ASDF_DATA_DIR, ignore_unsupported=True) + root_paths = [p for p in self.asdf_finder.roots] + self._slice_in_paths(asdf_index, root_paths) + self.paths.update(self.asdf_finder.roots) + self._register_finder("asdf", self.asdf_finder) + + def _setup_pyenv(self): + from .pyenv import PyenvFinder + + pyenv_index = self._get_last_instance(PYENV_ROOT) self.pyenv_finder = PyenvFinder.create( root=PYENV_ROOT, ignore_unsupported=self.ignore_unsupported ) root_paths = [p for p in self.pyenv_finder.roots] - before_path = self.path_order[: pyenv_index + 1] - after_path = self.path_order[pyenv_index + 2 :] - self.path_order = ( - before_path + [p.as_posix() for p in root_paths] + after_path - ) - pyenv_shim_path = os.path.join(PYENV_ROOT, "shims") - if pyenv_shim_path in self.path_order: - self.path_order.remove(pyenv_shim_path) + self._slice_in_paths(pyenv_index, root_paths) + self.paths.update(self.pyenv_finder.roots) - if pyenv_shim_path in self.paths: - del self.paths[pyenv_shim_path] + self._remove_path(os.path.join(PYENV_ROOT, 
"shims")) self._register_finder("pyenv", self.pyenv_finder) def _setup_windows(self): @@ -396,7 +429,7 @@ def create( ) -@attr.s +@attr.s(slots=True) class PathEntry(BasePath): path = attr.ib(default=None, validator=optional_instance_of(Path)) _children = attr.ib(default=attr.Factory(dict)) diff --git a/pipenv/vendor/pythonfinder/models/pyenv.py b/pipenv/vendor/pythonfinder/models/pyenv.py index ac7f8588ac..cf85f57a98 100644 --- a/pipenv/vendor/pythonfinder/models/pyenv.py +++ b/pipenv/vendor/pythonfinder/models/pyenv.py @@ -26,7 +26,7 @@ logger = logging.getLogger(__name__) -@attr.s +@attr.s(slots=True) class PyenvFinder(BaseFinder, BasePath): root = attr.ib(default=None, validator=optional_instance_of(Path)) #: ignore_unsupported should come before versions, because its value is used @@ -34,6 +34,7 @@ class PyenvFinder(BaseFinder, BasePath): ignore_unsupported = attr.ib(default=True) paths = attr.ib(default=attr.Factory(list)) roots = attr.ib(default=attr.Factory(defaultdict)) + version_root = attr.ib(default="versions/*") versions = attr.ib() pythons = attr.ib() @@ -50,7 +51,7 @@ def get_version_order(self): version_order_lines = version_order_file.read_text(encoding="utf-8").splitlines() version_paths = [ - p for p in self.root.glob("versions/*") + p for p in self.root.glob(self.version_root) if not (p.parent.name == "envs" or p.name == "envs") ] versions = {v.name: v for v in version_paths} @@ -74,7 +75,7 @@ def version_from_bin_dir(cls, base_dir, name=None): @versions.default def get_versions(self): versions = defaultdict() - bin_ = sysconfig._INSTALL_SCHEMES['posix_prefix']["scripts"] + bin_ = "{base}/bin" for p in self.get_version_order(): bin_dir = Path(bin_.format(base=p.as_posix())) version_path = None diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py index 24d520b6d2..583dc6b38a 100644 --- a/pipenv/vendor/pythonfinder/models/python.py +++ b/pipenv/vendor/pythonfinder/models/python.py @@ -21,7 +21,7 @@ 
) -@attr.s +@attr.s(slots=True) class PythonVersion(object): major = attr.ib(default=0) minor = attr.ib(default=None) diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index 42a63e54f3..881cdb2eec 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -54,7 +54,7 @@ def get_python_version(path): version_cmd = [path, "-c", "import sys; print(sys.version.split()[0])"] try: c = vistir.misc.run(version_cmd, block=True, nospin=True, return_object=True, - combine_stderr=False) + combine_stderr=False) except OSError: raise InvalidPythonVersion("%s is not a valid python path" % path) if not c.out: @@ -92,7 +92,7 @@ def looks_like_python(name): @lru_cache(maxsize=1024) def path_is_python(path): - return path_is_executable(path) and looks_like_python(path.name) + return path_is_known_executable(path) and looks_like_python(path.name) @lru_cache(maxsize=1024) @@ -117,7 +117,7 @@ def _filter_none(k, v): return False -@lru_cache(maxsize=128) +@lru_cache(maxsize=1024) def filter_pythons(path): """Return all valid pythons in a given path""" if not isinstance(path, vistir.compat.Path): From 9296f561d91f847ad65279b41c458d41a67f7ff7 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 22:27:38 -0500 Subject: [PATCH 47/71] Fix bugs in environment implementation - Fix virtualenv - Update pythonfinder Signed-off-by: Dan Ryan --- pipenv/core.py | 5 +- pipenv/environment.py | 33 +++++----- pipenv/project.py | 5 +- pipenv/vendor/pythonfinder/environment.py | 4 ++ pipenv/vendor/pythonfinder/models/asdf.py | 9 +++ pipenv/vendor/pythonfinder/models/path.py | 69 +++++++++++++++------ pipenv/vendor/pythonfinder/models/pyenv.py | 7 ++- pipenv/vendor/pythonfinder/models/python.py | 2 +- pipenv/vendor/pythonfinder/utils.py | 6 +- 9 files changed, 96 insertions(+), 44 deletions(-) create mode 100644 pipenv/vendor/pythonfinder/models/asdf.py diff --git a/pipenv/core.py b/pipenv/core.py index ee1a7df805..8504553c27 
100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -926,7 +926,8 @@ def do_create_virtualenv(python=None, site_packages=False, pypi_mirror=None): prefix=project.get_location_for_virtualenv(), is_venv=True, sources=sources, - pipfile=project.parsed_pipfile + pipfile=project.parsed_pipfile, + project=project ) project._environment.add_dist("pipenv") # Say where the virtualenv is. @@ -1626,7 +1627,7 @@ def do_outdated(pypi_mirror=None): outdated_packages = { canonicalize_name(pkg.project_name): package_info (pkg.project_name, pkg.parsed_version, pkg.latest_version) - for pkg in project.get_outdated_packages() + for pkg in project.environment.get_outdated_packages() } for result in installed_packages: dep = Requirement.from_line(str(result.as_requirement())) diff --git a/pipenv/environment.py b/pipenv/environment.py index 9315447c30..8548c38f52 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -22,7 +22,8 @@ class Environment(object): - def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=None, sources=None): + def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=None, + sources=None, project=None): super(Environment, self).__init__() self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET @@ -30,10 +31,17 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No self.is_venv = not prefix == os.path.normcase(os.path.normpath(sys.prefix)) if not sources: sources = [] + self.project = project + if project and not sources: + sources = project.sources self.sources = sources + if project and not pipfile: + pipfile = project.pipfile + self.pipfile = pipfile self.extra_dists = [] prefix = prefix if prefix else sys.prefix self.prefix = vistir.compat.Path(prefix) + self.sys_paths = get_paths() def safe_import(self, name): """Helper utility for reimporting previously imported modules while 
inside the env""" @@ -73,7 +81,7 @@ def resolve_dist(cls, dist, working_set): deps.add(dist) try: reqs = dist.requires() - except AttributeError: + except (AttributeError, OSError): # The METADATA file can't be found return deps for req in reqs: dist = working_set.find(req) @@ -187,12 +195,6 @@ def sys_path(self): path = json.loads(path.strip()) return path - @cached_property - def system_paths(self): - paths = {} - paths = get_paths() - return paths - @cached_property def sys_prefix(self): """The prefix run inside the context of the environment @@ -271,7 +273,8 @@ def get_installed_packages(self): packages = [pkg for pkg in workingset if self.dist_is_in_project(pkg)] return packages - def get_finder(self): + @contextlib.contextmanager + def get_finder(self, pre=False): from .vendor.pip_shims import Command, cmdoptions, index_group, PackageFinder from .environments import PIPENV_CACHE_DIR index_urls = [source.get("url") for source in self.sources] @@ -286,10 +289,10 @@ class PipCommand(Command): cmd_opts = pip_command.cmd_opts pip_command.parser.insert_option_group(0, index_opts) pip_command.parser.insert_option_group(0, cmd_opts) - pip_args = self._modules["pipenv"].utils.prepare_pip_source_args(self.sources, []) + pip_args = self._modules["pipenv"].utils.prepare_pip_source_args(self.sources) pip_options, _ = pip_command.parser.parse_args(pip_args) pip_options.cache_dir = PIPENV_CACHE_DIR - pip_options.pre = self.pipfile.get("pre", False) + pip_options.pre = self.pipfile.get("pre", pre) with pip_command._build_session(pip_options) as session: finder = PackageFinder( find_links=pip_options.find_links, @@ -300,7 +303,7 @@ class PipCommand(Command): ) yield finder - def get_package_info(self): + def get_package_info(self, pre=False): dependency_links = [] packages = self.get_installed_packages() # This code is borrowed from pip's current implementation @@ -314,7 +317,7 @@ def get_package_info(self): for dist in packages: typ = 'unknown' all_candidates = 
finder.find_all_candidates(dist.key) - if not finder.pip_options.pre: + if not self.pipfile.get("pre", finder.allow_all_prereleases): # Remove prereleases all_candidates = [ candidate for candidate in all_candidates @@ -334,9 +337,9 @@ def get_package_info(self): dist.latest_filetype = typ yield dist - def get_outdated_packages(self): + def get_outdated_packages(self, pre=False): return [ - pkg for pkg in self.get_package_info() + pkg for pkg in self.get_package_info(pre=pre) if pkg.latest_version._version > pkg.parsed_version._version ] diff --git a/pipenv/project.py b/pipenv/project.py index 0eafff8e79..7857b25a82 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -352,13 +352,14 @@ def environment(self): is_venv = prefix == sys.prefix sources = self.sources if self.sources else [DEFAULT_SOURCE,] self._environment = Environment( - prefix=prefix, is_venv=is_venv, sources=sources, pipfile=self.parsed_pipfile + prefix=prefix, is_venv=is_venv, sources=sources, pipfile=self.parsed_pipfile, + project=self ) self._environment.add_dist("pipenv") return self._environment def get_outdated_packages(self): - return self.environment.get_outdated_packages() + return self.environment.get_outdated_packages(pre=self.pipfile.get("pre", False)) @classmethod def _sanitize(cls, name): diff --git a/pipenv/vendor/pythonfinder/environment.py b/pipenv/vendor/pythonfinder/environment.py index 27a5b3fc99..ec4a760fac 100644 --- a/pipenv/vendor/pythonfinder/environment.py +++ b/pipenv/vendor/pythonfinder/environment.py @@ -7,9 +7,13 @@ PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool( os.environ.get("PYENV_ROOT") ) +ASDF_INSTALLED = bool(os.environ.get("ASDF_DATA_DIR")) PYENV_ROOT = os.path.expanduser( os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv")) ) +ASDF_DATA_DIR = os.path.expanduser( + os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf")) +) IS_64BIT_OS = None SYSTEM_ARCH = platform.architecture()[0] diff --git 
a/pipenv/vendor/pythonfinder/models/asdf.py b/pipenv/vendor/pythonfinder/models/asdf.py new file mode 100644 index 0000000000..3ba6e4faba --- /dev/null +++ b/pipenv/vendor/pythonfinder/models/asdf.py @@ -0,0 +1,9 @@ +# -*- coding=utf-8 -*- +import attr + +from .pyenv import PyenvFinder + + +@attr.s +class AsdfFinder(PyenvFinder): + version_root = attr.ib(default="installs/python/*") diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index 3d01e7cf14..9c96e5f82f 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -17,7 +17,7 @@ from vistir.compat import Path, fs_str from .mixins import BasePath -from ..environment import PYENV_INSTALLED, PYENV_ROOT +from ..environment import PYENV_INSTALLED, PYENV_ROOT, ASDF_INSTALLED, ASDF_DATA_DIR from ..exceptions import InvalidPythonVersion from ..utils import ( ensure_path, @@ -40,6 +40,7 @@ class SystemPath(object): python_version_dict = attr.ib(default=attr.Factory(defaultdict)) only_python = attr.ib(default=False) pyenv_finder = attr.ib(default=None, validator=optional_instance_of("PyenvPath")) + asdf_finder = attr.ib(default=None) system = attr.ib(default=False) _version_dict = attr.ib(default=attr.Factory(defaultdict)) ignore_unsupported = attr.ib(default=False) @@ -105,6 +106,8 @@ def __attrs_post_init__(self): self._setup_windows() if PYENV_INSTALLED: self._setup_pyenv() + if ASDF_INSTALLED: + self._setup_asdf() venv = os.environ.get("VIRTUAL_ENV") if os.name == "nt": bin_dir = "Scripts" @@ -124,32 +127,62 @@ def __attrs_post_init__(self): path=syspath_bin, is_root=True, only_python=False ) - def _setup_pyenv(self): - from .pyenv import PyenvFinder - - last_pyenv = next( - (p for p in reversed(self.path_order) if PYENV_ROOT.lower() in p.lower()), + def _get_last_instance(self, path): + last_instance = next(iter( + (p for p in reversed(self.path_order) if path.lower() in p.lower())), None, ) try: - pyenv_index = 
self.path_order.index(last_pyenv) + path_index = self.path_order.index(last_instance) except ValueError: return + return path_index + + def _slice_in_paths(self, start_idx, paths): + before_path = self.path_order[: start_idx + 1] + after_path = self.path_order[start_idx + 2 :] + self.path_order = ( + before_path + [p.as_posix() for p in paths] + after_path + ) + + def _remove_path(self, path): + path_copy = reversed(self.path_order[:]) + new_order = [] + target = os.path.normcase(os.path.normpath(os.path.abspath(path))) + path_map = { + os.path.normcase(os.path.normpath(os.path.abspath(pth))): pth + for pth in self.paths.keys() + } + if target in path_map: + del self.paths[path_map.get(target)] + for current_path in path_copy: + normalized = os.path.normcase(os.path.normpath(os.path.abspath(current_path))) + if normalized != target: + new_order.append(normalized) + new_order = reversed(new_order) + self.path_order = new_order + + def _setup_asdf(self): + from .asdf import AsdfFinder + asdf_index = self._get_last_instance(ASDF_DATA_DIR) + self.asdf_finder = AsdfFinder.create(root=ASDF_DATA_DIR, ignore_unsupported=True) + root_paths = [p for p in self.asdf_finder.roots] + self._slice_in_paths(asdf_index, root_paths) + self.paths.update(self.asdf_finder.roots) + self._register_finder("asdf", self.asdf_finder) + + def _setup_pyenv(self): + from .pyenv import PyenvFinder + + pyenv_index = self._get_last_instance(PYENV_ROOT) self.pyenv_finder = PyenvFinder.create( root=PYENV_ROOT, ignore_unsupported=self.ignore_unsupported ) root_paths = [p for p in self.pyenv_finder.roots] - before_path = self.path_order[: pyenv_index + 1] - after_path = self.path_order[pyenv_index + 2 :] - self.path_order = ( - before_path + [p.as_posix() for p in root_paths] + after_path - ) - pyenv_shim_path = os.path.join(PYENV_ROOT, "shims") - if pyenv_shim_path in self.path_order: - self.path_order.remove(pyenv_shim_path) + self._slice_in_paths(pyenv_index, root_paths) + 
self.paths.update(self.pyenv_finder.roots) - if pyenv_shim_path in self.paths: - del self.paths[pyenv_shim_path] + self._remove_path(os.path.join(PYENV_ROOT, "shims")) self._register_finder("pyenv", self.pyenv_finder) def _setup_windows(self): @@ -396,7 +429,7 @@ def create( ) -@attr.s +@attr.s(slots=True) class PathEntry(BasePath): path = attr.ib(default=None, validator=optional_instance_of(Path)) _children = attr.ib(default=attr.Factory(dict)) diff --git a/pipenv/vendor/pythonfinder/models/pyenv.py b/pipenv/vendor/pythonfinder/models/pyenv.py index ac7f8588ac..cf85f57a98 100644 --- a/pipenv/vendor/pythonfinder/models/pyenv.py +++ b/pipenv/vendor/pythonfinder/models/pyenv.py @@ -26,7 +26,7 @@ logger = logging.getLogger(__name__) -@attr.s +@attr.s(slots=True) class PyenvFinder(BaseFinder, BasePath): root = attr.ib(default=None, validator=optional_instance_of(Path)) #: ignore_unsupported should come before versions, because its value is used @@ -34,6 +34,7 @@ class PyenvFinder(BaseFinder, BasePath): ignore_unsupported = attr.ib(default=True) paths = attr.ib(default=attr.Factory(list)) roots = attr.ib(default=attr.Factory(defaultdict)) + version_root = attr.ib(default="versions/*") versions = attr.ib() pythons = attr.ib() @@ -50,7 +51,7 @@ def get_version_order(self): version_order_lines = version_order_file.read_text(encoding="utf-8").splitlines() version_paths = [ - p for p in self.root.glob("versions/*") + p for p in self.root.glob(self.version_root) if not (p.parent.name == "envs" or p.name == "envs") ] versions = {v.name: v for v in version_paths} @@ -74,7 +75,7 @@ def version_from_bin_dir(cls, base_dir, name=None): @versions.default def get_versions(self): versions = defaultdict() - bin_ = sysconfig._INSTALL_SCHEMES['posix_prefix']["scripts"] + bin_ = "{base}/bin" for p in self.get_version_order(): bin_dir = Path(bin_.format(base=p.as_posix())) version_path = None diff --git a/pipenv/vendor/pythonfinder/models/python.py 
b/pipenv/vendor/pythonfinder/models/python.py index 24d520b6d2..583dc6b38a 100644 --- a/pipenv/vendor/pythonfinder/models/python.py +++ b/pipenv/vendor/pythonfinder/models/python.py @@ -21,7 +21,7 @@ ) -@attr.s +@attr.s(slots=True) class PythonVersion(object): major = attr.ib(default=0) minor = attr.ib(default=None) diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index 42a63e54f3..881cdb2eec 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -54,7 +54,7 @@ def get_python_version(path): version_cmd = [path, "-c", "import sys; print(sys.version.split()[0])"] try: c = vistir.misc.run(version_cmd, block=True, nospin=True, return_object=True, - combine_stderr=False) + combine_stderr=False) except OSError: raise InvalidPythonVersion("%s is not a valid python path" % path) if not c.out: @@ -92,7 +92,7 @@ def looks_like_python(name): @lru_cache(maxsize=1024) def path_is_python(path): - return path_is_executable(path) and looks_like_python(path.name) + return path_is_known_executable(path) and looks_like_python(path.name) @lru_cache(maxsize=1024) @@ -117,7 +117,7 @@ def _filter_none(k, v): return False -@lru_cache(maxsize=128) +@lru_cache(maxsize=1024) def filter_pythons(path): """Return all valid pythons in a given path""" if not isinstance(path, vistir.compat.Path): From e62b8005068c3a12401c2a820d370e854ecdd6f7 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 23:22:48 -0500 Subject: [PATCH 48/71] Fix syntax Signed-off-by: Dan Ryan --- pipenv/environment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index 8548c38f52..8e96f2c8fd 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -36,7 +36,7 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No sources = project.sources self.sources = sources if project and not pipfile: - pipfile = project.pipfile + pipfile = 
project.parsed_pipfile self.pipfile = pipfile self.extra_dists = [] prefix = prefix if prefix else sys.prefix From cb601b0e5b67ec76d47b9a256e328da93bdb7574 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 12 Nov 2018 23:22:48 -0500 Subject: [PATCH 49/71] Fix syntax Signed-off-by: Dan Ryan --- pipenv/environment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index 8548c38f52..8e96f2c8fd 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -36,7 +36,7 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No sources = project.sources self.sources = sources if project and not pipfile: - pipfile = project.pipfile + pipfile = project.parsed_pipfile self.pipfile = pipfile self.extra_dists = [] prefix = prefix if prefix else sys.prefix From 13c9e62029184b8b30de3afdc7b51b1cae9da062 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 00:39:20 -0500 Subject: [PATCH 50/71] Update pythonfinder Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/models/path.py | 21 +++++++++++++++++---- pipenv/vendor/pythonfinder/utils.py | 12 ++++++------ 2 files changed, 23 insertions(+), 10 deletions(-) diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index 9c96e5f82f..523f117a63 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -26,6 +26,7 @@ optional_instance_of, path_is_known_executable, unnest, + normalize_path ) from .python import PythonVersion @@ -128,9 +129,10 @@ def __attrs_post_init__(self): ) def _get_last_instance(self, path): - last_instance = next(iter( - (p for p in reversed(self.path_order) if path.lower() in p.lower())), - None, + paths = [normalize_path(p) for p in reversed(self.path_order)] + normalized_target = normalize_path(path) + last_instance = next( + iter(p for p in paths if normalized_target in p), None ) try: path_index = 
self.path_order.index(last_instance) @@ -165,6 +167,10 @@ def _remove_path(self, path): def _setup_asdf(self): from .asdf import AsdfFinder asdf_index = self._get_last_instance(ASDF_DATA_DIR) + if not asdf_index: + # we are in a virtualenv without global pyenv on the path, so we should + # not write pyenv to the path here + return self.asdf_finder = AsdfFinder.create(root=ASDF_DATA_DIR, ignore_unsupported=True) root_paths = [p for p in self.asdf_finder.roots] self._slice_in_paths(asdf_index, root_paths) @@ -174,10 +180,14 @@ def _setup_asdf(self): def _setup_pyenv(self): from .pyenv import PyenvFinder - pyenv_index = self._get_last_instance(PYENV_ROOT) self.pyenv_finder = PyenvFinder.create( root=PYENV_ROOT, ignore_unsupported=self.ignore_unsupported ) + pyenv_index = self._get_last_instance(PYENV_ROOT) + if not pyenv_index: + # we are in a virtualenv without global pyenv on the path, so we should + # not write pyenv to the path here + return root_paths = [p for p in self.pyenv_finder.roots] self._slice_in_paths(pyenv_index, root_paths) @@ -485,6 +495,9 @@ def get_py_version(self): py_version = PythonVersion.from_path(path=self, name=self.name) except InvalidPythonVersion: py_version = None + except Exception: + if not IGNORE_UNSUPPORTED: + raise return py_version return diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index 881cdb2eec..b8714f52b6 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -2,13 +2,9 @@ from __future__ import absolute_import, print_function import itertools -import locale import os -import subprocess -import sys from fnmatch import fnmatch -from itertools import chain import attr import six @@ -54,7 +50,7 @@ def get_python_version(path): version_cmd = [path, "-c", "import sys; print(sys.version.split()[0])"] try: c = vistir.misc.run(version_cmd, block=True, nospin=True, return_object=True, - combine_stderr=False) + combine_stderr=False) except OSError: raise 
InvalidPythonVersion("%s is not a valid python path" % path) if not c.out: @@ -92,7 +88,7 @@ def looks_like_python(name): @lru_cache(maxsize=1024) def path_is_python(path): - return path_is_known_executable(path) and looks_like_python(path.name) + return path_is_executable(path) and looks_like_python(path.name) @lru_cache(maxsize=1024) @@ -117,6 +113,10 @@ def _filter_none(k, v): return False +def normalize_path(path): + return os.path.normpath(os.path.normcase(os.path.abspath(path))) + + @lru_cache(maxsize=1024) def filter_pythons(path): """Return all valid pythons in a given path""" From d73879b5e57773973fdeb362d97bef7f46762046 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 01:01:49 -0500 Subject: [PATCH 51/71] Update requirementslib - Fix ref parsing - Fixes #3214 Signed-off-by: Dan Ryan --- pipenv/vendor/requirementslib/__init__.py | 2 +- pipenv/vendor/requirementslib/models/requirements.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index 05fd19438c..f6c985d303 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -1,5 +1,5 @@ # -*- coding=utf-8 -*- -__version__ = '1.2.6' +__version__ = '1.2.7' import logging import warnings diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index d5330b48f4..d034a12d95 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -891,8 +891,8 @@ def from_line(cls, line, editable=None, extras=None): name = link.egg_fragment subdirectory = link.subdirectory_fragment ref = None - if "@" in link.show_url and "@" in uri: - uri, ref = uri.rsplit("@", 1) + if "@" in link.path and "@" in uri: + uri, _, ref = uri.rpartition("@") if relpath and "@" in relpath: relpath, ref = relpath.rsplit("@", 1) return cls( From 
e328ae24dfb61e5d207f1a64f85b9ea949092ed1 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 01:23:19 -0500 Subject: [PATCH 52/71] Fix feedback Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/models/path.py | 6 +++--- pipenv/vendor/pythonfinder/utils.py | 2 +- pipenv/vendor/vendor.txt | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index 523f117a63..df755fa63b 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -150,15 +150,15 @@ def _slice_in_paths(self, start_idx, paths): def _remove_path(self, path): path_copy = reversed(self.path_order[:]) new_order = [] - target = os.path.normcase(os.path.normpath(os.path.abspath(path))) + target = normalize_path(path) path_map = { - os.path.normcase(os.path.normpath(os.path.abspath(pth))): pth + normalize_path(pth): pth for pth in self.paths.keys() } if target in path_map: del self.paths[path_map.get(target)] for current_path in path_copy: - normalized = os.path.normcase(os.path.normpath(os.path.abspath(current_path))) + normalized = normalize_path(current_path) if normalized != target: new_order.append(normalized) new_order = reversed(new_order) diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index b8714f52b6..fb932b10e9 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -114,7 +114,7 @@ def _filter_none(k, v): def normalize_path(path): - return os.path.normpath(os.path.normcase(os.path.abspath(path))) + return os.path.normpath(os.path.normcase(os.path.abspath(str(path)))) @lru_cache(maxsize=1024) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index c106a59c56..623178531c 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -34,7 +34,7 @@ requirementslib==1.3.0 pyparsing==2.2.2 pytoml==0.1.19 plette==0.2.2 - tomlkit==0.5.2 + 
tomlkit==0.4.6 shellingham==1.2.7 six==1.11.0 semver==2.8.1 From 310e0b293bf5febd035430d2a2f80e5c3158c5c8 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 10:17:19 -0500 Subject: [PATCH 53/71] Fix pythonfinder Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/models/path.py | 43 +++- pipenv/vendor/pythonfinder/models/pyenv.py | 2 - pipenv/vendor/pythonfinder/models/python.py | 221 +++++++++++++++++++- pipenv/vendor/pythonfinder/utils.py | 35 +++- 4 files changed, 284 insertions(+), 17 deletions(-) diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index df755fa63b..d3cdd9d168 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -26,7 +26,9 @@ optional_instance_of, path_is_known_executable, unnest, - normalize_path + normalize_path, + parse_pyenv_version_order, + parse_asdf_version_order ) from .python import PythonVersion @@ -165,23 +167,26 @@ def _remove_path(self, path): self.path_order = new_order def _setup_asdf(self): - from .asdf import AsdfFinder + from .python import PythonFinder asdf_index = self._get_last_instance(ASDF_DATA_DIR) if not asdf_index: # we are in a virtualenv without global pyenv on the path, so we should # not write pyenv to the path here return - self.asdf_finder = AsdfFinder.create(root=ASDF_DATA_DIR, ignore_unsupported=True) + self.asdf_finder = PythonFinder.create( + root=ASDF_DATA_DIR, ignore_unsupported=True, + sort_function=parse_asdf_version_order, version_glob_path="installs/python/*") root_paths = [p for p in self.asdf_finder.roots] self._slice_in_paths(asdf_index, root_paths) self.paths.update(self.asdf_finder.roots) self._register_finder("asdf", self.asdf_finder) def _setup_pyenv(self): - from .pyenv import PyenvFinder + from .python import PythonFinder - self.pyenv_finder = PyenvFinder.create( - root=PYENV_ROOT, ignore_unsupported=self.ignore_unsupported + self.pyenv_finder = PythonFinder.create( + 
root=PYENV_ROOT, sort_function=parse_pyenv_version_order, + version_glob_path="versions/*", ignore_unsupported=self.ignore_unsupported ) pyenv_index = self._get_last_instance(PYENV_ROOT) if not pyenv_index: @@ -585,3 +590,29 @@ def is_python(self): return self.is_executable and ( looks_like_python(self.path.name) ) + + +@attr.s +class VersionPath(SystemPath): + base = attr.ib(default=None, validator=optional_instance_of(Path)) + name = attr.ib(default=None) + + @classmethod + def create(cls, path, only_python=True, pythons=None, name=None): + """Accepts a path to a base python version directory. + + Generates the version listings for it""" + from .path import PathEntry + path = ensure_path(path) + path_entries = defaultdict(PathEntry) + bin_ = "{base}/bin" + if path.as_posix().endswith(Path(bin_).name): + path = path.parent + bin_dir = ensure_path(bin_.format(base=path.as_posix())) + if not name: + name = path.name + current_entry = PathEntry.create( + bin_dir, is_root=True, only_python=True, pythons=pythons, name=name + ) + path_entries[bin_dir.as_posix()] = current_entry + return cls(name=name, base=bin_dir, paths=path_entries) diff --git a/pipenv/vendor/pythonfinder/models/pyenv.py b/pipenv/vendor/pythonfinder/models/pyenv.py index cf85f57a98..6f2d6422d5 100644 --- a/pipenv/vendor/pythonfinder/models/pyenv.py +++ b/pipenv/vendor/pythonfinder/models/pyenv.py @@ -14,8 +14,6 @@ from ..utils import ( ensure_path, optional_instance_of, - get_python_version, - filter_pythons, unnest, ) from .mixins import BaseFinder, BasePath diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py index 583dc6b38a..7feee84e85 100644 --- a/pipenv/vendor/pythonfinder/models/python.py +++ b/pipenv/vendor/pythonfinder/models/python.py @@ -3,23 +3,238 @@ import copy import platform +import operator +import logging from collections import defaultdict import attr -from packaging.version import Version, LegacyVersion +from packaging.version 
import Version from packaging.version import parse as parse_version +from vistir.compat import Path -from ..environment import SYSTEM_ARCH +from ..environment import SYSTEM_ARCH, PYENV_ROOT, ASDF_DATA_DIR +from .mixins import BaseFinder, BasePath from ..utils import ( _filter_none, ensure_path, get_python_version, optional_instance_of, - ensure_path, + unnest, + is_in_path, + parse_pyenv_version_order, + parse_asdf_version_order, ) +logger = logging.getLogger(__name__) + + +@attr.s(slots=True) +class PythonFinder(BaseFinder, BasePath): + root = attr.ib(default=None, validator=optional_instance_of(Path)) + #: ignore_unsupported should come before versions, because its value is used + #: in versions's default initializer. + ignore_unsupported = attr.ib(default=True) + #: The function to use to sort version order when returning an ordered verion set + sort_function = attr.ib(default=None) + paths = attr.ib(default=attr.Factory(list)) + roots = attr.ib(default=attr.Factory(defaultdict)) + #: Glob path for python versions off of the root directory + version_glob_path = attr.ib(default="versions/*") + versions = attr.ib() + pythons = attr.ib() + + @property + def expanded_paths(self): + return ( + path for path in unnest(p for p in self.versions.values()) + if path is not None + ) + + @property + def is_pyenv(self): + return is_in_path(str(self.root), PYENV_ROOT) + + @property + def is_asdf(self): + return is_in_path(str(self.root), ASDF_DATA_DIR) + + def get_version_order(self): + version_paths = [ + p for p in self.root.glob(self.version_glob_path) + if not (p.parent.name == "envs" or p.name == "envs") + ] + versions = {v.name: v for v in version_paths} + if self.is_pyenv: + version_order = [versions[v] for v in parse_pyenv_version_order()] + elif self.is_asdf: + version_order = [versions[v] for v in parse_asdf_version_order()] + for version in version_order: + version_paths.remove(version) + if version_order: + version_order += version_paths + else: + version_order = 
version_paths + return version_order + + @classmethod + def version_from_bin_dir(cls, base_dir, name=None): + from .path import PathEntry + py_version = None + version_path = PathEntry.create( + path=base_dir.absolute().as_posix(), + only_python=True, + name=base_dir.parent.name, + ) + py_version = next(iter(version_path.find_all_python_versions()), None) + return py_version + + @versions.default + def get_versions(self): + from .path import PathEntry + versions = defaultdict() + bin_ = "{base}/bin" + for p in self.get_version_order(): + bin_dir = Path(bin_.format(base=p.as_posix())) + version_path = None + if bin_dir.exists(): + version_path = PathEntry.create( + path=bin_dir.absolute().as_posix(), + only_python=False, + name=p.name, + is_root=True, + ) + version = None + try: + version = PythonVersion.parse(p.name) + except ValueError: + entry = next(iter(version_path.find_all_python_versions()), None) + if not entry: + if self.ignore_unsupported: + continue + raise + else: + version = entry.py_version.as_dict() + except Exception: + if not self.ignore_unsupported: + raise + logger.warning( + "Unsupported Python version %r, ignoring...", p.name, exc_info=True + ) + continue + if not version: + continue + version_tuple = ( + version.get("major"), + version.get("minor"), + version.get("patch"), + version.get("is_prerelease"), + version.get("is_devrelease"), + version.get("is_debug"), + ) + self.roots[p] = version_path + versions[version_tuple] = version_path + self.paths.append(version_path) + return versions + + @pythons.default + def get_pythons(self): + pythons = defaultdict() + for p in self.paths: + pythons.update(p.pythons) + return pythons + + @classmethod + def create(cls, root, sort_function=None, version_glob_path=None, ignore_unsupported=True): + root = ensure_path(root) + if not version_glob_path: + version_glob_path = "versions/*" + return cls(root=root, ignore_unsupported=ignore_unsupported, + sort_function=sort_function, 
version_glob_path=version_glob_path) + + def find_all_python_versions( + self, + major=None, + minor=None, + patch=None, + pre=None, + dev=None, + arch=None, + name=None, + ): + """Search for a specific python version on the path. Return all copies + + :param major: Major python version to search for. + :type major: int + :param int minor: Minor python version to search for, defaults to None + :param int patch: Patch python version to search for, defaults to None + :param bool pre: Search for prereleases (default None) - prioritize releases if None + :param bool dev: Search for devreleases (default None) - prioritize releases if None + :param str arch: Architecture to include, e.g. '64bit', defaults to None + :param str name: The name of a python version, e.g. ``anaconda3-5.3.0`` + :return: A list of :class:`~pythonfinder.models.PathEntry` instances matching the version requested. + :rtype: List[:class:`~pythonfinder.models.PathEntry`] + """ + + version_matcher = operator.methodcaller( + "matches", + major=major, + minor=minor, + patch=patch, + pre=pre, + dev=dev, + arch=arch, + name=name, + ) + py = operator.attrgetter("as_python") + pythons = ( + py_ver for py_ver in (py(p) for p in self.pythons.values() if p is not None) + if py_ver is not None + ) + # pythons = filter(None, [p.as_python for p in self.pythons.values()]) + matching_versions = filter(lambda py: version_matcher(py), pythons) + version_sort = operator.attrgetter("version_sort") + return sorted(matching_versions, key=version_sort, reverse=True) + + def find_python_version( + self, + major=None, + minor=None, + patch=None, + pre=None, + dev=None, + arch=None, + name=None, + ): + """Search or self for the specified Python version and return the first match. + + :param major: Major version number. 
+ :type major: int + :param int minor: Minor python version to search for, defaults to None + :param int patch: Patch python version to search for, defaults to None + :param bool pre: Search for prereleases (default None) - prioritize releases if None + :param bool dev: Search for devreleases (default None) - prioritize releases if None + :param str arch: Architecture to include, e.g. '64bit', defaults to None + :param str name: The name of a python version, e.g. ``anaconda3-5.3.0`` + :returns: A :class:`~pythonfinder.models.PathEntry` instance matching the version requested. + """ + + version_matcher = operator.methodcaller( + "matches", + major=major, + minor=minor, + patch=patch, + pre=pre, + dev=dev, + arch=arch, + name=name, + ) + pythons = filter(None, [p.as_python for p in self.pythons.values()]) + matching_versions = filter(lambda py: version_matcher(py), pythons) + version_sort = operator.attrgetter("version_sort") + return next(iter(c for c in sorted(matching_versions, key=version_sort, reverse=True)), None) + @attr.s(slots=True) class PythonVersion(object): diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index fb932b10e9..9c71e38075 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -7,10 +7,12 @@ from fnmatch import fnmatch import attr +import io import six import vistir +from .environment import PYENV_INSTALLED, PYENV_ROOT, ASDF_INSTALLED, ASDF_DATA_DIR from .exceptions import InvalidPythonVersion try: @@ -127,12 +129,6 @@ def filter_pythons(path): return filter(lambda x: path_is_python(x), path.iterdir()) -# def unnest(item): -# if isinstance(next((i for i in item), None), (list, tuple)): -# return chain(*filter(None, item)) -# return chain(filter(None, item)) - - def unnest(item): if isinstance(item, Iterable) and not isinstance(item, six.string_types): item, target = itertools.tee(item, 2) @@ -145,3 +141,30 @@ def unnest(item): yield sub else: yield el + + +def 
parse_pyenv_version_order(filename="version"): + version_order_file = normalize_path(os.path.join(PYENV_ROOT, filename)) + if os.path.exists(version_order_file) and os.path.isfile(version_order_file): + with io.open(version_order_file, encoding="utf-8") as fh: + contents = fh.read() + version_order = [v for v in contents.splitlines()] + return version_order + + +def parse_asdf_version_order(filename=".tool-versions"): + version_order_file = normalize_path(os.path.join("~", filename)) + if os.path.exists(version_order_file) and os.path.isfile(version_order_file): + with io.open(version_order_file, encoding="utf-8") as fh: + contents = fh.read() + python_section = next(iter( + line for line in contents.splitlines() if line.startswith("python") + ), None) + if python_section: + python_key, versions = python_section.partition() + if versions: + return versions.split() + + +def is_in_path(path, parent): + return normalize_path(str(path)).startswith(normalize_path(str(parent))) From 6b3c9a7eb79564daad03ba32aee318a6e8ce8195 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 10:18:15 -0500 Subject: [PATCH 54/71] Remove accidentally committed test script Signed-off-by: Dan Ryan --- pipenv/test_script.py | 32 -------------------------------- 1 file changed, 32 deletions(-) delete mode 100644 pipenv/test_script.py diff --git a/pipenv/test_script.py b/pipenv/test_script.py deleted file mode 100644 index d599ded637..0000000000 --- a/pipenv/test_script.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding=utf-8 -*- - -import os -import sys - - -def _patch_path(): - import site - pipenv_libdir = os.path.dirname(os.path.abspath(__file__)) - pipenv_site_dir = os.path.dirname(pipenv_libdir) - site.addsitedir(pipenv_site_dir) - for _dir in ("vendor", "patched"): - sys.path.insert(0, os.path.join(pipenv_libdir, _dir)) - - -def test_install(): - from pipenv.vendor.vistir.contextmanagers import cd - from pipenv.vendor.click.testing import CliRunner - runner = CliRunner() - with 
cd("/tmp/test"): - from pipenv.core import do_lock - locked = do_lock(system=False, clear=False, pre=False, keep_outdated=False, - write=True, pypi_mirror=None) - # result = runner.invoke(cli, ["lock", "--verbose"]) - # print(result.output) - # print(result.exit_code) - print(locked) - - -if __name__ == "__main__": - _patch_path() - test_install() From 72e2ef70c5b2d2b5820f5e7af1dc401e3da15cfb Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 14:56:26 -0500 Subject: [PATCH 55/71] Derive source names from URLs when not supplied - Fixes #3216 Signed-off-by: Dan Ryan --- news/3216.bugfix.rst | 1 + pipenv/environment.py | 15 ++++++++++++--- pipenv/project.py | 3 ++- pipenv/utils.py | 13 +++++++++++++ 4 files changed, 28 insertions(+), 4 deletions(-) create mode 100644 news/3216.bugfix.rst diff --git a/news/3216.bugfix.rst b/news/3216.bugfix.rst new file mode 100644 index 0000000000..1d600bb808 --- /dev/null +++ b/news/3216.bugfix.rst @@ -0,0 +1 @@ +When sources are missing names, names will now be derived from the supplied URL. 
diff --git a/pipenv/environment.py b/pipenv/environment.py index 8e96f2c8fd..7a7fd2ea02 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -18,6 +18,8 @@ import vistir import pipenv +from .utils import normalize_path + BASE_WORKING_SET = pkg_resources.WorkingSet(sys.path) @@ -27,8 +29,8 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No super(Environment, self).__init__() self._modules = {'pkg_resources': pkg_resources, 'pipenv': pipenv} self.base_working_set = base_working_set if base_working_set else BASE_WORKING_SET - prefix = os.path.normcase(os.path.normpath(os.path.abspath(str(prefix)))) - self.is_venv = not prefix == os.path.normcase(os.path.normpath(sys.prefix)) + prefix = normalize_path(prefix) + self.is_venv = not prefix == normalize_path(sys.prefix) if not sources: sources = [] self.project = project @@ -81,7 +83,7 @@ def resolve_dist(cls, dist, working_set): deps.add(dist) try: reqs = dist.requires() - except (AttributeError, OSError): # The METADATA file can't be found + except (AttributeError, OSError, IOError): # The METADATA file can't be found return deps for req in reqs: dist = working_set.find(req) @@ -243,6 +245,7 @@ def get_distributions(self): return pkg_resources.find_distributions(self.paths["PYTHONPATH"]) def find_egg(self, egg_dist): + """Find an egg by name in the given environment""" site_packages = get_python_lib() search_filename = "{0}.egg-link".format(egg_dist.project_name) try: @@ -256,11 +259,16 @@ def find_egg(self, egg_dist): return egg def locate_dist(self, dist): + """Given a distribution, try to find a corresponding egg link first. 
+ + If the egg - link doesn 't exist, return the supplied distribution.""" + location = self.find_egg(dist) if not location: return dist.location def dist_is_in_project(self, dist): + """Determine whether the supplied distribution is in the environment.""" from .project import _normalized prefix = _normalized(self.base_paths["prefix"]) location = self.locate_dist(dist) @@ -269,6 +277,7 @@ def dist_is_in_project(self, dist): return _normalized(location).startswith(prefix) def get_installed_packages(self): + """Returns all of the installed packages in a given environment""" workingset = self.get_working_set() packages = [pkg for pkg in workingset if self.dist_is_in_project(pkg)] return packages diff --git a/pipenv/project.py b/pipenv/project.py index 957b68271f..590fafe53f 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -29,6 +29,7 @@ convert_toml_outline_tables, is_installable_file, is_valid_url, + get_url_name, normalize_drive, python_version, safe_expandvars, @@ -747,7 +748,7 @@ def get_or_create_lockfile(self): sources = [sources,] lockfile_dict["_meta"]["sources"] = [ { - "name": s["name"], + "name": s.get("name", get_url_name(s.get("url"))), "url": s["url"], "verify_ssl": ( s["verify_ssl"] if isinstance(s["verify_ssl"], bool) else ( diff --git a/pipenv/utils.py b/pipenv/utils.py index dcd67025d2..265c579739 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -1133,6 +1133,19 @@ def path_to_url(path): return Path(normalize_drive(os.path.abspath(path))).as_uri() +def normalize_path(path): + return os.path.expandvars(os.path.expanduser( + os.path.normcase(os.path.normpath(os.path.abspath(str(path)))) + )) + + +def get_url_name(url): + if not isinstance(url, six.string_types): + return + from urllib3.util import parse as urllib3_parse + return urllib3_parse(url).host + + def get_canonical_names(packages): """Canonicalize a list of packages and return a set of canonical names""" from .vendor.packaging.utils import canonicalize_name From 
1e3b8f961565b61640c7791d104c769534c0fcb4 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 15:16:53 -0500 Subject: [PATCH 56/71] Update news entries Signed-off-by: Dan Ryan --- news/2737.bugfix.rst | 2 +- news/2983.bugfix.rst | 2 +- news/3041.feature | 2 +- news/3061.vendor.rst | 1 + news/3088.bugfix.rst | 2 +- news/3096.bugfix.rst | 1 + news/3096.feature.rst | 1 + news/3096.vendor.rst | 6 ++++++ news/3145.bugfix.rst | 2 +- news/3170.feature | 2 +- ...-40ba-8242-1e6ed18fc2fe.feature.rst => 3217.feature.rst} | 0 11 files changed, 15 insertions(+), 6 deletions(-) create mode 100644 news/3061.vendor.rst create mode 100644 news/3096.bugfix.rst create mode 100644 news/3096.feature.rst create mode 100644 news/3096.vendor.rst rename news/{d65e7c90-3e70-40ba-8242-1e6ed18fc2fe.feature.rst => 3217.feature.rst} (100%) diff --git a/news/2737.bugfix.rst b/news/2737.bugfix.rst index bddcff91b3..5716c8dc65 100644 --- a/news/2737.bugfix.rst +++ b/news/2737.bugfix.rst @@ -1 +1 @@ -Handle non-ASCII characters correctly in TOML. +Non-ascii characters will now be handled correctly when parsed by pipenv's ``ToML`` parsers. diff --git a/news/2983.bugfix.rst b/news/2983.bugfix.rst index acfe378072..87dcd71f86 100644 --- a/news/2983.bugfix.rst +++ b/news/2983.bugfix.rst @@ -1 +1 @@ -Pipenv will no longer fail when encountering python versions on Windows that were unintalled. +Pipenv will no longer fail when encountering python versions on Windows that have been uninstalled. diff --git a/news/3041.feature b/news/3041.feature index 79a1d5de17..8a87b5f613 100644 --- a/news/3041.feature +++ b/news/3041.feature @@ -1 +1 @@ ---bare now has an effect on clean, and sync's bare option is now used to reduce output. +Added support for ``--bare`` to ``pipenv clean``, and fixed ``pipenv sync --bare`` to actually reduce output. 
diff --git a/news/3061.vendor.rst b/news/3061.vendor.rst new file mode 100644 index 0000000000..fb815c428c --- /dev/null +++ b/news/3061.vendor.rst @@ -0,0 +1 @@ +Updated ``pythonfinder`` to correct an issue with unnesting of nested paths when searching for python versions. diff --git a/news/3088.bugfix.rst b/news/3088.bugfix.rst index b10c4b2b3d..fa57c42d94 100644 --- a/news/3088.bugfix.rst +++ b/news/3088.bugfix.rst @@ -1 +1 @@ -Fixed a bug which caused ``Unexpected EOF`` errors to be thrown when PIP awaited input from users who put login credentials in their environment. +Fixed a bug which caused ``Unexpected EOF`` errors to be thrown when ``pip`` was waiting for input from users who had put login credentials in environment variables. diff --git a/news/3096.bugfix.rst b/news/3096.bugfix.rst new file mode 100644 index 0000000000..657abbb282 --- /dev/null +++ b/news/3096.bugfix.rst @@ -0,0 +1 @@ +- Fixed a bug which sometimes prevented cloning and parsing ``mercurial`` requirements. diff --git a/news/3096.feature.rst b/news/3096.feature.rst new file mode 100644 index 0000000000..b483d5459f --- /dev/null +++ b/news/3096.feature.rst @@ -0,0 +1 @@ +- Added support for python installations managed by ``asdf``. diff --git a/news/3096.vendor.rst b/news/3096.vendor.rst new file mode 100644 index 0000000000..ff2cc8361d --- /dev/null +++ b/news/3096.vendor.rst @@ -0,0 +1,6 @@ +Updated vendored dependencies: + - ``requests 2.19.1 => 2.20.1`` + - ``tomlkit 0.4.46 => 0.5.2`` + - ``vistir 0.1.6 => 0.2.4`` + - ``pythonfinder 1.1.2 => 1.1.8`` + - ``requirementslib 1.1.10 => 1.3.0`` diff --git a/news/3145.bugfix.rst b/news/3145.bugfix.rst index 61ec9445a0..16696e27ed 100644 --- a/news/3145.bugfix.rst +++ b/news/3145.bugfix.rst @@ -1 +1 @@ -Remote non-PyPI artifacts and local wheels and artifacts will now include their own hashes rather than including hashes from ``PyPI``. +Hashes for remote andd local non-PyPI artifacts will now be included in ``Pipfile.lock`` during resolution. 
diff --git a/news/3170.feature b/news/3170.feature index b8bc5218ee..eae5cbacf9 100644 --- a/news/3170.feature +++ b/news/3170.feature @@ -1 +1 @@ -Do not show error but success for running pipenv uninstall --all in a fresh virtuanlenv +Do not show error but success for running ``pipenv uninstall --all`` in a fresh virtual environment. diff --git a/news/d65e7c90-3e70-40ba-8242-1e6ed18fc2fe.feature.rst b/news/3217.feature.rst similarity index 100% rename from news/d65e7c90-3e70-40ba-8242-1e6ed18fc2fe.feature.rst rename to news/3217.feature.rst From 15f8d78bceae205fe297eb4ed2772da3daced4de Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 17:46:30 -0500 Subject: [PATCH 57/71] Syntax error and pythonfinder issue fixes Signed-off-by: Dan Ryan --- pipenv/utils.py | 3 +- pipenv/vendor/pythonfinder/models/asdf.py | 9 - pipenv/vendor/pythonfinder/models/path.py | 42 ++-- pipenv/vendor/pythonfinder/models/pyenv.py | 237 --------------------- pipenv/vendor/pythonfinder/utils.py | 10 +- 5 files changed, 29 insertions(+), 272 deletions(-) delete mode 100644 pipenv/vendor/pythonfinder/models/asdf.py delete mode 100644 pipenv/vendor/pythonfinder/models/pyenv.py diff --git a/pipenv/utils.py b/pipenv/utils.py index 265c579739..2f48905a62 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -1142,8 +1142,7 @@ def normalize_path(path): def get_url_name(url): if not isinstance(url, six.string_types): return - from urllib3.util import parse as urllib3_parse - return urllib3_parse(url).host + return urllib3_util.parse_url(url).host def get_canonical_names(packages): diff --git a/pipenv/vendor/pythonfinder/models/asdf.py b/pipenv/vendor/pythonfinder/models/asdf.py deleted file mode 100644 index 3ba6e4faba..0000000000 --- a/pipenv/vendor/pythonfinder/models/asdf.py +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding=utf-8 -*- -import attr - -from .pyenv import PyenvFinder - - -@attr.s -class AsdfFinder(PyenvFinder): - version_root = attr.ib(default="installs/python/*") diff --git 
a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index d3cdd9d168..53c2360cfb 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -131,7 +131,8 @@ def __attrs_post_init__(self): ) def _get_last_instance(self, path): - paths = [normalize_path(p) for p in reversed(self.path_order)] + reversed_paths = reversed(self.path_order) + paths = [normalize_path(p) for p in reversed_paths] normalized_target = normalize_path(path) last_instance = next( iter(p for p in paths if normalized_target in p), None @@ -150,7 +151,7 @@ def _slice_in_paths(self, start_idx, paths): ) def _remove_path(self, path): - path_copy = reversed(self.path_order[:]) + path_copy = [p for p in reversed(self.path_order[:])] new_order = [] target = normalize_path(path) path_map = { @@ -163,22 +164,23 @@ def _remove_path(self, path): normalized = normalize_path(current_path) if normalized != target: new_order.append(normalized) - new_order = reversed(new_order) + new_order = [p for p in reversed(new_order)] self.path_order = new_order def _setup_asdf(self): from .python import PythonFinder + self.asdf_finder = PythonFinder.create( + root=ASDF_DATA_DIR, ignore_unsupported=True, + sort_function=parse_asdf_version_order, version_glob_path="installs/python/*") asdf_index = self._get_last_instance(ASDF_DATA_DIR) if not asdf_index: # we are in a virtualenv without global pyenv on the path, so we should # not write pyenv to the path here return - self.asdf_finder = PythonFinder.create( - root=ASDF_DATA_DIR, ignore_unsupported=True, - sort_function=parse_asdf_version_order, version_glob_path="installs/python/*") root_paths = [p for p in self.asdf_finder.roots] self._slice_in_paths(asdf_index, root_paths) self.paths.update(self.asdf_finder.roots) + self._remove_path(normalize_path(os.path.join(ASDF_DATA_DIR, "shims"))) self._register_finder("asdf", self.asdf_finder) def _setup_pyenv(self): @@ -452,7 +454,7 @@ class 
PathEntry(BasePath): only_python = attr.ib(default=False) name = attr.ib() py_version = attr.ib() - pythons = attr.ib() + _pythons = attr.ib(default=attr.Factory(defaultdict)) def __str__(self): return fs_str("{0}".format(self.path.as_posix())) @@ -506,19 +508,19 @@ def get_py_version(self): return py_version return - @pythons.default - def get_pythons(self): - pythons = defaultdict() - if self.is_dir: - for path, entry in self.children.items(): - _path = ensure_path(entry.path) - if entry.is_python: - pythons[_path.as_posix()] = entry - else: - if self.is_python: - _path = ensure_path(self.path) - pythons[_path.as_posix()] = self - return pythons + @property + def pythons(self): + if not self._pythons: + if self.is_dir: + for path, entry in self.children.items(): + _path = ensure_path(entry.path) + if entry.is_python: + self._pythons[_path.as_posix()] = entry + else: + if self.is_python: + _path = ensure_path(self.path) + self._pythons[_path.as_posix()] = self + return self._pythons @cached_property def as_python(self): diff --git a/pipenv/vendor/pythonfinder/models/pyenv.py b/pipenv/vendor/pythonfinder/models/pyenv.py deleted file mode 100644 index 6f2d6422d5..0000000000 --- a/pipenv/vendor/pythonfinder/models/pyenv.py +++ /dev/null @@ -1,237 +0,0 @@ -# -*- coding=utf-8 -*- -from __future__ import absolute_import, print_function - -import logging -import operator - -from collections import defaultdict - -import attr -import sysconfig - -from vistir.compat import Path - -from ..utils import ( - ensure_path, - optional_instance_of, - unnest, -) -from .mixins import BaseFinder, BasePath -from .path import SystemPath, PathEntry -from .python import PythonVersion - - -logger = logging.getLogger(__name__) - - -@attr.s(slots=True) -class PyenvFinder(BaseFinder, BasePath): - root = attr.ib(default=None, validator=optional_instance_of(Path)) - #: ignore_unsupported should come before versions, because its value is used - #: in versions's default initializer. 
- ignore_unsupported = attr.ib(default=True) - paths = attr.ib(default=attr.Factory(list)) - roots = attr.ib(default=attr.Factory(defaultdict)) - version_root = attr.ib(default="versions/*") - versions = attr.ib() - pythons = attr.ib() - - @property - def expanded_paths(self): - return ( - path for path in unnest(p for p in self.versions.values()) - if path is not None - ) - - def get_version_order(self): - version_order_file, version_order_lines = self.root.joinpath("version"), [] - if version_order_file.exists(): - version_order_lines = version_order_file.read_text(encoding="utf-8").splitlines() - - version_paths = [ - p for p in self.root.glob(self.version_root) - if not (p.parent.name == "envs" or p.name == "envs") - ] - versions = {v.name: v for v in version_paths} - version_order = [versions[v] for v in version_order_lines if v in versions] - for version in version_order: - version_paths.remove(version) - version_order += version_paths - return version_order - - @classmethod - def version_from_bin_dir(cls, base_dir, name=None): - py_version = None - version_path = PathEntry.create( - path=base_dir.absolute().as_posix(), - only_python=True, - name=base_dir.parent.name, - ) - py_version = next(iter(version_path.find_all_python_versions()), None) - return py_version - - @versions.default - def get_versions(self): - versions = defaultdict() - bin_ = "{base}/bin" - for p in self.get_version_order(): - bin_dir = Path(bin_.format(base=p.as_posix())) - version_path = None - if bin_dir.exists(): - version_path = PathEntry.create( - path=bin_dir.absolute().as_posix(), - only_python=False, - name=p.name, - is_root=True, - ) - version = None - try: - version = PythonVersion.parse(p.name) - except ValueError: - entry = next(iter(version_path.find_all_python_versions()), None) - if not entry: - if self.ignore_unsupported: - continue - raise - else: - version = entry.py_version.as_dict() - except Exception: - if not self.ignore_unsupported: - raise - logger.warning( - 
"Unsupported Python version %r, ignoring...", p.name, exc_info=True - ) - continue - if not version: - continue - version_tuple = ( - version.get("major"), - version.get("minor"), - version.get("patch"), - version.get("is_prerelease"), - version.get("is_devrelease"), - version.get("is_debug"), - ) - self.roots[p] = version_path - versions[version_tuple] = version_path - self.paths.append(version_path) - return versions - - @pythons.default - def get_pythons(self): - pythons = defaultdict() - for p in self.paths: - pythons.update(p.pythons) - return pythons - - @classmethod - def create(cls, root, ignore_unsupported=True): - root = ensure_path(root) - return cls(root=root, ignore_unsupported=ignore_unsupported) - - def find_all_python_versions( - self, - major=None, - minor=None, - patch=None, - pre=None, - dev=None, - arch=None, - name=None, - ): - """Search for a specific python version on the path. Return all copies - - :param major: Major python version to search for. - :type major: int - :param int minor: Minor python version to search for, defaults to None - :param int patch: Patch python version to search for, defaults to None - :param bool pre: Search for prereleases (default None) - prioritize releases if None - :param bool dev: Search for devreleases (default None) - prioritize releases if None - :param str arch: Architecture to include, e.g. '64bit', defaults to None - :param str name: The name of a python version, e.g. ``anaconda3-5.3.0`` - :return: A list of :class:`~pythonfinder.models.PathEntry` instances matching the version requested. 
- :rtype: List[:class:`~pythonfinder.models.PathEntry`] - """ - - version_matcher = operator.methodcaller( - "matches", - major=major, - minor=minor, - patch=patch, - pre=pre, - dev=dev, - arch=arch, - name=name, - ) - py = operator.attrgetter("as_python") - pythons = ( - py_ver for py_ver in (py(p) for p in self.pythons.values() if p is not None) - if py_ver is not None - ) - # pythons = filter(None, [p.as_python for p in self.pythons.values()]) - matching_versions = filter(lambda py: version_matcher(py), pythons) - version_sort = operator.attrgetter("version_sort") - return sorted(matching_versions, key=version_sort, reverse=True) - - def find_python_version( - self, - major=None, - minor=None, - patch=None, - pre=None, - dev=None, - arch=None, - name=None, - ): - """Search or self for the specified Python version and return the first match. - - :param major: Major version number. - :type major: int - :param int minor: Minor python version to search for, defaults to None - :param int patch: Patch python version to search for, defaults to None - :param bool pre: Search for prereleases (default None) - prioritize releases if None - :param bool dev: Search for devreleases (default None) - prioritize releases if None - :param str arch: Architecture to include, e.g. '64bit', defaults to None - :param str name: The name of a python version, e.g. ``anaconda3-5.3.0`` - :returns: A :class:`~pythonfinder.models.PathEntry` instance matching the version requested. 
- """ - - version_matcher = operator.methodcaller( - "matches", - major=major, - minor=minor, - patch=patch, - pre=pre, - dev=dev, - arch=arch, - name=name, - ) - pythons = filter(None, [p.as_python for p in self.pythons.values()]) - matching_versions = filter(lambda py: version_matcher(py), pythons) - version_sort = operator.attrgetter("version_sort") - return next(iter(c for c in sorted(matching_versions, key=version_sort, reverse=True)), None) - - -@attr.s -class VersionPath(SystemPath): - base = attr.ib(default=None, validator=optional_instance_of(Path)) - name = attr.ib(default=None) - - @classmethod - def create(cls, path, only_python=True, pythons=None, name=None): - """Accepts a path to a base python version directory. - - Generates the pyenv version listings for it""" - path = ensure_path(path) - path_entries = defaultdict(PathEntry) - bin_ = sysconfig._INSTALL_SCHEMES[sysconfig._get_default_scheme()]["scripts"] - if path.as_posix().endswith(Path(bin_).name): - path = path.parent - bin_dir = ensure_path(bin_.format(base=path.as_posix())) - if not name: - name = path.name - current_entry = PathEntry.create( - bin_dir, is_root=True, only_python=True, pythons=pythons, name=name - ) - path_entries[bin_dir.as_posix()] = current_entry - return cls(name=name, base=bin_dir, paths=path_entries) diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index 619fadeffd..a26d054800 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -12,7 +12,7 @@ import vistir -from .environment import PYENV_INSTALLED, PYENV_ROOT, ASDF_INSTALLED, ASDF_DATA_DIR +from .environment import PYENV_ROOT from .exceptions import InvalidPythonVersion try: @@ -90,7 +90,7 @@ def looks_like_python(name): @lru_cache(maxsize=1024) def path_is_python(path): - return path_is_known_executable(path) and looks_like_python(path.name) + return path_is_executable(path) and looks_like_python(path.name) @lru_cache(maxsize=1024) @@ -116,7 
+116,9 @@ def _filter_none(k, v): def normalize_path(path): - return os.path.normpath(os.path.normcase(os.path.abspath(str(path)))) + return os.path.normpath(os.path.normcase( + os.path.abspath(os.path.expandvars(os.path.expanduser(str(path)))) + )) @lru_cache(maxsize=1024) @@ -161,7 +163,7 @@ def parse_asdf_version_order(filename=".tool-versions"): line for line in contents.splitlines() if line.startswith("python") ), None) if python_section: - python_key, versions = python_section.partition() + python_key, _, versions = python_section.partition(" ") if versions: return versions.split() From f494571183eaa55da58e27e5881b8299f37e9ccc Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 18:07:48 -0500 Subject: [PATCH 58/71] Code cleanup -- unused functions Signed-off-by: Dan Ryan --- pipenv/_compat.py | 31 ++++----- pipenv/utils.py | 158 ---------------------------------------------- 2 files changed, 13 insertions(+), 176 deletions(-) diff --git a/pipenv/_compat.py b/pipenv/_compat.py index fb2c01477f..1dc52d5f05 100644 --- a/pipenv/_compat.py +++ b/pipenv/_compat.py @@ -4,6 +4,7 @@ Exposes a standard API that enables compatibility across python versions, operating systems, etc. """ + import functools import importlib import io @@ -66,21 +67,10 @@ def detach(self): warnings.filterwarnings("ignore", category=ResourceWarning) -def pip_import(module_path, subimport=None, old_path=None): - internal = "pip._internal.{0}".format(module_path) - old_path = old_path or module_path - pip9 = "pip.{0}".format(old_path) - try: - _tmp = importlib.import_module(internal) - except ImportError: - _tmp = importlib.import_module(pip9) - if subimport: - return getattr(_tmp, subimport, _tmp) - return _tmp - - class TemporaryDirectory(object): - """Create and return a temporary directory. This has the same + + """ + Create and return a temporary directory. This has the same behavior as mkdtemp but can be used as a context manager. 
For example: @@ -146,9 +136,11 @@ def _sanitize_params(prefix, suffix, dir): class _TemporaryFileCloser: - """A separate object allowing proper closing of a temporary file's + """ + A separate object allowing proper closing of a temporary file's underlying file object, without adding a __del__ method to the - temporary file.""" + temporary file. + """ file = None # Set here since __del__ checks it close_called = False @@ -192,7 +184,9 @@ def close(self): class _TemporaryFileWrapper: - """Temporary file wrapper + + """ + Temporary file wrapper This class provides a wrapper around files opened for temporary use. In particular, it seeks to automatically remove the file when it is no longer needed. @@ -268,7 +262,8 @@ def NamedTemporaryFile( dir=None, delete=True, ): - """Create and return a temporary file. + """ + Create and return a temporary file. Arguments: 'prefix', 'suffix', 'dir' -- as for mkstemp. 'mode' -- the mode argument to io.open (default "w+b"). diff --git a/pipenv/utils.py b/pipenv/utils.py index 2f48905a62..9f62e2b234 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -778,13 +778,6 @@ def resolve_deps( return results -def multi_split(s, split): - """Splits on multiple given separators.""" - for r in split: - s = s.replace(r, "|") - return [i for i in s.split("|") if len(i) > 0] - - def is_star(val): return isinstance(val, six.string_types) and val == "*" @@ -855,22 +848,6 @@ def is_required_version(version, specified_version): return True -def strip_ssh_from_git_uri(uri): - """Return git+ssh:// formatted URI to git+git@ format""" - if isinstance(uri, six.string_types): - uri = uri.replace("git+ssh://", "git+") - return uri - - -def clean_git_uri(uri): - """Cleans VCS uris from pip format""" - if isinstance(uri, six.string_types): - # Add scheme for parsing purposes, this is also what pip does - if uri.startswith("git+") and "://" not in uri: - uri = uri.replace("git+", "git+ssh://") - return uri - - def is_editable(pipfile_entry): if 
hasattr(pipfile_entry, "get"): return pipfile_entry.get("editable", False) and any( @@ -1013,94 +990,6 @@ def split_section(input_file, section_suffix, test_function): return input_file -def split_file(file_dict): - """Split VCS and editable dependencies out from file.""" - from .vendor.requirementslib.utils import is_vcs - sections = { - "vcs": is_vcs, - "editable": lambda x: hasattr(x, "keys") and x.get("editable"), - } - for k, func in sections.items(): - file_dict = split_section(file_dict, k, func) - return file_dict - - -def merge_deps( - file_dict, - project, - dev=False, - requirements=False, - ignore_hashes=False, - blocking=False, - only=False, -): - """ - Given a file_dict, merges dependencies and converts them to pip dependency lists. - :param dict file_dict: The result of calling :func:`pipenv.utils.split_file` - :param :class:`pipenv.project.Project` project: Pipenv project - :param bool dev=False: Flag indicating whether dev dependencies are to be installed - :param bool requirements=False: Flag indicating whether to use a requirements file - :param bool ignore_hashes=False: - :param bool blocking=False: - :param bool only=False: - :return: Pip-converted 3-tuples of [deps, requirements_deps] - """ - deps = [] - requirements_deps = [] - for section in list(file_dict.keys()): - # Turn develop-vcs into ['develop', 'vcs'] - section_name, suffix = ( - section.rsplit("-", 1) - if "-" in section and not section == "dev-packages" - else (section, None) - ) - if not file_dict[section] or section_name not in ( - "dev-packages", - "packages", - "default", - "develop", - ): - continue - - is_dev = section_name in ("dev-packages", "develop") - if is_dev and not dev: - continue - - if ignore_hashes: - for k, v in file_dict[section]: - if "hash" in v: - del v["hash"] - # Block and ignore hashes for all suffixed sections (vcs/editable) - no_hashes = True if suffix else ignore_hashes - block = True if suffix else blocking - include_index = True if not suffix else 
False - converted = convert_deps_to_pip( - file_dict[section], project, r=False, include_index=include_index - ) - deps.extend((d, no_hashes, block) for d in converted) - if dev and is_dev and requirements: - requirements_deps.extend((d, no_hashes, block) for d in converted) - return deps, requirements_deps - - -def recase_file(file_dict): - """Recase file before writing to output.""" - if "packages" in file_dict or "dev-packages" in file_dict: - sections = ("packages", "dev-packages") - elif "default" in file_dict or "develop" in file_dict: - sections = ("default", "develop") - for section in sections: - file_section = file_dict.get(section, {}) - # Try to properly case each key if we can. - for key in list(file_section.keys()): - try: - cased_key = proper_case(key) - except IOError: - cased_key = key - file_section[cased_key] = file_section.pop(key) - return file_dict - - def get_windows_path(*args): """Sanitize a path for windows environments @@ -1355,13 +1244,6 @@ def safe_expandvars(value): return value -def extract_uri_from_vcs_dep(dep): - valid_keys = VCS_LIST + ("uri", "file") - if hasattr(dep, "keys"): - return first(dep[k] for k in valid_keys if k in dep) or None - return None - - def get_vcs_deps( project, which=None, @@ -1570,46 +1452,6 @@ def parse_indexes(line): return indexes, trusted_hosts, remainder -def fix_venv_site(venv_lib_dir): - # From https://github.com/pypa/pip/blob/master/tests/lib/venv.py#L84 - # Prevent accidental inclusions of site packages during virtualenv operations - from .vendor.vistir.compat import Path - import compileall - site_py = Path(venv_lib_dir).joinpath('site.py').as_posix() - with open(site_py) as fp: - site_contents = fp.read() - for pattern, replace in ( - ( - # Ensure enabling user site does not result in adding - # the real site-packages' directory to `sys.path`. 
- ( - '\ndef virtual_addsitepackages(known_paths):\n' - ), - ( - '\ndef virtual_addsitepackages(known_paths):\n' - ' return known_paths\n' - ), - ), - ( - # Fix sites ordering: user site must be added before system. - ( - '\n paths_in_sys = addsitepackages(paths_in_sys)' - '\n paths_in_sys = addusersitepackages(paths_in_sys)\n' - ), - ( - '\n paths_in_sys = addusersitepackages(paths_in_sys)' - '\n paths_in_sys = addsitepackages(paths_in_sys)\n' - ), - ), - ): - if pattern in site_contents and replace not in site_contents: - site_contents = site_contents.replace(pattern, replace) - with open(site_py, 'w') as fp: - fp.write(site_contents) - # Make sure bytecode is up-to-date too. - assert compileall.compile_file(str(site_py), quiet=1, force=True) - - @contextmanager def sys_version(version_tuple): """ From 2e10ff6c7d3735bd98c806b94e7df1b5640f6fee Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 18:08:00 -0500 Subject: [PATCH 59/71] Fix pythonfinder Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/models/path.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index 53c2360cfb..7fae331208 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -33,6 +33,11 @@ from .python import PythonVersion +ASDF_SHIM_PATH = normalize_path(os.path.join(ASDF_DATA_DIR, "shims")) +PYENV_SHIM_PATH = normalize_path(os.path.join(PYENV_ROOT, "shims")) +SHIM_PATHS = [ASDF_SHIM_PATH, PYENV_SHIM_PATH] + + @attr.s class SystemPath(object): global_search = attr.ib(default=True) @@ -434,6 +439,7 @@ def create( path=p.absolute(), is_root=True, only_python=only_python ) for p in _path_objects + if not any(shim in normalize_path(str(p)) for shim in SHIM_PATHS) } ) return cls( @@ -476,6 +482,8 @@ def _gen_children(self): yield (self.path.as_posix(), copy.deepcopy(self)) elif self.is_root: for child in self._filter_children(): + if 
any(shim in normalize_path(str(child)) for shim in SHIM_PATHS): + continue yield (child.as_posix(), PathEntry.create(path=child, **pass_args)) return @@ -566,6 +574,8 @@ def create(cls, path, is_root=False, only_python=False, pythons=None, name=None) if not guessed_name: child_creation_args["name"] = name for pth, python in pythons.items(): + if any(shim in normalize_path(str(pth)) for shim in SHIM_PATHS): + continue pth = ensure_path(pth) children[pth.as_posix()] = PathEntry( py_version=python, From f9b97dacc7afa3d1c3592a6222fd4b2ca32b8cec Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 13 Nov 2018 19:07:06 -0500 Subject: [PATCH 60/71] Get rid of split file test Signed-off-by: Dan Ryan --- tests/unit/test_utils.py | 26 -------------------------- 1 file changed, 26 deletions(-) diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 40977ede67..422c10027f 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -169,32 +169,6 @@ def test_is_vcs(self, entry, expected): from pipenv.vendor.requirementslib.utils import is_vcs assert is_vcs(entry) is expected - @pytest.mark.utils - def test_split_file(self): - pipfile_dict = { - "packages": { - "requests": {"git": "https://github.com/kennethreitz/requests.git"}, - "Flask": "*", - "tablib": {"path": ".", "editable": True}, - }, - "dev-packages": { - "Django": "==1.10", - "click": {"svn": "https://svn.notareal.com/click"}, - "crayons": {"hg": "https://hg.alsonotreal.com/crayons"}, - }, - } - split_dict = pipenv.utils.split_file(pipfile_dict) - assert list(split_dict["packages"].keys()) == ["Flask"] - assert split_dict["packages-vcs"] == { - "requests": {"git": "https://github.com/kennethreitz/requests.git"} - } - assert split_dict["packages-editable"] == { - "tablib": {"path": ".", "editable": True} - } - assert list(split_dict["dev-packages"].keys()) == ["Django"] - assert "click" in split_dict["dev-packages-vcs"] - assert "crayons" in split_dict["dev-packages-vcs"] - @pytest.mark.utils 
def test_python_version_from_bad_path(self): assert pipenv.utils.python_version("/fake/path") is None From 095c9ef73000cd9616f1faff0d0dfc017628caa4 Mon Sep 17 00:00:00 2001 From: frostming Date: Wed, 14 Nov 2018 13:12:22 +0800 Subject: [PATCH 61/71] remove useless tests --- news/3145.bugfix.rst | 2 +- tests/unit/test_utils.py | 26 -------------------------- 2 files changed, 1 insertion(+), 27 deletions(-) diff --git a/news/3145.bugfix.rst b/news/3145.bugfix.rst index 16696e27ed..e0ed509510 100644 --- a/news/3145.bugfix.rst +++ b/news/3145.bugfix.rst @@ -1 +1 @@ -Hashes for remote andd local non-PyPI artifacts will now be included in ``Pipfile.lock`` during resolution. +Hashes for remote and local non-PyPI artifacts will now be included in ``Pipfile.lock`` during resolution. diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 40977ede67..422c10027f 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -169,32 +169,6 @@ def test_is_vcs(self, entry, expected): from pipenv.vendor.requirementslib.utils import is_vcs assert is_vcs(entry) is expected - @pytest.mark.utils - def test_split_file(self): - pipfile_dict = { - "packages": { - "requests": {"git": "https://github.com/kennethreitz/requests.git"}, - "Flask": "*", - "tablib": {"path": ".", "editable": True}, - }, - "dev-packages": { - "Django": "==1.10", - "click": {"svn": "https://svn.notareal.com/click"}, - "crayons": {"hg": "https://hg.alsonotreal.com/crayons"}, - }, - } - split_dict = pipenv.utils.split_file(pipfile_dict) - assert list(split_dict["packages"].keys()) == ["Flask"] - assert split_dict["packages-vcs"] == { - "requests": {"git": "https://github.com/kennethreitz/requests.git"} - } - assert split_dict["packages-editable"] == { - "tablib": {"path": ".", "editable": True} - } - assert list(split_dict["dev-packages"].keys()) == ["Django"] - assert "click" in split_dict["dev-packages-vcs"] - assert "crayons" in split_dict["dev-packages-vcs"] - @pytest.mark.utils def 
test_python_version_from_bad_path(self): assert pipenv.utils.python_version("/fake/path") is None From 08c384bba0f9f1c051ba3057b92e12f0cb42bb17 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 14 Nov 2018 00:28:32 -0500 Subject: [PATCH 62/71] Implement `auto_envvar_prefix` - Closes #2200 Signed-off-by: Dan Ryan --- news/2200.feature.rst | 1 + pipenv/__main__.py | 2 +- pipenv/cli/options.py | 39 ++++++++++++++++++++------------------- 3 files changed, 22 insertions(+), 20 deletions(-) create mode 100644 news/2200.feature.rst diff --git a/news/2200.feature.rst b/news/2200.feature.rst new file mode 100644 index 0000000000..daa4b215af --- /dev/null +++ b/news/2200.feature.rst @@ -0,0 +1 @@ +Added persistent settings for all CLI flags via ``PIPENV_{FLAG_NAME}`` environment variables by enabling ``auto_envvar_prefix=PIPENV`` in click (implements PEEP-0002). diff --git a/pipenv/__main__.py b/pipenv/__main__.py index 98dcca0c28..5649410609 100644 --- a/pipenv/__main__.py +++ b/pipenv/__main__.py @@ -1,4 +1,4 @@ from .cli import cli if __name__ == "__main__": - cli() + cli(auto_envvar_prefix="PIPENV") diff --git a/pipenv/cli/options.py b/pipenv/cli/options.py index 208c0c66cc..da06604a3b 100644 --- a/pipenv/cli/options.py +++ b/pipenv/cli/options.py @@ -106,8 +106,8 @@ def callback(ctx, param, value): state.installstate.editables.extend(value) return value return option('-e', '--editable', expose_value=False, multiple=True, - help='An editable python package URL or path, often to a VCS repo.', - callback=callback)(f) + help='An editable python package URL or path, often to a VCS repo.', + callback=callback, type=click.types.STRING)(f) def sequential_option(f): @@ -157,7 +157,7 @@ def callback(ctx, param, value): return value return option("--ignore-pipfile", is_flag=True, default=False, expose_value=False, help="Ignore Pipfile when installing, using the Pipfile.lock.", - callback=callback)(f) + callback=callback, type=click.types.BOOL)(f) def dev_option(f): @@ -184,7 
+184,8 @@ def callback(ctx, param, value): state = ctx.ensure_object(State) state.installstate.packages.extend(value) return value - return argument('packages', nargs=-1, callback=callback, expose_value=False,)(f) + return argument('packages', nargs=-1, callback=callback, expose_value=False, + type=click.types.STRING)(f) def three_option(f): @@ -195,8 +196,8 @@ def callback(ctx, param, value): state.two = not value return value return option("--three/--two", is_flag=True, default=None, - help="Use Python 3/2 when creating virtualenv.", callback=callback, - expose_value=False)(f) + help="Use Python 3/2 when creating virtualenv.", callback=callback, + expose_value=False)(f) def python_option(f): @@ -206,8 +207,8 @@ def callback(ctx, param, value): state.python = validate_python_path(ctx, param, value) return value return option("--python", default=False, nargs=1, callback=callback, - help="Specify which version of Python virtualenv should use.", - expose_value=False)(f) + help="Specify which version of Python virtualenv should use.", + expose_value=False)(f) def pypi_mirror_option(f): @@ -217,7 +218,7 @@ def callback(ctx, param, value): state.pypi_mirror = validate_pypi_mirror(ctx, param, value) return value return option("--pypi-mirror", default=environments.PIPENV_PYPI_MIRROR, nargs=1, - callback=callback, help="Specify a PyPI mirror.", expose_value=False)(f) + callback=callback, help="Specify a PyPI mirror.", expose_value=False)(f) def verbose_option(f): @@ -227,7 +228,7 @@ def callback(ctx, param, value): state.verbose = True setup_verbosity(ctx, param, value) return option("--verbose", "-v", is_flag=True, expose_value=False, - callback=callback, help="Verbose mode.")(f) + callback=callback, help="Verbose mode.", type=click.types.BOOL)(f) def site_packages_option(f): @@ -236,8 +237,8 @@ def callback(ctx, param, value): state.site_packages = value return value return option("--site-packages", is_flag=True, default=False, type=click.types.BOOL, - help="Enable 
site-packages for the virtualenv.", callback=callback, - expose_value=False)(f) + help="Enable site-packages for the virtualenv.", callback=callback, + expose_value=False)(f) def clear_option(f): @@ -246,8 +247,8 @@ def callback(ctx, param, value): state.clear = value return value return option("--clear", is_flag=True, callback=callback, type=click.types.BOOL, - help="Clears caches (pipenv, pip, and pip-tools).", - expose_value=False)(f) + help="Clears caches (pipenv, pip, and pip-tools).", + expose_value=False)(f) def system_option(f): @@ -257,7 +258,7 @@ def callback(ctx, param, value): state.system = value return value return option("--system", is_flag=True, default=False, help="System pip management.", - callback=callback, type=click.types.BOOL, expose_value=False)(f) + callback=callback, type=click.types.BOOL, expose_value=False)(f) def requirementstxt_option(f): @@ -267,7 +268,7 @@ def callback(ctx, param, value): state.installstate.requirementstxt = value return value return option("--requirements", "-r", nargs=1, default=False, expose_value=False, - help="Import a requirements.txt file.", callback=callback)(f) + help="Import a requirements.txt file.", callback=callback)(f) def requirements_flag(f): @@ -277,7 +278,7 @@ def callback(ctx, param, value): state.installstate.requirementstxt = value return value return option("--requirements", "-r", default=False, is_flag=True, expose_value=False, - help="Generate output in requirements.txt format.", callback=callback)(f) + help="Generate output in requirements.txt format.", callback=callback)(f) def code_option(f): @@ -296,8 +297,8 @@ def callback(ctx, param, value): state.installstate.deploy = value return value return option("--deploy", is_flag=True, default=False, type=click.types.BOOL, - help=u"Abort if the Pipfile.lock is out-of-date, or Python version is" - " wrong.", callback=callback, expose_value=False)(f) + help=u"Abort if the Pipfile.lock is out-of-date, or Python version is" + " wrong.", 
callback=callback, expose_value=False)(f) def setup_verbosity(ctx, param, value): From 2f97279fa8db1b177db60e3ccab3ba73cedb46a0 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 14 Nov 2018 00:29:04 -0500 Subject: [PATCH 63/71] Update vendor file Signed-off-by: Dan Ryan --- pipenv/vendor/vendor.txt | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index c106a59c56..f82df6a708 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -3,10 +3,10 @@ backports.shutil_get_terminal_size==1.0.0 backports.weakref==1.0.post1 blindspin==2.0.1 click==7.0 -click-completion==0.4.1 +click-completion==0.5.0 click-didyoumean==0.0.3 colorama==0.3.9 -delegator.py==0.1.1 +delegator.py==0.1.2 pexpect==4.6.0 ptyprocess==0.6.0 python-dotenv==0.9.1 @@ -21,18 +21,17 @@ pipdeptree==0.13.0 pipreqs==0.4.9 docopt==0.6.2 yarg==0.1.9 -pythonfinder==1.1.8 -requests==2.20.0 +pythonfinder==1.1.9 +requests==2.20.1 chardet==3.0.4 idna==2.7 urllib3==1.24 certifi==2018.10.15 -requirementslib==1.3.0 +requirementslib==1.3.1 attrs==18.2.0 distlib==0.2.8 - packaging==18.0 + packaging==18.1 pyparsing==2.2.2 - pytoml==0.1.19 plette==0.2.2 tomlkit==0.5.2 shellingham==1.2.7 @@ -41,7 +40,7 @@ semver==2.8.1 shutilwhich==1.1.0 toml==0.10.0 cached-property==1.4.3 -vistir==0.2.2 +vistir==0.2.4 pip-shims==0.3.2 ptyprocess==0.6.0 enum34==1.1.6 From ec166f9295ee6892509fa3fcc40e09c8ea4ae58c Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 14 Nov 2018 00:43:11 -0500 Subject: [PATCH 64/71] Reformat cli and command code Signed-off-by: Dan Ryan --- pipenv/cli/command.py | 2 +- pipenv/cli/options.py | 36 ++++++++++++++++++------------------ 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/pipenv/cli/command.py b/pipenv/cli/command.py index 74c577206c..1480a0e714 100644 --- a/pipenv/cli/command.py +++ b/pipenv/cli/command.py @@ -226,6 +226,7 @@ def install( ): """Installs provided packages and adds them to 
Pipfile, or (if no packages are given), installs all packages from Pipfile.""" from ..core import do_install + echo("Skip lock value: %s" % state.installstate.skip_lock) retcode = do_install( dev=state.installstate.dev, @@ -279,7 +280,6 @@ def uninstall( ): """Un-installs a provided package and removes it from Pipfile.""" from ..core import do_uninstall - retcode = do_uninstall( packages=state.installstate.packages, editable_packages=state.installstate.editables, diff --git a/pipenv/cli/options.py b/pipenv/cli/options.py index da06604a3b..2242b4e362 100644 --- a/pipenv/cli/options.py +++ b/pipenv/cli/options.py @@ -86,8 +86,8 @@ def callback(ctx, param, value): state.index = value return value return option('-i', '--index', expose_value=False, envvar="PIP_INDEX_URL", - help='Target PyPI-compatible package index url.', nargs=1, - callback=callback)(f) + help='Target PyPI-compatible package index url.', nargs=1, + callback=callback)(f) def extra_index_option(f): @@ -96,8 +96,8 @@ def callback(ctx, param, value): state.extra_index_urls.extend(list(value)) return value return option("--extra-index-url", multiple=True, expose_value=False, - help=u"URLs to the extra PyPI compatible indexes to query for package lookups.", - callback=callback, envvar="PIP_EXTRA_INDEX_URL")(f) + help=u"URLs to the extra PyPI compatible indexes to query for package lookups.", + callback=callback, envvar="PIP_EXTRA_INDEX_URL")(f) def editable_option(f): @@ -116,8 +116,8 @@ def callback(ctx, param, value): state.installstate.sequential = value return value return option("--sequential", is_flag=True, default=False, expose_value=False, - help="Install dependencies one-at-a-time, instead of concurrently.", - callback=callback, type=click.types.BOOL)(f) + help="Install dependencies one-at-a-time, instead of concurrently.", + callback=callback, type=click.types.BOOL)(f) def skip_lock_option(f): @@ -126,8 +126,8 @@ def callback(ctx, param, value): state.installstate.skip_lock = value return value 
return option("--skip-lock", is_flag=True, default=False, expose_value=False, - help=u"Skip locking mechanisms and use the Pipfile instead during operation.", - callback=callback, type=click.types.BOOL)(f) + help=u"Skip locking mechanisms and use the Pipfile instead during operation.", + callback=callback, type=click.types.BOOL)(f) def keep_outdated_option(f): @@ -136,8 +136,8 @@ def callback(ctx, param, value): state.installstate.keep_outdated = value return value return option("--keep-outdated", is_flag=True, default=False, expose_value=False, - help=u"Keep out-dated dependencies from being updated in Pipfile.lock.", - callback=callback, type=click.types.BOOL)(f) + help=u"Keep out-dated dependencies from being updated in Pipfile.lock.", + callback=callback, type=click.types.BOOL)(f) def selective_upgrade_option(f): @@ -146,8 +146,8 @@ def callback(ctx, param, value): state.installstate.selective_upgrade = value return value return option("--selective-upgrade", is_flag=True, default=False, type=click.types.BOOL, - help="Update specified packages.", callback=callback, - expose_value=False)(f) + help="Update specified packages.", callback=callback, + expose_value=False)(f) def ignore_pipfile_option(f): @@ -156,8 +156,8 @@ def callback(ctx, param, value): state.installstate.ignore_pipfile = value return value return option("--ignore-pipfile", is_flag=True, default=False, expose_value=False, - help="Ignore Pipfile when installing, using the Pipfile.lock.", - callback=callback, type=click.types.BOOL)(f) + help="Ignore Pipfile when installing, using the Pipfile.lock.", + callback=callback, type=click.types.BOOL)(f) def dev_option(f): @@ -166,8 +166,8 @@ def callback(ctx, param, value): state.installstate.dev = value return value return option("--dev", "-d", is_flag=True, default=False, type=click.types.BOOL, - help="Install both develop and default packages.", callback=callback, - expose_value=False)(f) + help="Install both develop and default packages.", 
callback=callback, + expose_value=False)(f) def pre_option(f): @@ -176,7 +176,7 @@ def callback(ctx, param, value): state.installstate.pre = value return value return option("--pre", is_flag=True, default=False, help=u"Allow pre-releases.", - callback=callback, type=click.types.BOOL, expose_value=False)(f) + callback=callback, type=click.types.BOOL, expose_value=False)(f) def package_arg(f): @@ -298,7 +298,7 @@ def callback(ctx, param, value): return value return option("--deploy", is_flag=True, default=False, type=click.types.BOOL, help=u"Abort if the Pipfile.lock is out-of-date, or Python version is" - " wrong.", callback=callback, expose_value=False)(f) + " wrong.", callback=callback, expose_value=False)(f) def setup_verbosity(ctx, param, value): From 86c894d81b2378ca8d962b1af3b904f81265d4d8 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 14 Nov 2018 00:45:47 -0500 Subject: [PATCH 65/71] Woops, delegator isn't updated, neither packaging Signed-off-by: Dan Ryan --- pipenv/vendor/vendor.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index f82df6a708..0a98277beb 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -6,7 +6,7 @@ click==7.0 click-completion==0.5.0 click-didyoumean==0.0.3 colorama==0.3.9 -delegator.py==0.1.2 +delegator.py==0.1.1 pexpect==4.6.0 ptyprocess==0.6.0 python-dotenv==0.9.1 @@ -30,7 +30,7 @@ requests==2.20.1 requirementslib==1.3.1 attrs==18.2.0 distlib==0.2.8 - packaging==18.1 + packaging==18.0 pyparsing==2.2.2 plette==0.2.2 tomlkit==0.5.2 From ada66d3e72d53ce0d2051b626a85dc0a593eacc5 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 14 Nov 2018 00:51:26 -0500 Subject: [PATCH 66/71] Remove old patch Signed-off-by: Dan Ryan --- .../patches/vendor/vistir-spin-colorama.patch | 28 ------------------- 1 file changed, 28 deletions(-) delete mode 100644 tasks/vendoring/patches/vendor/vistir-spin-colorama.patch diff --git 
a/tasks/vendoring/patches/vendor/vistir-spin-colorama.patch b/tasks/vendoring/patches/vendor/vistir-spin-colorama.patch deleted file mode 100644 index 69e4cac735..0000000000 --- a/tasks/vendoring/patches/vendor/vistir-spin-colorama.patch +++ /dev/null @@ -1,28 +0,0 @@ -diff --git a/pipenv/vendor/vistir/spin.py b/pipenv/vendor/vistir/spin.py -index 2a848922..57a90277 100644 ---- a/pipenv/vendor/vistir/spin.py -+++ b/pipenv/vendor/vistir/spin.py -@@ -5,6 +5,7 @@ import os - import signal - import sys - -+import colorama - import cursor - import six - -@@ -31,6 +32,7 @@ CLEAR_LINE = chr(27) + "[K" - - class DummySpinner(object): - def __init__(self, text="", **kwargs): -+ colorama.init() - self.text = to_native_string(text) - self.stdout = kwargs.get("stdout", sys.stdout) - self.stderr = kwargs.get("stderr", sys.stderr) -@@ -112,7 +114,6 @@ class VistirSpinner(base_obj): - """ - - self.handler = handler -- import colorama - colorama.init() - sigmap = {} - if handler: From bfa0b290cca65f3ed8b95350aaf37eb1ed794d7b Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 14 Nov 2018 01:11:59 -0500 Subject: [PATCH 67/71] Update vendored deps Signed-off-by: Dan Ryan --- pipenv/vendor/click_completion/__init__.py | 2 +- pipenv/vendor/click_completion/lib.py | 11 +- pipenv/vendor/passa/cli/options.py | 7 +- pipenv/vendor/pythonfinder/__init__.py | 2 +- pipenv/vendor/pythonfinder/models/python.py | 4 +- pipenv/vendor/pythonfinder/pythonfinder.py | 3 +- pipenv/vendor/pythonfinder/utils.py | 22 +- pipenv/vendor/pytoml/LICENSE | 16 - pipenv/vendor/pytoml/__init__.py | 3 - pipenv/vendor/pytoml/core.py | 13 - pipenv/vendor/pytoml/parser.py | 374 ------------------ pipenv/vendor/pytoml/writer.py | 127 ------ pipenv/vendor/requests/__version__.py | 4 +- pipenv/vendor/requests/sessions.py | 13 +- pipenv/vendor/requests/utils.py | 4 +- pipenv/vendor/requirementslib/__init__.py | 2 +- pipenv/vendor/requirementslib/exceptions.py | 4 +- .../vendor/requirementslib/models/pipfile.py | 20 +- 
.../requirementslib/models/setup_info.py | 3 +- pipenv/vendor/requirementslib/utils.py | 2 +- pipenv/vendor/vistir/__init__.py | 2 +- pipenv/vendor/vistir/contextmanagers.py | 2 +- pipenv/vendor/vistir/path.py | 9 +- 23 files changed, 70 insertions(+), 579 deletions(-) delete mode 100644 pipenv/vendor/pytoml/LICENSE delete mode 100644 pipenv/vendor/pytoml/__init__.py delete mode 100644 pipenv/vendor/pytoml/core.py delete mode 100644 pipenv/vendor/pytoml/parser.py delete mode 100644 pipenv/vendor/pytoml/writer.py diff --git a/pipenv/vendor/click_completion/__init__.py b/pipenv/vendor/click_completion/__init__.py index b849ae2342..e30cc0e362 100644 --- a/pipenv/vendor/click_completion/__init__.py +++ b/pipenv/vendor/click_completion/__init__.py @@ -19,7 +19,7 @@ from click_completion.lib import get_auto_shell from click_completion.patch import patch as _patch -__version__ = '0.4.1' +__version__ = '0.5.0' _initialized = False diff --git a/pipenv/vendor/click_completion/lib.py b/pipenv/vendor/click_completion/lib.py index cd53bc03c0..167ecfd8f9 100644 --- a/pipenv/vendor/click_completion/lib.py +++ b/pipenv/vendor/click_completion/lib.py @@ -59,7 +59,7 @@ def double_quote(s): return '"' + s.replace('"', '"\'"\'"') + '"' -def resolve_ctx(cli, prog_name, args): +def resolve_ctx(cli, prog_name, args, resilient_parsing=True): """ Parameters @@ -76,13 +76,18 @@ def resolve_ctx(cli, prog_name, args): click.core.Context A new context corresponding to the current command """ - ctx = cli.make_context(prog_name, list(args), resilient_parsing=True) + ctx = cli.make_context(prog_name, list(args), resilient_parsing=resilient_parsing) while ctx.args + ctx.protected_args and isinstance(ctx.command, MultiCommand): a = ctx.protected_args + ctx.args cmd = ctx.command.get_command(ctx, a[0]) if cmd is None: return None - ctx = cmd.make_context(a[0], a[1:], parent=ctx, resilient_parsing=True) + if hasattr(cmd, "no_args_is_help"): + no_args_is_help = cmd.no_args_is_help + 
cmd.no_args_is_help = False + ctx = cmd.make_context(a[0], a[1:], parent=ctx, resilient_parsing=resilient_parsing) + if hasattr(cmd, "no_args_is_help"): + cmd.no_args_is_help = no_args_is_help return ctx diff --git a/pipenv/vendor/passa/cli/options.py b/pipenv/vendor/passa/cli/options.py index da89a3b11b..f8ba1fe73b 100644 --- a/pipenv/vendor/passa/cli/options.py +++ b/pipenv/vendor/passa/cli/options.py @@ -46,9 +46,12 @@ def add_to_group(self, group): class ArgumentGroup(object): - def __init__(self, name, parser=None, is_mutually_exclusive=False, required=None, options=[]): + def __init__( + self, name, parser=None, + is_mutually_exclusive=False, + required=None, options=None): self.name = name - self.options = options + self.options = options or [] self.parser = parser self.required = required self.is_mutually_exclusive = is_mutually_exclusive diff --git a/pipenv/vendor/pythonfinder/__init__.py b/pipenv/vendor/pythonfinder/__init__.py index 85666b5c28..fd9ec1be5b 100644 --- a/pipenv/vendor/pythonfinder/__init__.py +++ b/pipenv/vendor/pythonfinder/__init__.py @@ -1,6 +1,6 @@ from __future__ import print_function, absolute_import -__version__ = '1.1.8' +__version__ = '1.1.9' # Add NullHandler to "pythonfinder" logger, because Python2's default root # logger has no handler and warnings like this would be reported: diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py index 7feee84e85..db7d011ec0 100644 --- a/pipenv/vendor/pythonfinder/models/python.py +++ b/pipenv/vendor/pythonfinder/models/python.py @@ -414,8 +414,8 @@ def from_path(cls, path, name=None): if not path.is_python and not IGNORE_UNSUPPORTED: raise ValueError("Not a valid python path: %s" % path.path) return - py_version = get_python_version(path.path.as_posix()) - instance_dict = cls.parse(py_version) + py_version = get_python_version(path.path.absolute().as_posix()) + instance_dict = cls.parse(py_version.strip()) if not 
isinstance(instance_dict.get("version"), Version) and not IGNORE_UNSUPPORTED: raise ValueError("Not a valid python path: %s" % path.path) return diff --git a/pipenv/vendor/pythonfinder/pythonfinder.py b/pipenv/vendor/pythonfinder/pythonfinder.py index b3bad57042..011754eafc 100644 --- a/pipenv/vendor/pythonfinder/pythonfinder.py +++ b/pipenv/vendor/pythonfinder/pythonfinder.py @@ -9,7 +9,8 @@ class Finder(object): def __init__(self, path=None, system=False, global_search=True, ignore_unsupported=True): - """Finder A cross-platform Finder for locating python and other executables. + """ + Finder A cross-platform Finder for locating python and other executables. Searches for python and other specified binaries starting in `path`, if supplied, but searching the bin path of `sys.executable` if `system=True`, and then diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index a26d054800..2debd80ee0 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -12,17 +12,17 @@ import vistir -from .environment import PYENV_ROOT +from .environment import PYENV_ROOT, ASDF_DATA_DIR from .exceptions import InvalidPythonVersion +six.add_move(six.MovedAttribute("Iterable", "collections", "collections.abc")) +from six.moves import Iterable + try: from functools import lru_cache except ImportError: from backports.functools_lru_cache import lru_cache -six.add_move(six.MovedAttribute("Iterable", "collections", "collections.abc")) -from six.moves import Iterable - PYTHON_IMPLEMENTATIONS = ( "python", "ironpython", "jython", "pypy", "anaconda", "miniconda", @@ -52,7 +52,7 @@ def get_python_version(path): version_cmd = [path, "-c", "import sys; print(sys.version.split()[0])"] try: c = vistir.misc.run(version_cmd, block=True, nospin=True, return_object=True, - combine_stderr=False) + combine_stderr=False) except OSError: raise InvalidPythonVersion("%s is not a valid python path" % path) if not c.out: @@ -64,10 +64,6 @@ 
def optional_instance_of(cls): return attr.validators.optional(attr.validators.instance_of(cls)) -def path_and_exists(path): - return attr.validators.instance_of(vistir.compat.Path) and path.exists() - - def path_is_executable(path): return os.access(str(path), os.X_OK) @@ -95,7 +91,8 @@ def path_is_python(path): @lru_cache(maxsize=1024) def ensure_path(path): - """Given a path (either a string or a Path object), expand variables and return a Path object. + """ + Given a path (either a string or a Path object), expand variables and return a Path object. :param path: A string or a :class:`~pathlib.Path` object. :type path: str or :class:`~pathlib.Path` @@ -115,6 +112,7 @@ def _filter_none(k, v): return False +# TODO: Reimplement in vistir def normalize_path(path): return os.path.normpath(os.path.normcase( os.path.abspath(os.path.expandvars(os.path.expanduser(str(path)))) @@ -128,9 +126,10 @@ def filter_pythons(path): path = vistir.compat.Path(str(path)) if not path.is_dir(): return path if path_is_python(path) else None - return filter(lambda x: path_is_python(x), path.iterdir()) + return filter(path_is_python, path.iterdir()) +# TODO: Port to vistir def unnest(item): if isinstance(item, Iterable) and not isinstance(item, six.string_types): item, target = itertools.tee(item, 2) @@ -168,5 +167,6 @@ def parse_asdf_version_order(filename=".tool-versions"): return versions.split() +# TODO: Reimplement in vistir def is_in_path(path, parent): return normalize_path(str(path)).startswith(normalize_path(str(parent))) diff --git a/pipenv/vendor/pytoml/LICENSE b/pipenv/vendor/pytoml/LICENSE deleted file mode 100644 index 9739fc67c6..0000000000 --- a/pipenv/vendor/pytoml/LICENSE +++ /dev/null @@ -1,16 +0,0 @@ -No-notice MIT License - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, 
copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/pipenv/vendor/pytoml/__init__.py b/pipenv/vendor/pytoml/__init__.py deleted file mode 100644 index 8dc731553d..0000000000 --- a/pipenv/vendor/pytoml/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .core import TomlError -from .parser import load, loads -from .writer import dump, dumps diff --git a/pipenv/vendor/pytoml/core.py b/pipenv/vendor/pytoml/core.py deleted file mode 100644 index c182734e1c..0000000000 --- a/pipenv/vendor/pytoml/core.py +++ /dev/null @@ -1,13 +0,0 @@ -class TomlError(RuntimeError): - def __init__(self, message, line, col, filename): - RuntimeError.__init__(self, message, line, col, filename) - self.message = message - self.line = line - self.col = col - self.filename = filename - - def __str__(self): - return '{}({}, {}): {}'.format(self.filename, self.line, self.col, self.message) - - def __repr__(self): - return 'TomlError({!r}, {!r}, {!r}, {!r})'.format(self.message, self.line, self.col, self.filename) diff --git a/pipenv/vendor/pytoml/parser.py b/pipenv/vendor/pytoml/parser.py deleted file mode 100644 index 9f94e9230a..0000000000 --- a/pipenv/vendor/pytoml/parser.py +++ /dev/null @@ -1,374 +0,0 @@ -import string, re, sys, datetime -from .core import TomlError - -if sys.version_info[0] == 2: - _chr = unichr -else: - _chr = chr - -def load(fin, translate=lambda t, x, v: v, object_pairs_hook=dict): - return 
loads(fin.read(), translate=translate, object_pairs_hook=object_pairs_hook, filename=getattr(fin, 'name', repr(fin))) - -def loads(s, filename='', translate=lambda t, x, v: v, object_pairs_hook=dict): - if isinstance(s, bytes): - s = s.decode('utf-8') - - s = s.replace('\r\n', '\n') - - root = object_pairs_hook() - tables = object_pairs_hook() - scope = root - - src = _Source(s, filename=filename) - ast = _p_toml(src, object_pairs_hook=object_pairs_hook) - - def error(msg): - raise TomlError(msg, pos[0], pos[1], filename) - - def process_value(v, object_pairs_hook): - kind, text, value, pos = v - if kind == 'str' and value.startswith('\n'): - value = value[1:] - if kind == 'array': - if value and any(k != value[0][0] for k, t, v, p in value[1:]): - error('array-type-mismatch') - value = [process_value(item, object_pairs_hook=object_pairs_hook) for item in value] - elif kind == 'table': - value = object_pairs_hook([(k, process_value(value[k], object_pairs_hook=object_pairs_hook)) for k in value]) - return translate(kind, text, value) - - for kind, value, pos in ast: - if kind == 'kv': - k, v = value - if k in scope: - error('duplicate_keys. 
Key "{0}" was used more than once.'.format(k)) - scope[k] = process_value(v, object_pairs_hook=object_pairs_hook) - else: - is_table_array = (kind == 'table_array') - cur = tables - for name in value[:-1]: - if isinstance(cur.get(name), list): - d, cur = cur[name][-1] - else: - d, cur = cur.setdefault(name, (None, object_pairs_hook())) - - scope = object_pairs_hook() - name = value[-1] - if name not in cur: - if is_table_array: - cur[name] = [(scope, object_pairs_hook())] - else: - cur[name] = (scope, object_pairs_hook()) - elif isinstance(cur[name], list): - if not is_table_array: - error('table_type_mismatch') - cur[name].append((scope, object_pairs_hook())) - else: - if is_table_array: - error('table_type_mismatch') - old_scope, next_table = cur[name] - if old_scope is not None: - error('duplicate_tables') - cur[name] = (scope, next_table) - - def merge_tables(scope, tables): - if scope is None: - scope = object_pairs_hook() - for k in tables: - if k in scope: - error('key_table_conflict') - v = tables[k] - if isinstance(v, list): - scope[k] = [merge_tables(sc, tbl) for sc, tbl in v] - else: - scope[k] = merge_tables(v[0], v[1]) - return scope - - return merge_tables(root, tables) - -class _Source: - def __init__(self, s, filename=None): - self.s = s - self._pos = (1, 1) - self._last = None - self._filename = filename - self.backtrack_stack = [] - - def last(self): - return self._last - - def pos(self): - return self._pos - - def fail(self): - return self._expect(None) - - def consume_dot(self): - if self.s: - self._last = self.s[0] - self.s = self[1:] - self._advance(self._last) - return self._last - return None - - def expect_dot(self): - return self._expect(self.consume_dot()) - - def consume_eof(self): - if not self.s: - self._last = '' - return True - return False - - def expect_eof(self): - return self._expect(self.consume_eof()) - - def consume(self, s): - if self.s.startswith(s): - self.s = self.s[len(s):] - self._last = s - self._advance(s) - return 
True - return False - - def expect(self, s): - return self._expect(self.consume(s)) - - def consume_re(self, re): - m = re.match(self.s) - if m: - self.s = self.s[len(m.group(0)):] - self._last = m - self._advance(m.group(0)) - return m - return None - - def expect_re(self, re): - return self._expect(self.consume_re(re)) - - def __enter__(self): - self.backtrack_stack.append((self.s, self._pos)) - - def __exit__(self, type, value, traceback): - if type is None: - self.backtrack_stack.pop() - else: - self.s, self._pos = self.backtrack_stack.pop() - return type == TomlError - - def commit(self): - self.backtrack_stack[-1] = (self.s, self._pos) - - def _expect(self, r): - if not r: - raise TomlError('msg', self._pos[0], self._pos[1], self._filename) - return r - - def _advance(self, s): - suffix_pos = s.rfind('\n') - if suffix_pos == -1: - self._pos = (self._pos[0], self._pos[1] + len(s)) - else: - self._pos = (self._pos[0] + s.count('\n'), len(s) - suffix_pos) - -_ews_re = re.compile(r'(?:[ \t]|#[^\n]*\n|#[^\n]*\Z|\n)*') -def _p_ews(s): - s.expect_re(_ews_re) - -_ws_re = re.compile(r'[ \t]*') -def _p_ws(s): - s.expect_re(_ws_re) - -_escapes = { 'b': '\b', 'n': '\n', 'r': '\r', 't': '\t', '"': '"', '\'': '\'', - '\\': '\\', '/': '/', 'f': '\f' } - -_basicstr_re = re.compile(r'[^"\\\000-\037]*') -_short_uni_re = re.compile(r'u([0-9a-fA-F]{4})') -_long_uni_re = re.compile(r'U([0-9a-fA-F]{8})') -_escapes_re = re.compile('[bnrt"\'\\\\/f]') -_newline_esc_re = re.compile('\n[ \t\n]*') -def _p_basicstr_content(s, content=_basicstr_re): - res = [] - while True: - res.append(s.expect_re(content).group(0)) - if not s.consume('\\'): - break - if s.consume_re(_newline_esc_re): - pass - elif s.consume_re(_short_uni_re) or s.consume_re(_long_uni_re): - res.append(_chr(int(s.last().group(1), 16))) - else: - s.expect_re(_escapes_re) - res.append(_escapes[s.last().group(0)]) - return ''.join(res) - -_key_re = re.compile(r'[0-9a-zA-Z-_]+') -def _p_key(s): - with s: - s.expect('"') - r 
= _p_basicstr_content(s, _basicstr_re) - s.expect('"') - return r - if s.consume('\''): - if s.consume('\'\''): - r = s.expect_re(_litstr_ml_re).group(0) - s.expect('\'\'\'') - else: - r = s.expect_re(_litstr_re).group(0) - s.expect('\'') - return r - return s.expect_re(_key_re).group(0) - -_float_re = re.compile(r'[+-]?(?:0|[1-9](?:_?\d)*)(?:\.\d(?:_?\d)*)?(?:[eE][+-]?(?:\d(?:_?\d)*))?') -_datetime_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))') - -_basicstr_ml_re = re.compile(r'(?:(?:|"|"")[^"\\\000-\011\013-\037])*') -_litstr_re = re.compile(r"[^'\000\010\012-\037]*") -_litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\010\013-\037]))*") -def _p_value(s, object_pairs_hook): - pos = s.pos() - - if s.consume('true'): - return 'bool', s.last(), True, pos - if s.consume('false'): - return 'bool', s.last(), False, pos - - if s.consume('"'): - if s.consume('""'): - r = _p_basicstr_content(s, _basicstr_ml_re) - s.expect('"""') - else: - r = _p_basicstr_content(s, _basicstr_re) - s.expect('"') - return 'str', r, r, pos - - if s.consume('\''): - if s.consume('\'\''): - r = s.expect_re(_litstr_ml_re).group(0) - s.expect('\'\'\'') - else: - r = s.expect_re(_litstr_re).group(0) - s.expect('\'') - return 'str', r, r, pos - - if s.consume_re(_datetime_re): - m = s.last() - s0 = m.group(0) - r = map(int, m.groups()[:6]) - if m.group(7): - micro = float(m.group(7)) - else: - micro = 0 - - if m.group(8): - g = int(m.group(8), 10) * 60 + int(m.group(9), 10) - tz = _TimeZone(datetime.timedelta(0, g * 60)) - else: - tz = _TimeZone(datetime.timedelta(0, 0)) - - y, m, d, H, M, S = r - dt = datetime.datetime(y, m, d, H, M, S, int(micro * 1000000), tz) - return 'datetime', s0, dt, pos - - if s.consume_re(_float_re): - m = s.last().group(0) - r = m.replace('_','') - if '.' 
in m or 'e' in m or 'E' in m: - return 'float', m, float(r), pos - else: - return 'int', m, int(r, 10), pos - - if s.consume('['): - items = [] - with s: - while True: - _p_ews(s) - items.append(_p_value(s, object_pairs_hook=object_pairs_hook)) - s.commit() - _p_ews(s) - s.expect(',') - s.commit() - _p_ews(s) - s.expect(']') - return 'array', None, items, pos - - if s.consume('{'): - _p_ws(s) - items = object_pairs_hook() - if not s.consume('}'): - k = _p_key(s) - _p_ws(s) - s.expect('=') - _p_ws(s) - items[k] = _p_value(s, object_pairs_hook=object_pairs_hook) - _p_ws(s) - while s.consume(','): - _p_ws(s) - k = _p_key(s) - _p_ws(s) - s.expect('=') - _p_ws(s) - items[k] = _p_value(s, object_pairs_hook=object_pairs_hook) - _p_ws(s) - s.expect('}') - return 'table', None, items, pos - - s.fail() - -def _p_stmt(s, object_pairs_hook): - pos = s.pos() - if s.consume( '['): - is_array = s.consume('[') - _p_ws(s) - keys = [_p_key(s)] - _p_ws(s) - while s.consume('.'): - _p_ws(s) - keys.append(_p_key(s)) - _p_ws(s) - s.expect(']') - if is_array: - s.expect(']') - return 'table_array' if is_array else 'table', keys, pos - - key = _p_key(s) - _p_ws(s) - s.expect('=') - _p_ws(s) - value = _p_value(s, object_pairs_hook=object_pairs_hook) - return 'kv', (key, value), pos - -_stmtsep_re = re.compile(r'(?:[ \t]*(?:#[^\n]*)?\n)+[ \t]*') -def _p_toml(s, object_pairs_hook): - stmts = [] - _p_ews(s) - with s: - stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook)) - while True: - s.commit() - s.expect_re(_stmtsep_re) - stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook)) - _p_ews(s) - s.expect_eof() - return stmts - -class _TimeZone(datetime.tzinfo): - def __init__(self, offset): - self._offset = offset - - def utcoffset(self, dt): - return self._offset - - def dst(self, dt): - return None - - def tzname(self, dt): - m = self._offset.total_seconds() // 60 - if m < 0: - res = '-' - m = -m - else: - res = '+' - h = m // 60 - m = m - h * 60 - return 
'{}{:.02}{:.02}'.format(res, h, m) diff --git a/pipenv/vendor/pytoml/writer.py b/pipenv/vendor/pytoml/writer.py deleted file mode 100644 index 6eaf5d76aa..0000000000 --- a/pipenv/vendor/pytoml/writer.py +++ /dev/null @@ -1,127 +0,0 @@ -from __future__ import unicode_literals -import io, datetime, math, sys - -if sys.version_info[0] == 3: - long = int - unicode = str - - -def dumps(obj, sort_keys=False): - fout = io.StringIO() - dump(obj, fout, sort_keys=sort_keys) - return fout.getvalue() - - -_escapes = {'\n': 'n', '\r': 'r', '\\': '\\', '\t': 't', '\b': 'b', '\f': 'f', '"': '"'} - - -def _escape_string(s): - res = [] - start = 0 - - def flush(): - if start != i: - res.append(s[start:i]) - return i + 1 - - i = 0 - while i < len(s): - c = s[i] - if c in '"\\\n\r\t\b\f': - start = flush() - res.append('\\' + _escapes[c]) - elif ord(c) < 0x20: - start = flush() - res.append('\\u%04x' % ord(c)) - i += 1 - - flush() - return '"' + ''.join(res) + '"' - - -def _escape_id(s): - if any(not c.isalnum() and c not in '-_' for c in s): - return _escape_string(s) - return s - - -def _format_list(v): - return '[{0}]'.format(', '.join(_format_value(obj) for obj in v)) - -# Formula from: -# https://docs.python.org/2/library/datetime.html#datetime.timedelta.total_seconds -# Once support for py26 is dropped, this can be replaced by td.total_seconds() -def _total_seconds(td): - return ((td.microseconds - + (td.seconds + td.days * 24 * 3600) * 10**6) / 10.0**6) - -def _format_value(v): - if isinstance(v, bool): - return 'true' if v else 'false' - if isinstance(v, int) or isinstance(v, long): - return unicode(v) - if isinstance(v, float): - if math.isnan(v) or math.isinf(v): - raise ValueError("{0} is not a valid TOML value".format(v)) - else: - return repr(v) - elif isinstance(v, unicode) or isinstance(v, bytes): - return _escape_string(v) - elif isinstance(v, datetime.datetime): - offs = v.utcoffset() - offs = _total_seconds(offs) // 60 if offs is not None else 0 - - if offs == 0: - 
suffix = 'Z' - else: - if offs > 0: - suffix = '+' - else: - suffix = '-' - offs = -offs - suffix = '{0}{1:.02}{2:.02}'.format(suffix, offs // 60, offs % 60) - - if v.microsecond: - return v.strftime('%Y-%m-%dT%H:%M:%S.%f') + suffix - else: - return v.strftime('%Y-%m-%dT%H:%M:%S') + suffix - elif isinstance(v, list): - return _format_list(v) - else: - raise RuntimeError(v) - - -def dump(obj, fout, sort_keys=False): - tables = [((), obj, False)] - - while tables: - name, table, is_array = tables.pop() - if name: - section_name = '.'.join(_escape_id(c) for c in name) - if is_array: - fout.write('[[{0}]]\n'.format(section_name)) - else: - fout.write('[{0}]\n'.format(section_name)) - - table_keys = sorted(table.keys()) if sort_keys else table.keys() - new_tables = [] - has_kv = False - for k in table_keys: - v = table[k] - if isinstance(v, dict): - new_tables.append((name + (k,), v, False)) - elif isinstance(v, list) and v and all(isinstance(o, dict) for o in v): - new_tables.extend((name + (k,), d, True) for d in v) - elif v is None: - # based on mojombo's comment: https://github.com/toml-lang/toml/issues/146#issuecomment-25019344 - fout.write( - '#{} = null # To use: uncomment and replace null with value\n'.format(_escape_id(k))) - has_kv = True - else: - fout.write('{0} = {1}\n'.format(_escape_id(k), _format_value(v))) - has_kv = True - - tables.extend(reversed(new_tables)) - - if (name or has_kv) and tables: - fout.write('\n') diff --git a/pipenv/vendor/requests/__version__.py b/pipenv/vendor/requests/__version__.py index be8a45fe0e..803773a0fd 100644 --- a/pipenv/vendor/requests/__version__.py +++ b/pipenv/vendor/requests/__version__.py @@ -5,8 +5,8 @@ __title__ = 'requests' __description__ = 'Python HTTP for Humans.' 
__url__ = 'http://python-requests.org' -__version__ = '2.20.0' -__build__ = 0x022000 +__version__ = '2.20.1' +__build__ = 0x022001 __author__ = 'Kenneth Reitz' __author_email__ = 'me@kennethreitz.org' __license__ = 'Apache 2.0' diff --git a/pipenv/vendor/requests/sessions.py b/pipenv/vendor/requests/sessions.py index a448bd83f2..d73d700fa6 100644 --- a/pipenv/vendor/requests/sessions.py +++ b/pipenv/vendor/requests/sessions.py @@ -19,7 +19,7 @@ from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT from .hooks import default_hooks, dispatch_hook from ._internal_utils import to_native_string -from .utils import to_key_val_list, default_headers +from .utils import to_key_val_list, default_headers, DEFAULT_PORTS from .exceptions import ( TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) @@ -128,8 +128,17 @@ def should_strip_auth(self, old_url, new_url): if (old_parsed.scheme == 'http' and old_parsed.port in (80, None) and new_parsed.scheme == 'https' and new_parsed.port in (443, None)): return False + + # Handle default port usage corresponding to scheme. 
+ changed_port = old_parsed.port != new_parsed.port + changed_scheme = old_parsed.scheme != new_parsed.scheme + default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) + if (not changed_scheme and old_parsed.port in default_port + and new_parsed.port in default_port): + return False + # Standard case: root URI must match - return old_parsed.port != new_parsed.port or old_parsed.scheme != new_parsed.scheme + return changed_port or changed_scheme def resolve_redirects(self, resp, req, stream=False, timeout=None, verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): diff --git a/pipenv/vendor/requests/utils.py b/pipenv/vendor/requests/utils.py index 0ce7fe115c..8170a8d2c4 100644 --- a/pipenv/vendor/requests/utils.py +++ b/pipenv/vendor/requests/utils.py @@ -38,6 +38,8 @@ DEFAULT_CA_BUNDLE_PATH = certs.where() +DEFAULT_PORTS = {'http': 80, 'https': 443} + if sys.platform == 'win32': # provide a proxy_bypass version on Windows without DNS lookups @@ -264,7 +266,7 @@ def from_key_val_list(value): >>> from_key_val_list([('key', 'val')]) OrderedDict([('key', 'val')]) >>> from_key_val_list('string') - ValueError: need more than 1 value to unpack + ValueError: cannot encode objects that are not 2-tuples >>> from_key_val_list({'key': 'val'}) OrderedDict([('key', 'val')]) diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index f6c985d303..32415c6134 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -1,5 +1,5 @@ # -*- coding=utf-8 -*- -__version__ = '1.2.7' +__version__ = '1.3.1' import logging import warnings diff --git a/pipenv/vendor/requirementslib/exceptions.py b/pipenv/vendor/requirementslib/exceptions.py index 23bc5e5024..17b884eb46 100644 --- a/pipenv/vendor/requirementslib/exceptions.py +++ b/pipenv/vendor/requirementslib/exceptions.py @@ -24,7 +24,7 @@ class RequirementError(Exception): class MissingParameter(Exception): def 
__init__(self, param): - super(Exception, self).__init__() + Exception.__init__(self) print("Missing parameter: %s" % param, file=sys.stderr, flush=True) @@ -43,7 +43,7 @@ def __init__(self, path, *args, **kwargs): self.path = path self.backup_path = backup_path self.show(self.path, self.backup_path) - super(OSError, self).__init__(path, *args, **kwargs) + OSError.__init__(self, path, *args, **kwargs) @classmethod def show(cls, path, backup_path=None): diff --git a/pipenv/vendor/requirementslib/models/pipfile.py b/pipenv/vendor/requirementslib/models/pipfile.py index dbb024be6b..0f6de6bfaf 100644 --- a/pipenv/vendor/requirementslib/models/pipfile.py +++ b/pipenv/vendor/requirementslib/models/pipfile.py @@ -53,17 +53,19 @@ def load(cls, f, encoding=None): _data["source"] = _data.get("source", []) + _data.get("sources", []) _data = reorder_source_keys(_data) if "source" not in _data: - # HACK: There is no good way to prepend a section to an existing - # TOML document, but there's no good way to copy non-structural - # content from one TOML document to another either. Modify the - # TOML content directly, and load the new in-memory document. - sep = "" if content.startswith("\n") else "\n" - content = plette.pipfiles.DEFAULT_SOURCE_TOML + sep + content + if "sources" in _data: + _data["source"] = _data["sources"] + content = tomlkit.dumps(_data) + else: + # HACK: There is no good way to prepend a section to an existing + # TOML document, but there's no good way to copy non-structural + # content from one TOML document to another either. Modify the + # TOML content directly, and load the new in-memory document. 
+ sep = "" if content.startswith("\n") else "\n" + content = plette.pipfiles.DEFAULT_SOURCE_TOML + sep + content data = tomlkit.loads(content) - data = reorder_source_keys(data) instance = cls(data) - new_data = reorder_source_keys(instance._data) - instance._data = new_data + instance._data = dict(instance._data) return instance def __getattribute__(self, key): diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 6107a24075..006ee60969 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -223,7 +223,6 @@ def run_setup(self): if self.setup_py is not None and self.setup_py.exists(): target_cwd = self.setup_py.parent.as_posix() with cd(target_cwd), _suppress_distutils_logs(): - from setuptools.dist import distutils script_name = self.setup_py.as_posix() args = ["egg_info", "--egg-base", self.base_dir] g = {"__file__": script_name, "__name__": "__main__"} @@ -306,7 +305,7 @@ def get_info(self): if not self.requires or not self.name: try: self.run_setup() - except Exception as e: + except Exception: self.get_egg_metadata() if not self.requires or not self.name: self.get_egg_metadata() diff --git a/pipenv/vendor/requirementslib/utils.py b/pipenv/vendor/requirementslib/utils.py index 312efbb324..f3653e32c1 100644 --- a/pipenv/vendor/requirementslib/utils.py +++ b/pipenv/vendor/requirementslib/utils.py @@ -88,7 +88,7 @@ def strip_ssh_from_git_uri(uri): def add_ssh_scheme_to_git_uri(uri): - """Cleans VCS uris from pip format""" + """Cleans VCS uris from pipenv.patched.notpip format""" if isinstance(uri, six.string_types): # Add scheme for parsing purposes, this is also what pip does if uri.startswith("git+") and "://" not in uri: diff --git a/pipenv/vendor/vistir/__init__.py b/pipenv/vendor/vistir/__init__.py index f3554d5606..809c973cf2 100644 --- a/pipenv/vendor/vistir/__init__.py +++ b/pipenv/vendor/vistir/__init__.py @@ -31,7 
+31,7 @@ from .spin import VistirSpinner, create_spinner -__version__ = "0.2.4" +__version__ = '0.2.4' __all__ = [ diff --git a/pipenv/vendor/vistir/contextmanagers.py b/pipenv/vendor/vistir/contextmanagers.py index 3f19112087..0920a9c3dd 100644 --- a/pipenv/vendor/vistir/contextmanagers.py +++ b/pipenv/vendor/vistir/contextmanagers.py @@ -63,7 +63,7 @@ def cd(path): >>> print(os.path.abspath(os.curdir)) '/home/user/code/myrepo' >>> with cd("/home/user/code/otherdir/subdir"): - print("Changed directory: %s" % os.path.abspath(os.curdir)) + ... print("Changed directory: %s" % os.path.abspath(os.curdir)) Changed directory: /home/user/code/otherdir/subdir >>> print(os.path.abspath(os.curdir)) '/home/user/code/myrepo' diff --git a/pipenv/vendor/vistir/path.py b/pipenv/vendor/vistir/path.py index 23ae025205..febaddbccc 100644 --- a/pipenv/vendor/vistir/path.py +++ b/pipenv/vendor/vistir/path.py @@ -1,5 +1,5 @@ # -*- coding=utf-8 -*- -from __future__ import absolute_import, unicode_literals +from __future__ import absolute_import, unicode_literals, print_function import atexit import errno @@ -275,9 +275,12 @@ def set_write_bit(fn): file_stat = os.stat(fn).st_mode os.chmod(fn, file_stat | stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) if not os.path.isdir(fn): - return + try: + os.chflags(fn, 0) + except AttributeError: + pass for root, dirs, files in os.walk(fn, topdown=False): - for dir_ in [os.path.join(root, d) for d in dirs]: + for dir_ in [os.path.join(root,d) for d in dirs]: set_write_bit(dir_) for file_ in [os.path.join(root, f) for f in files]: set_write_bit(file_) From 5e5e1ed4bc0e09c3a45394c2369502e0ead217c2 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 14 Nov 2018 12:23:58 -0500 Subject: [PATCH 68/71] Stray print Signed-off-by: Dan Ryan --- pipenv/cli/command.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pipenv/cli/command.py b/pipenv/cli/command.py index 75a3b96613..14120e35fc 100644 --- a/pipenv/cli/command.py +++ b/pipenv/cli/command.py @@ -226,7 
+226,6 @@ def install( ): """Installs provided packages and adds them to Pipfile, or (if no packages are given), installs all packages from Pipfile.""" from ..core import do_install - echo("Skip lock value: %s" % state.installstate.skip_lock) retcode = do_install( dev=state.installstate.dev, From 08d94d4ebc7f21e49ac1781c73d2c03433ac730d Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 14 Nov 2018 16:53:20 -0500 Subject: [PATCH 69/71] Update core and rebuild ci Signed-off-by: Dan Ryan --- pipenv/core.py | 25 ++++++++++++++++--------- pipenv/exceptions.py | 2 +- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/pipenv/core.py b/pipenv/core.py index 8504553c27..d3e9002c78 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -36,8 +36,6 @@ download_file, is_pinned, is_star, - rmtree, - clean_resolved_dep, parse_indexes, escape_cmd, create_spinner, @@ -135,8 +133,8 @@ def do_clear(): from pip import locations try: - shutil.rmtree(PIPENV_CACHE_DIR) - shutil.rmtree(locations.USER_CACHE_DIR) + vistir.path.rmtree(PIPENV_CACHE_DIR) + vistir.path.rmtree(locations.USER_CACHE_DIR) except OSError as e: # Ignore FileNotFoundError. This is needed for Python 2.7. import errno @@ -186,7 +184,7 @@ def cleanup_virtualenv(bare=True): click.echo(crayons.red("Environment creation aborted.")) try: # Delete the virtualenv. 
- rmtree(project.virtualenv_location) + vistir.path.rmtree(project.virtualenv_location) except OSError as e: click.echo( "{0} An error occurred while removing {1}!".format( @@ -657,8 +655,8 @@ def do_where(virtualenv=False, bare=True): def _cleanup_procs(procs, concurrent, failed_deps_queue, retry=True): while not procs.empty(): c = procs.get() - # if concurrent: - c.block() + if concurrent: + c.block() failed = False if c.return_code != 0: failed = True @@ -740,6 +738,8 @@ def batch_install(deps_list, procs, failed_deps_queue, trusted_hosts=trusted_hosts, extra_indexes=extra_indexes ) + if dep.is_vcs: + c.block() if procs.qsize() < nprocs: c.dep = dep procs.put(c) @@ -760,7 +760,8 @@ def do_install_dependencies( requirements_dir=None, pypi_mirror=False, ): - """"Executes the install functionality. + """" + Executes the install functionality. If requirements is True, simply spits out a requirements format to stdout. """ @@ -1060,6 +1061,12 @@ def do_lock( lockfile[section_name][canonical_name] = cached_lockfile[ section_name ][canonical_name].copy() + for key in ["default", "develop"]: + packages = set(cached_lockfile[key].keys()) + new_lockfile = set(lockfile[key].keys()) + missing = packages - new_lockfile + for missing_pkg in missing: + lockfile[key][missing_pkg] = cached_lockfile[key][missing_pkg].copy() # Overwrite any develop packages with default packages. lockfile["develop"].update(overwrite_dev(lockfile.get("default", {}), lockfile["develop"])) if write: @@ -1085,7 +1092,7 @@ def do_purge(bare=False, downloads=False, allow_global=False): if downloads: if not bare: click.echo(crayons.normal(fix_utf8("Clearing out downloads directory…"), bold=True)) - shutil.rmtree(project.download_location) + vistir.path.rmtree(project.download_location) return # Remove comments from the output, if any. 
diff --git a/pipenv/exceptions.py b/pipenv/exceptions.py index 3fda80214c..62e25d5397 100644 --- a/pipenv/exceptions.py +++ b/pipenv/exceptions.py @@ -202,7 +202,7 @@ def __init__(self, hint=None, **kwargs): class SetupException(PipenvException): def __init__(self, message=None, **kwargs): - PipenvException.__init__(message, **kwargs) + PipenvException.__init__(self, message, **kwargs) class VirtualenvException(PipenvException): From a8f4c7e2a3839667a8ff0e0626768e635b44004a Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 14 Nov 2018 16:56:06 -0500 Subject: [PATCH 70/71] accidental commit Signed-off-by: Dan Ryan --- pipenv/core.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/pipenv/core.py b/pipenv/core.py index d3e9002c78..f815e5de9b 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -1061,12 +1061,6 @@ def do_lock( lockfile[section_name][canonical_name] = cached_lockfile[ section_name ][canonical_name].copy() - for key in ["default", "develop"]: - packages = set(cached_lockfile[key].keys()) - new_lockfile = set(lockfile[key].keys()) - missing = packages - new_lockfile - for missing_pkg in missing: - lockfile[key][missing_pkg] = cached_lockfile[key][missing_pkg].copy() # Overwrite any develop packages with default packages. 
lockfile["develop"].update(overwrite_dev(lockfile.get("default", {}), lockfile["develop"])) if write: From 41cbe4dbdd927206cca6d07150b73f5eb434baff Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 14 Nov 2018 17:01:30 -0500 Subject: [PATCH 71/71] Pin postreleases to pass packaging ci Signed-off-by: Dan Ryan --- pipenv/vendor/vendor.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 0a98277beb..ff7226b2de 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -21,13 +21,13 @@ pipdeptree==0.13.0 pipreqs==0.4.9 docopt==0.6.2 yarg==0.1.9 -pythonfinder==1.1.9 +pythonfinder==1.1.9.post1 requests==2.20.1 chardet==3.0.4 idna==2.7 urllib3==1.24 certifi==2018.10.15 -requirementslib==1.3.1 +requirementslib==1.3.1.post1 attrs==18.2.0 distlib==0.2.8 packaging==18.0