diff --git a/docs/advanced.rst b/docs/advanced.rst index 4d0119fcd0..0ffe831711 100644 --- a/docs/advanced.rst +++ b/docs/advanced.rst @@ -58,7 +58,6 @@ Alternatively, you can set the ``PIPENV_PYPI_MIRROR`` environment variable. ☤ Injecting credentials into Pipfiles via environment variables ----------------------------------------------------------------- - Pipenv will expand environment variables (if defined) in your Pipfile. Quite useful if you need to authenticate to a private PyPI:: @@ -76,6 +75,17 @@ If your credentials contain a special character, surround the references to the [[source]] url = "https://$USERNAME:'${PASSWORD}'@mypypi.example.com/simple" +Environment variables may be specified as ``${MY_ENVAR}`` or ``$MY_ENVAR``. + +On Windows, ``%MY_ENVAR%`` is supported in addition to ``${MY_ENVAR}`` or ``$MY_ENVAR``. + +Environment variables in the URL part of requirement specifiers can also be expanded, where the variable must be in the form of ``${VAR_NAME}``. Neither ``$VAR_NAME`` nor ``%VAR_NAME%`` is acceptable:: + + [[package]] + requests = {git = "git://${USERNAME}:${PASSWORD}@private.git.com/psf/requests.git", ref = "2.22.0"} + +Keep in mind that environment variables are expanded in runtime, leaving the entries in ``Pipfile`` or ``Pipfile.lock`` untouched. This is to avoid the accidental leakage of credentials in the source code. + ☤ Specifying Basically Anything ------------------------------- @@ -436,32 +446,6 @@ You can then display the names and commands of your shortcuts by running ``pipen command script echospam echo I am really a very silly example - -☤ Support for Environment Variables ------------------------------------ - -Pipenv supports the usage of environment variables in place of authentication fragments -in your Pipfile. These will only be parsed if they are present in the ``[[source]]`` -section. For example: - -.. 
code-block:: toml - - [[source]] - url = "https://${PYPI_USERNAME}:${PYPI_PASSWORD}@my_private_repo.example.com/simple" - verify_ssl = true - name = "pypi" - - [dev-packages] - - [packages] - requests = {version="*", index="home"} - maya = {version="*", index="pypi"} - records = "*" - -Environment variables may be specified as ``${MY_ENVAR}`` or ``$MY_ENVAR``. - -On Windows, ``%MY_ENVAR%`` is supported in addition to ``${MY_ENVAR}`` or ``$MY_ENVAR``. - .. _configuration-with-environment-variables: ☤ Configuration With Environment Variables diff --git a/news/3516.feature.rst b/news/3516.feature.rst new file mode 100644 index 0000000000..5036250b4f --- /dev/null +++ b/news/3516.feature.rst @@ -0,0 +1 @@ +Support expanding environment variables in requirement URLs. diff --git a/news/4533.vendor.rst b/news/4533.vendor.rst new file mode 100644 index 0000000000..3f742d9941 --- /dev/null +++ b/news/4533.vendor.rst @@ -0,0 +1,25 @@ +Update vendored dependencies: +- ``colorama`` from ``0.4.3`` to ``0.4.4`` +- ``python-dotenv`` from ``0.10.3`` to ``0.15.0`` +- ``first`` from ``2.0.1`` to ``2.0.2`` +- ``iso8601`` from ``0.1.12`` to ``0.1.13`` +- ``parse`` from ``1.15.0`` to ``1.18.0`` +- ``pipdeptree`` from ``0.13.2`` to ``1.0.0`` +- ``requests`` from ``2.23.0`` to ``2.25.0`` +- ``idna`` from ``2.9`` to ``2.10`` +- ``urllib3`` from ``1.25.9`` to ``1.26.1`` +- ``certifi`` from ``2020.4.5.1`` to ``2020.11.8`` +- ``requirementslib`` from ``1.5.15`` to ``1.5.16`` +- ``attrs`` from ``19.3.0`` to ``20.3.0`` +- ``distlib`` from ``0.3.0`` to ``0.3.1`` +- ``packaging`` from ``20.3`` to ``20.4`` +- ``six`` from ``1.14.0`` to ``1.15.0`` +- ``semver`` from ``2.9.0`` to ``2.13.0`` +- ``toml`` from ``0.10.1`` to ``0.10.2`` +- ``cached-property`` from ``1.5.1`` to ``1.5.2`` +- ``yaspin`` from ``0.14.3`` to ``1.2.0`` +- ``resolvelib`` from ``0.3.0`` to ``0.5.2`` +- ``pep517`` from ``0.8.2`` to ``0.9.1`` +- ``zipp`` from ``0.6.0`` to ``1.2.0`` +- ``importlib-metadata`` from ``1.6.0`` to 
``2.0.0`` +- ``importlib-resources`` from ``1.5.0`` to ``3.3.0`` diff --git a/pipenv/vendor/attr/__init__.py b/pipenv/vendor/attr/__init__.py index 9ff4d47ffe..bf329cad5c 100644 --- a/pipenv/vendor/attr/__init__.py +++ b/pipenv/vendor/attr/__init__.py @@ -1,10 +1,12 @@ from __future__ import absolute_import, division, print_function +import sys + from functools import partial -from . import converters, exceptions, filters, validators +from . import converters, exceptions, filters, setters, validators from ._config import get_run_validators, set_run_validators -from ._funcs import asdict, assoc, astuple, evolve, has +from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types from ._make import ( NOTHING, Attribute, @@ -19,7 +21,7 @@ from ._version_info import VersionInfo -__version__ = "19.3.0" +__version__ = "20.3.0" __version_info__ = VersionInfo._from_version_string(__version__) __title__ = "attrs" @@ -39,7 +41,6 @@ ib = attr = attrib dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) - __all__ = [ "Attribute", "Factory", @@ -61,8 +62,15 @@ "has", "ib", "make_class", + "resolve_types", "s", "set_run_validators", + "setters", "validate", "validators", ] + +if sys.version_info[:2] >= (3, 6): + from ._next_gen import define, field, frozen, mutable + + __all__.extend((define, field, frozen, mutable)) diff --git a/pipenv/vendor/attr/__init__.pyi b/pipenv/vendor/attr/__init__.pyi index 38f16f06ba..442d6e77fb 100644 --- a/pipenv/vendor/attr/__init__.pyi +++ b/pipenv/vendor/attr/__init__.pyi @@ -18,6 +18,7 @@ from typing import ( from . import exceptions as exceptions from . import filters as filters from . import converters as converters +from . import setters as setters from . 
import validators as validators from ._version_info import VersionInfo @@ -37,20 +38,26 @@ _T = TypeVar("_T") _C = TypeVar("_C", bound=type) _ValidatorType = Callable[[Any, Attribute[_T], _T], Any] -_ConverterType = Callable[[Any], _T] +_ConverterType = Callable[[Any], Any] _FilterType = Callable[[Attribute[_T], _T], bool] _ReprType = Callable[[Any], str] _ReprArgType = Union[bool, _ReprType] -# FIXME: in reality, if multiple validators are passed they must be in a list or tuple, -# but those are invariant and so would prevent subtypes of _ValidatorType from working -# when passed in a list or tuple. +_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any] +_OnSetAttrArgType = Union[ + _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType +] +_FieldTransformer = Callable[[type, List[Attribute]], List[Attribute]] +# FIXME: in reality, if multiple validators are passed they must be in a list +# or tuple, but those are invariant and so would prevent subtypes of +# _ValidatorType from working when passed in a list or tuple. _ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] # _make -- NOTHING: object -# NOTE: Factory lies about its return type to make this possible: `x: List[int] = Factory(list)` +# NOTE: Factory lies about its return type to make this possible: +# `x: List[int] # = Factory(list)` # Work around mypy issue #4554 in the common case by using an overload. @overload def Factory(factory: Callable[[], _T]) -> _T: ... @@ -70,16 +77,17 @@ class Attribute(Generic[_T]): order: bool hash: Optional[bool] init: bool - converter: Optional[_ConverterType[_T]] + converter: Optional[_ConverterType] metadata: Dict[Any, Any] type: Optional[Type[_T]] kw_only: bool + on_setattr: _OnSetAttrType # NOTE: We had several choices for the annotation to use for type arg: # 1) Type[_T] # - Pros: Handles simple cases correctly -# - Cons: Might produce less informative errors in the case of conflicting TypeVars -# e.g. 
`attr.ib(default='bad', type=int)` +# - Cons: Might produce less informative errors in the case of conflicting +# TypeVars e.g. `attr.ib(default='bad', type=int)` # 2) Callable[..., _T] # - Pros: Better error messages than #1 for conflicting TypeVars # - Cons: Terrible error messages for validator checks. @@ -97,7 +105,8 @@ class Attribute(Generic[_T]): # This makes this type of assignments possible: # x: int = attr(8) # -# This form catches explicit None or no default but with no other arguments returns Any. +# This form catches explicit None or no default but with no other arguments +# returns Any. @overload def attrib( default: None = ..., @@ -113,9 +122,11 @@ def attrib( kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> Any: ... -# This form catches an explicit None or no default and infers the type from the other arguments. +# This form catches an explicit None or no default and infers the type from the +# other arguments. @overload def attrib( default: None = ..., @@ -126,11 +137,12 @@ def attrib( init: bool = ..., metadata: Optional[Mapping[Any, Any]] = ..., type: Optional[Type[_T]] = ..., - converter: Optional[_ConverterType[_T]] = ..., + converter: Optional[_ConverterType] = ..., factory: Optional[Callable[[], _T]] = ..., kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> _T: ... # This form catches an explicit default argument. @@ -144,11 +156,12 @@ def attrib( init: bool = ..., metadata: Optional[Mapping[Any, Any]] = ..., type: Optional[Type[_T]] = ..., - converter: Optional[_ConverterType[_T]] = ..., + converter: Optional[_ConverterType] = ..., factory: Optional[Callable[[], _T]] = ..., kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> _T: ... # This form covers type=non-Type: e.g. 
forward references (str), Any @@ -162,11 +175,83 @@ def attrib( init: bool = ..., metadata: Optional[Mapping[Any, Any]] = ..., type: object = ..., - converter: Optional[_ConverterType[_T]] = ..., + converter: Optional[_ConverterType] = ..., factory: Optional[Callable[[], _T]] = ..., kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... +@overload +def field( + *, + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def field( + *, + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def field( + *, + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. 
forward references (str), Any +@overload +def field( + *, + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> Any: ... @overload def attrs( @@ -187,6 +272,11 @@ def attrs( auto_exc: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., ) -> _C: ... @overload def attrs( @@ -207,8 +297,62 @@ def attrs( auto_exc: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., +) -> Callable[[_C], _C]: ... +@overload +def define( + maybe_cls: _C, + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., +) -> _C: ... 
+@overload +def define( + maybe_cls: None = ..., + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., ) -> Callable[[_C], _C]: ... +mutable = define +frozen = define # they differ only in their defaults + # TODO: add support for returning NamedTuple from the mypy plugin class _Fields(Tuple[Attribute[Any], ...]): def __getattr__(self, name: str) -> Attribute[Any]: ... @@ -216,9 +360,15 @@ class _Fields(Tuple[Attribute[Any], ...]): def fields(cls: type) -> _Fields: ... def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ... def validate(inst: Any) -> None: ... +def resolve_types( + cls: _C, + globalns: Optional[Dict[str, Any]] = ..., + localns: Optional[Dict[str, Any]] = ..., +) -> _C: ... # TODO: add support for returning a proper attrs class from the mypy plugin -# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', [attr.ib()])` is valid +# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', +# [attr.ib()])` is valid def make_class( name: str, attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], @@ -238,12 +388,16 @@ def make_class( auto_exc: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., + collect_by_mro: bool = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., ) -> type: ... # _funcs -- # TODO: add support for returning TypedDict from the mypy plugin -# FIXME: asdict/astuple do not honor their factory args. 
waiting on one of these: +# FIXME: asdict/astuple do not honor their factory args. Waiting on one of +# these: # https://github.com/python/mypy/issues/4236 # https://github.com/python/typing/issues/253 def asdict( @@ -252,6 +406,7 @@ def asdict( filter: Optional[_FilterType[Any]] = ..., dict_factory: Type[Mapping[Any, Any]] = ..., retain_collection_types: bool = ..., + value_serializer: Optional[Callable[[type, Attribute, Any], Any]] = ..., ) -> Dict[str, Any]: ... # TODO: add support for returning NamedTuple from the mypy plugin diff --git a/pipenv/vendor/attr/_compat.py b/pipenv/vendor/attr/_compat.py index a915db8ebe..b0ead6e1c8 100644 --- a/pipenv/vendor/attr/_compat.py +++ b/pipenv/vendor/attr/_compat.py @@ -19,9 +19,10 @@ if PY2: - from UserDict import IterableUserDict from collections import Mapping, Sequence + from UserDict import IterableUserDict + # We 'bundle' isclass instead of using inspect as importing inspect is # fairly expensive (order of 10-15 ms for a modern machine in 2016) def isclass(klass): @@ -90,7 +91,7 @@ def metadata_proxy(d): res.data.update(d) # We blocked update, so we have to do it like this. return res - def just_warn(*args, **kw): # pragma: nocover + def just_warn(*args, **kw): # pragma: no cover """ We only warn on Python 3 because we are not aware of any concrete consequences of not setting the cell on Python 2. @@ -131,7 +132,7 @@ def make_set_closure_cell(): """ # pypy makes this easy. (It also supports the logic below, but # why not do the easy/fast thing?) - if PYPY: # pragma: no cover + if PYPY: def set_closure_cell(cell, value): cell.__setstate__((value,)) diff --git a/pipenv/vendor/attr/_funcs.py b/pipenv/vendor/attr/_funcs.py index c077e4284f..e6c930cbd1 100644 --- a/pipenv/vendor/attr/_funcs.py +++ b/pipenv/vendor/attr/_funcs.py @@ -13,6 +13,7 @@ def asdict( filter=None, dict_factory=dict, retain_collection_types=False, + value_serializer=None, ): """ Return the ``attrs`` attribute values of *inst* as a dict. 
@@ -32,6 +33,10 @@ def asdict( :param bool retain_collection_types: Do not convert to ``list`` when encountering an attribute whose type is ``tuple`` or ``set``. Only meaningful if ``recurse`` is ``True``. + :param Optional[callable] value_serializer: A hook that is called for every + attribute or dict key/value. It receives the current instance, field + and value and must return the (updated) value. The hook is run *after* + the optional *filter* has been applied. :rtype: return type of *dict_factory* @@ -40,6 +45,7 @@ def asdict( .. versionadded:: 16.0.0 *dict_factory* .. versionadded:: 16.1.0 *retain_collection_types* + .. versionadded:: 20.3.0 *value_serializer* """ attrs = fields(inst.__class__) rv = dict_factory() @@ -47,17 +53,30 @@ def asdict( v = getattr(inst, a.name) if filter is not None and not filter(a, v): continue + + if value_serializer is not None: + v = value_serializer(inst, a, v) + if recurse is True: if has(v.__class__): rv[a.name] = asdict( - v, True, filter, dict_factory, retain_collection_types + v, + True, + filter, + dict_factory, + retain_collection_types, + value_serializer, ) - elif isinstance(v, (tuple, list, set)): + elif isinstance(v, (tuple, list, set, frozenset)): cf = v.__class__ if retain_collection_types is True else list rv[a.name] = cf( [ _asdict_anything( - i, filter, dict_factory, retain_collection_types + i, + filter, + dict_factory, + retain_collection_types, + value_serializer, ) for i in v ] @@ -67,10 +86,18 @@ def asdict( rv[a.name] = df( ( _asdict_anything( - kk, filter, df, retain_collection_types + kk, + filter, + df, + retain_collection_types, + value_serializer, ), _asdict_anything( - vv, filter, df, retain_collection_types + vv, + filter, + df, + retain_collection_types, + value_serializer, ), ) for kk, vv in iteritems(v) @@ -82,19 +109,36 @@ def asdict( return rv -def _asdict_anything(val, filter, dict_factory, retain_collection_types): +def _asdict_anything( + val, + filter, + dict_factory, + 
retain_collection_types, + value_serializer, +): """ ``asdict`` only works on attrs instances, this works on anything. """ if getattr(val.__class__, "__attrs_attrs__", None) is not None: # Attrs class. - rv = asdict(val, True, filter, dict_factory, retain_collection_types) - elif isinstance(val, (tuple, list, set)): + rv = asdict( + val, + True, + filter, + dict_factory, + retain_collection_types, + value_serializer, + ) + elif isinstance(val, (tuple, list, set, frozenset)): cf = val.__class__ if retain_collection_types is True else list rv = cf( [ _asdict_anything( - i, filter, dict_factory, retain_collection_types + i, + filter, + dict_factory, + retain_collection_types, + value_serializer, ) for i in val ] @@ -103,13 +147,20 @@ def _asdict_anything(val, filter, dict_factory, retain_collection_types): df = dict_factory rv = df( ( - _asdict_anything(kk, filter, df, retain_collection_types), - _asdict_anything(vv, filter, df, retain_collection_types), + _asdict_anything( + kk, filter, df, retain_collection_types, value_serializer + ), + _asdict_anything( + vv, filter, df, retain_collection_types, value_serializer + ), ) for kk, vv in iteritems(val) ) else: rv = val + if value_serializer is not None: + rv = value_serializer(None, None, rv) + return rv @@ -164,7 +215,7 @@ def astuple( retain_collection_types=retain, ) ) - elif isinstance(v, (tuple, list, set)): + elif isinstance(v, (tuple, list, set, frozenset)): cf = v.__class__ if retain is True else list rv.append( cf( @@ -209,6 +260,7 @@ def astuple( rv.append(v) else: rv.append(v) + return rv if tuple_factory is list else tuple_factory(rv) @@ -287,4 +339,52 @@ def evolve(inst, **changes): init_name = attr_name if attr_name[0] != "_" else attr_name[1:] if init_name not in changes: changes[init_name] = getattr(inst, attr_name) + return cls(**changes) + + +def resolve_types(cls, globalns=None, localns=None): + """ + Resolve any strings and forward annotations in type annotations. 
+ + This is only required if you need concrete types in `Attribute`'s *type* + field. In other words, you don't need to resolve your types if you only + use them for static type checking. + + With no arguments, names will be looked up in the module in which the class + was created. If this is not what you want, e.g. if the name only exists + inside a method, you may pass *globalns* or *localns* to specify other + dictionaries in which to look up these names. See the docs of + `typing.get_type_hints` for more details. + + :param type cls: Class to resolve. + :param Optional[dict] globalns: Dictionary containing global variables. + :param Optional[dict] localns: Dictionary containing local variables. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + :raise NameError: If types cannot be resolved because of missing variables. + + :returns: *cls* so you can use this function also as a class decorator. + Please note that you have to apply it **after** `attr.s`. That means + the decorator has to come in the line **before** `attr.s`. + + .. versionadded:: 20.1.0 + """ + try: + # Since calling get_type_hints is expensive we cache whether we've + # done it already. + cls.__attrs_types_resolved__ + except AttributeError: + import typing + + hints = typing.get_type_hints(cls, globalns=globalns, localns=localns) + for field in fields(cls): + if field.name in hints: + # Since fields have been frozen we must work around it. + _obj_setattr(field, "type", hints[field.name]) + cls.__attrs_types_resolved__ = True + + # Return the class so you can use it as a decorator too. + return cls diff --git a/pipenv/vendor/attr/_make.py b/pipenv/vendor/attr/_make.py index 46f9c54ec1..49484f935f 100644 --- a/pipenv/vendor/attr/_make.py +++ b/pipenv/vendor/attr/_make.py @@ -9,9 +9,10 @@ from operator import itemgetter -from . import _config +from . 
import _config, setters from ._compat import ( PY2, + PYPY, isclass, iteritems, metadata_proxy, @@ -29,7 +30,7 @@ # This is used at least twice, so cache it here. _obj_setattr = object.__setattr__ -_init_converter_pat = "__attr_converter_{}" +_init_converter_pat = "__attr_converter_%s" _init_factory_pat = "__attr_factory_{}" _tuple_property_pat = ( " {attr_name} = _attrs_property(_attrs_itemgetter({index}))" @@ -70,6 +71,31 @@ def __repr__(self): """ +class _CacheHashWrapper(int): + """ + An integer subclass that pickles / copies as None + + This is used for non-slots classes with ``cache_hash=True``, to avoid + serializing a potentially (even likely) invalid hash value. Since ``None`` + is the default value for uncalculated hashes, whenever this is copied, + the copy's value for the hash should automatically reset. + + See GH #613 for more details. + """ + + if PY2: + # For some reason `type(None)` isn't callable in Python 2, but we don't + # actually need a constructor for None objects, we just need any + # available function that returns None. + def __reduce__(self, _none_constructor=getattr, _args=(0, "", None)): + return _none_constructor, _args + + else: + + def __reduce__(self, _none_constructor=type(None), _args=()): + return _none_constructor, _args + + def attrib( default=NOTHING, validator=None, @@ -84,6 +110,7 @@ def attrib( kw_only=False, eq=None, order=None, + on_setattr=None, ): """ Create a new attribute on a class. @@ -101,7 +128,7 @@ def attrib( used to construct a new value (useful for mutable data types like lists or dicts). - If a default is not set (or set manually to ``attr.NOTHING``), a value + If a default is not set (or set manually to `attr.NOTHING`), a value *must* be supplied when instantiating; otherwise a `TypeError` will be raised. @@ -110,7 +137,7 @@ def attrib( :type default: Any value :param callable factory: Syntactic sugar for - ``default=attr.Factory(callable)``. + ``default=attr.Factory(factory)``. 
:param validator: `callable` that is called by ``attrs``-generated ``__init__`` methods after the instance has been initialized. They @@ -120,7 +147,7 @@ def attrib( The return value is *not* inspected so the validator has to throw an exception itself. - If a ``list`` is passed, its items are treated as validators and must + If a `list` is passed, its items are treated as validators and must all pass. Validators can be globally disabled and re-enabled using @@ -128,7 +155,7 @@ def attrib( The validator can also be set using decorator notation as shown below. - :type validator: ``callable`` or a ``list`` of ``callable``\\ s. + :type validator: `callable` or a `list` of `callable`\\ s. :param repr: Include this attribute in the generated ``__repr__`` method. If ``True``, include the attribute; if ``False``, omit it. By @@ -137,7 +164,7 @@ def attrib( value and returns a string. Note that the resulting string is used as-is, i.e. it will be used directly *instead* of calling ``repr()`` (the default). - :type repr: a ``bool`` or a ``callable`` to use a custom function. + :type repr: a `bool` or a `callable` to use a custom function. :param bool eq: If ``True`` (default), include this attribute in the generated ``__eq__`` and ``__ne__`` methods that check two instances for equality. @@ -145,17 +172,16 @@ def attrib( generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. :param bool cmp: Setting to ``True`` is equivalent to setting ``eq=True, order=True``. Deprecated in favor of *eq* and *order*. - :param hash: Include this attribute in the generated ``__hash__`` - method. If ``None`` (default), mirror *eq*'s value. This is the - correct behavior according the Python spec. Setting this value to - anything else than ``None`` is *discouraged*. - :type hash: ``bool`` or ``None`` + :param Optional[bool] hash: Include this attribute in the generated + ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. 
This + is the correct behavior according the Python spec. Setting this value + to anything else than ``None`` is *discouraged*. :param bool init: Include this attribute in the generated ``__init__`` method. It is possible to set this to ``False`` and set a default value. In that case this attributed is unconditionally initialized with the specified default value or factory. :param callable converter: `callable` that is called by - ``attrs``-generated ``__init__`` methods to converter attribute's value + ``attrs``-generated ``__init__`` methods to convert attribute's value to the desired format. It is given the passed-in value, and the returned value will be used as the new value of the attribute. The value is converted before being passed to the validator, if any. @@ -174,6 +200,12 @@ def attrib( :param kw_only: Make this attribute keyword-only (Python 3+) in the generated ``__init__`` (if ``init`` is ``False``, this parameter is ignored). + :param on_setattr: Allows to overwrite the *on_setattr* setting from + `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used. + Set to `attr.setters.NO_OP` to run **no** `setattr` hooks for this + attribute -- regardless of the setting in `attr.s`. + :type on_setattr: `callable`, or a list of callables, or `None`, or + `attr.setters.NO_OP` .. versionadded:: 15.2.0 *convert* .. versionadded:: 16.3.0 *metadata* @@ -191,8 +223,10 @@ def attrib( .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 """ - eq, order = _determine_eq_order(cmp, eq, order) + eq, order = _determine_eq_order(cmp, eq, order, True) if hash is not None and hash is not True and hash is not False: raise TypeError( @@ -212,6 +246,16 @@ def attrib( if metadata is None: metadata = {} + # Apply syntactic sugar by auto-wrapping. 
+ if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + if validator and isinstance(validator, (list, tuple)): + validator = and_(*validator) + + if converter and isinstance(converter, (list, tuple)): + converter = pipe(*converter) + return _CountingAttr( default=default, validator=validator, @@ -225,6 +269,7 @@ def attrib( kw_only=kw_only, eq=eq, order=order, + on_setattr=on_setattr, ) @@ -282,20 +327,32 @@ def _is_class_var(annot): return str(annot).startswith(_classvar_prefixes) -def _get_annotations(cls): +def _has_own_attribute(cls, attrib_name): """ - Get annotations for *cls*. + Check whether *cls* defines *attrib_name* (and doesn't just inherit it). + + Requires Python 3. """ - anns = getattr(cls, "__annotations__", None) - if anns is None: - return {} + attr = getattr(cls, attrib_name, _sentinel) + if attr is _sentinel: + return False - # Verify that the annotations aren't merely inherited. for base_cls in cls.__mro__[1:]: - if anns is getattr(base_cls, "__annotations__", None): - return {} + a = getattr(base_cls, attrib_name, None) + if attr is a: + return False - return anns + return True + + +def _get_annotations(cls): + """ + Get annotations for *cls*. + """ + if _has_own_attribute(cls, "__annotations__"): + return cls.__annotations__ + + return {} def _counter_getter(e): @@ -305,12 +362,76 @@ def _counter_getter(e): return e[1].counter -def _transform_attrs(cls, these, auto_attribs, kw_only): +def _collect_base_attrs(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. 
+ for base_cls in reversed(cls.__mro__[1:-1]): + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.inherited or a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + # For each name, only keep the freshest definition i.e. the furthest at the + # back. base_attr_map is fine because it gets overwritten with every new + # instance. + filtered = [] + seen = set() + for a in reversed(base_attrs): + if a.name in seen: + continue + filtered.insert(0, a) + seen.add(a.name) + + return filtered, base_attr_map + + +def _collect_base_attrs_broken(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + + N.B. *taken_attr_names* will be mutated. + + Adhere to the old incorrect behavior. + + Notably it collects from the front and considers inherited attributes which + leads to the buggy behavior reported in #428. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in cls.__mro__[1:-1]: + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + taken_attr_names.add(a.name) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + return base_attrs, base_attr_map + + +def _transform_attrs( + cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer +): """ Transform all `_CountingAttr`s on a class into `Attribute`s. If *these* is passed, use that and don't look for them on the class. + *collect_by_mro* is True, collect them in the correct MRO order, otherwise + use the old -- incorrect -- order. See #428. + Return an `_Attributes`. 
""" cd = cls.__dict__ @@ -334,6 +455,7 @@ def _transform_attrs(cls, these, auto_attribs, kw_only): continue annot_names.add(attr_name) a = cd.get(attr_name, NOTHING) + if not isinstance(a, _CountingAttr): if a is NOTHING: a = attrib() @@ -367,30 +489,22 @@ def _transform_attrs(cls, these, auto_attribs, kw_only): for attr_name, ca in ca_list ] - base_attrs = [] - base_attr_map = {} # A dictionary of base attrs to their classes. - taken_attr_names = {a.name: a for a in own_attrs} - - # Traverse the MRO and collect attributes. - for base_cls in cls.__mro__[1:-1]: - sub_attrs = getattr(base_cls, "__attrs_attrs__", None) - if sub_attrs is not None: - for a in sub_attrs: - prev_a = taken_attr_names.get(a.name) - # Only add an attribute if it hasn't been defined before. This - # allows for overwriting attribute definitions by subclassing. - if prev_a is None: - base_attrs.append(a) - taken_attr_names[a.name] = a - base_attr_map[a.name] = base_cls + if collect_by_mro: + base_attrs, base_attr_map = _collect_base_attrs( + cls, {a.name for a in own_attrs} + ) + else: + base_attrs, base_attr_map = _collect_base_attrs_broken( + cls, {a.name for a in own_attrs} + ) attr_names = [a.name for a in base_attrs + own_attrs] AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) if kw_only: - own_attrs = [a._assoc(kw_only=True) for a in own_attrs] - base_attrs = [a._assoc(kw_only=True) for a in base_attrs] + own_attrs = [a.evolve(kw_only=True) for a in own_attrs] + base_attrs = [a.evolve(kw_only=True) for a in base_attrs] attrs = AttrsClass(base_attrs + own_attrs) @@ -409,14 +523,34 @@ def _transform_attrs(cls, these, auto_attribs, kw_only): if had_default is False and a.default is not NOTHING: had_default = True + if field_transformer is not None: + attrs = field_transformer(cls, attrs) return _Attributes((attrs, base_attrs, base_attr_map)) -def _frozen_setattrs(self, name, value): - """ - Attached to frozen classes as __setattr__. 
- """ - raise FrozenInstanceError() +if PYPY: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + if isinstance(self, BaseException) and name in ( + "__cause__", + "__context__", + ): + BaseException.__setattr__(self, name, value) + return + + raise FrozenInstanceError() + + +else: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + raise FrozenInstanceError() def _frozen_delattrs(self, name): @@ -432,19 +566,22 @@ class _ClassBuilder(object): """ __slots__ = ( - "_cls", - "_cls_dict", + "_attr_names", "_attrs", + "_base_attr_map", "_base_names", - "_attr_names", - "_slots", - "_frozen", - "_weakref_slot", "_cache_hash", - "_has_post_init", + "_cls", + "_cls_dict", "_delete_attribs", - "_base_attr_map", + "_frozen", + "_has_post_init", "_is_exc", + "_on_setattr", + "_slots", + "_weakref_slot", + "_has_own_setattr", + "_has_custom_setattr", ) def __init__( @@ -454,13 +591,23 @@ def __init__( slots, frozen, weakref_slot, + getstate_setstate, auto_attribs, kw_only, cache_hash, is_exc, + collect_by_mro, + on_setattr, + has_custom_setattr, + field_transformer, ): attrs, base_attrs, base_map = _transform_attrs( - cls, these, auto_attribs, kw_only + cls, + these, + auto_attribs, + kw_only, + collect_by_mro, + field_transformer, ) self._cls = cls @@ -470,12 +617,16 @@ def __init__( self._base_attr_map = base_map self._attr_names = tuple(a.name for a in attrs) self._slots = slots - self._frozen = frozen or _has_frozen_base_class(cls) + self._frozen = frozen self._weakref_slot = weakref_slot self._cache_hash = cache_hash self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) self._delete_attribs = not bool(these) self._is_exc = is_exc + self._on_setattr = on_setattr + + self._has_custom_setattr = has_custom_setattr + self._has_own_setattr = False self._cls_dict["__attrs_attrs__"] = self._attrs @@ -483,6 +634,14 @@ def __init__( self._cls_dict["__setattr__"] 
= _frozen_setattrs self._cls_dict["__delattr__"] = _frozen_delattrs + self._has_own_setattr = True + + if getstate_setstate: + ( + self._cls_dict["__getstate__"], + self._cls_dict["__setstate__"], + ) = self._make_getstate_setstate() + def __repr__(self): return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__) @@ -523,25 +682,15 @@ def _patch_original_class(self): for name, value in self._cls_dict.items(): setattr(cls, name, value) - # Attach __setstate__. This is necessary to clear the hash code - # cache on deserialization. See issue - # https://github.com/python-attrs/attrs/issues/482 . - # Note that this code only handles setstate for dict classes. - # For slotted classes, see similar code in _create_slots_class . - if self._cache_hash: - existing_set_state_method = getattr(cls, "__setstate__", None) - if existing_set_state_method: - raise NotImplementedError( - "Currently you cannot use hash caching if " - "you specify your own __setstate__ method." - "See https://github.com/python-attrs/attrs/issues/494 ." - ) - - def cache_hash_set_state(chss_self, _): - # clear hash code cache - setattr(chss_self, _hash_cache_field, None) + # If we've inherited an attrs __setattr__ and don't write our own, + # reset it to object's. + if not self._has_own_setattr and getattr( + cls, "__attrs_own_setattr__", False + ): + cls.__attrs_own_setattr__ = False - setattr(cls, "__setstate__", cache_hash_set_state) + if not self._has_custom_setattr: + cls.__setattr__ = object.__setattr__ return cls @@ -556,11 +705,27 @@ def _create_slots_class(self): if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") } - weakref_inherited = False + # If our class doesn't have its own implementation of __setattr__ + # (either from the user or by us), check the bases, if one of them has + # an attrs-made __setattr__, that needs to be reset. We don't walk the + # MRO because we only care about our immediate base classes. 
+ # XXX: This can be confused by subclassing a slotted attrs class with + # XXX: a non-attrs class and subclass the resulting class with an attrs + # XXX: class. See `test_slotted_confused` for details. For now that's + # XXX: OK with us. + if not self._has_own_setattr: + cd["__attrs_own_setattr__"] = False + + if not self._has_custom_setattr: + for base_cls in self._cls.__bases__: + if base_cls.__dict__.get("__attrs_own_setattr__", False): + cd["__setattr__"] = object.__setattr__ + break # Traverse the MRO to check for an existing __weakref__. + weakref_inherited = False for base_cls in self._cls.__mro__[1:-1]: - if "__weakref__" in getattr(base_cls, "__dict__", ()): + if base_cls.__dict__.get("__weakref__", None) is not None: weakref_inherited = True break @@ -574,7 +739,7 @@ def _create_slots_class(self): names += ("__weakref__",) # We only add the names of attributes that aren't inherited. - # Settings __slots__ to inherited attributes wastes memory. + # Setting __slots__ to inherited attributes wastes memory. slot_names = [name for name in names if name not in base_names] if self._cache_hash: slot_names.append(_hash_cache_field) @@ -584,38 +749,6 @@ def _create_slots_class(self): if qualname is not None: cd["__qualname__"] = qualname - # __weakref__ is not writable. - state_attr_names = tuple( - an for an in self._attr_names if an != "__weakref__" - ) - - def slots_getstate(self): - """ - Automatically created by attrs. - """ - return tuple(getattr(self, name) for name in state_attr_names) - - hash_caching_enabled = self._cache_hash - - def slots_setstate(self, state): - """ - Automatically created by attrs. - """ - __bound_setattr = _obj_setattr.__get__(self, Attribute) - for name, value in zip(state_attr_names, state): - __bound_setattr(name, value) - # Clearing the hash code cache on deserialization is needed - # because hash codes can change from run to run. See issue - # https://github.com/python-attrs/attrs/issues/482 . 
- # Note that this code only handles setstate for slotted classes. - # For dict classes, see similar code in _patch_original_class . - if hash_caching_enabled: - __bound_setattr(_hash_cache_field, None) - - # slots and frozen require __getstate__/__setstate__ to work - cd["__getstate__"] = slots_getstate - cd["__setstate__"] = slots_setstate - # Create new class based on old class and our methods. cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) @@ -636,8 +769,13 @@ def slots_setstate(self, state): if not closure_cells: # Catch None or the empty list. continue for cell in closure_cells: - if cell.cell_contents is self._cls: - set_closure_cell(cell, cls) + try: + match = cell.cell_contents is self._cls + except ValueError: # ValueError: Cell is empty + pass + else: + if match: + set_closure_cell(cell, cls) return cls @@ -660,6 +798,40 @@ def __str__(self): self._cls_dict["__str__"] = self._add_method_dunders(__str__) return self + def _make_getstate_setstate(self): + """ + Create custom __setstate__ and __getstate__ methods. + """ + # __weakref__ is not writable. + state_attr_names = tuple( + an for an in self._attr_names if an != "__weakref__" + ) + + def slots_getstate(self): + """ + Automatically created by attrs. + """ + return tuple(getattr(self, name) for name in state_attr_names) + + hash_caching_enabled = self._cache_hash + + def slots_setstate(self, state): + """ + Automatically created by attrs. + """ + __bound_setattr = _obj_setattr.__get__(self, Attribute) + for name, value in zip(state_attr_names, state): + __bound_setattr(name, value) + + # The hash code cache is not included when the object is + # serialized, but it still needs to be initialized to None to + # indicate that the first call to __hash__ should be a cache + # miss. 
+ if hash_caching_enabled: + __bound_setattr(_hash_cache_field, None) + + return slots_getstate, slots_setstate + def make_unhashable(self): self._cls_dict["__hash__"] = None return self @@ -687,6 +859,8 @@ def add_init(self): self._cache_hash, self._base_attr_map, self._is_exc, + self._on_setattr is not None + and self._on_setattr is not setters.NO_OP, ) ) @@ -695,10 +869,10 @@ def add_init(self): def add_eq(self): cd = self._cls_dict - cd["__eq__"], cd["__ne__"] = ( - self._add_method_dunders(meth) - for meth in _make_eq(self._cls, self._attrs) + cd["__eq__"] = self._add_method_dunders( + _make_eq(self._cls, self._attrs) ) + cd["__ne__"] = self._add_method_dunders(_make_ne()) return self @@ -712,6 +886,42 @@ def add_order(self): return self + def add_setattr(self): + if self._frozen: + return self + + sa_attrs = {} + for a in self._attrs: + on_setattr = a.on_setattr or self._on_setattr + if on_setattr and on_setattr is not setters.NO_OP: + sa_attrs[a.name] = a, on_setattr + + if not sa_attrs: + return self + + if self._has_custom_setattr: + # We need to write a __setattr__ but there already is one! + raise ValueError( + "Can't combine custom __setattr__ with on_setattr hooks." + ) + + # docstring comes from _add_method_dunders + def __setattr__(self, name, val): + try: + a, hook = sa_attrs[name] + except KeyError: + nval = val + else: + nval = hook(self, a, val) + + _obj_setattr(self, name, nval) + + self._cls_dict["__attrs_own_setattr__"] = True + self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) + self._has_own_setattr = True + + return self + def _add_method_dunders(self, method): """ Add __module__ and __qualname__ to a *method* if possible. @@ -728,6 +938,13 @@ def _add_method_dunders(self, method): except AttributeError: pass + try: + method.__doc__ = "Method generated by attrs for class %s." 
% ( + self._cls.__qualname__, + ) + except AttributeError: + pass + return method @@ -737,10 +954,10 @@ def _add_method_dunders(self, method): ) -def _determine_eq_order(cmp, eq, order): +def _determine_eq_order(cmp, eq, order, default_eq): """ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective - values of eq and order. + values of eq and order. If *eq* is None, set it to *default_eq*. """ if cmp is not None and any((eq is not None, order is not None)): raise ValueError("Don't mix `cmp` with `eq' and `order`.") @@ -751,9 +968,10 @@ def _determine_eq_order(cmp, eq, order): return cmp, cmp - # If left None, equality is on and ordering mirrors equality. + # If left None, equality is set to the specified default and ordering + # mirrors equality. if eq is None: - eq = True + eq = default_eq if order is None: order = eq @@ -764,14 +982,42 @@ def _determine_eq_order(cmp, eq, order): return eq, order +def _determine_whether_to_implement( + cls, flag, auto_detect, dunders, default=True +): + """ + Check whether we should implement a set of methods for *cls*. + + *flag* is the argument passed into @attr.s like 'init', *auto_detect* the + same as passed into @attr.s and *dunders* is a tuple of attribute names + whose presence signal that the user has implemented it themselves. + + Return *default* if no reason for either for or against is found. + + auto_detect must be False on Python 2. + """ + if flag is True or flag is False: + return flag + + if flag is None and auto_detect is False: + return default + + # Logically, flag is None and auto_detect is True here. 
+ for dunder in dunders: + if _has_own_attribute(cls, dunder): + return False + + return default + + def attrs( maybe_cls=None, these=None, repr_ns=None, - repr=True, + repr=None, cmp=None, hash=None, - init=True, + init=None, slots=False, frozen=False, weakref_slot=True, @@ -782,6 +1028,11 @@ def attrs( auto_exc=False, eq=None, order=None, + auto_detect=False, + collect_by_mro=False, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, ): r""" A class decorator that adds `dunder @@ -806,28 +1057,52 @@ def attrs( :param str repr_ns: When using nested classes, there's no way in Python 2 to automatically detect that. Therefore it's possible to set the namespace explicitly for a more meaningful ``repr`` output. + :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*, + *order*, and *hash* arguments explicitly, assume they are set to + ``True`` **unless any** of the involved methods for one of the + arguments is implemented in the *current* class (i.e. it is *not* + inherited from some base class). + + So for example by implementing ``__eq__`` on a class yourself, + ``attrs`` will deduce ``eq=False`` and won't create *neither* + ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible + ``__ne__`` by default, so it *should* be enough to only implement + ``__eq__`` in most cases). + + .. warning:: + + If you prevent ``attrs`` from creating the ordering methods for you + (``order=False``, e.g. by implementing ``__le__``), it becomes + *your* responsibility to make sure its ordering is sound. The best + way is to use the `functools.total_ordering` decorator. + + + Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*, + *cmp*, or *hash* overrides whatever *auto_detect* would determine. + + *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises + a `PythonTooOldError`. + :param bool repr: Create a ``__repr__`` method with a human readable representation of ``attrs`` attributes.. 
:param bool str: Create a ``__str__`` method that is identical to ``__repr__``. This is usually not necessary except for `Exception`\ s. - :param bool eq: If ``True`` or ``None`` (default), add ``__eq__`` and - ``__ne__`` methods that check two instances for equality. + :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__`` + and ``__ne__`` methods that check two instances for equality. They compare the instances as if they were tuples of their ``attrs`` - attributes, but only iff the types of both classes are *identical*! - :type eq: `bool` or `None` - :param bool order: If ``True``, add ``__lt__``, ``__le__``, ``__gt__``, - and ``__ge__`` methods that behave like *eq* above and allow instances - to be ordered. If ``None`` (default) mirror value of *eq*. - :type order: `bool` or `None` - :param cmp: Setting to ``True`` is equivalent to setting ``eq=True, - order=True``. Deprecated in favor of *eq* and *order*, has precedence - over them for backward-compatibility though. Must not be mixed with - *eq* or *order*. - :type cmp: `bool` or `None` - :param hash: If ``None`` (default), the ``__hash__`` method is generated - according how *eq* and *frozen* are set. + attributes if and only if the types of both classes are *identical*! + :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``, + ``__gt__``, and ``__ge__`` methods that behave like *eq* above and + allow instances to be ordered. If ``None`` (default) mirror value of + *eq*. + :param Optional[bool] cmp: Setting to ``True`` is equivalent to setting + ``eq=True, order=True``. Deprecated in favor of *eq* and *order*, has + precedence over them for backward-compatibility though. Must not be + mixed with *eq* or *order*. + :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method + is generated according how *eq* and *frozen* are set. 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you. 2. 
If *eq* is True and *frozen* is False, ``__hash__`` will be set to @@ -845,18 +1120,19 @@ def attrs( `object.__hash__`, and the `GitHub issue that led to the default \ behavior `_ for more details. - :type hash: ``bool`` or ``None`` :param bool init: Create a ``__init__`` method that initializes the ``attrs`` attributes. Leading underscores are stripped for the argument name. If a ``__attrs_post_init__`` method exists on the class, it will be called after the class is fully initialized. :param bool slots: Create a `slotted class ` that's more - memory-efficient. + memory-efficient. Slotted classes are generally superior to the default + dict classes, but have some gotchas you should know about, so we + encourage you to read the `glossary entry `. :param bool frozen: Make instances immutable after initialization. If someone attempts to modify a frozen instance, `attr.exceptions.FrozenInstanceError` is raised. - Please note: + .. note:: 1. This is achieved by installing a custom ``__setattr__`` method on your class, so you can't implement your own. @@ -872,10 +1148,12 @@ def attrs( circumvent that limitation by using ``object.__setattr__(self, "attribute_name", value)``. + 5. Subclasses of a frozen class are frozen too. + :param bool weakref_slot: Make instances weak-referenceable. This has no effect unless ``slots`` is also enabled. - :param bool auto_attribs: If True, collect `PEP 526`_-annotated attributes - (Python 3.6 and later only) from the class body. + :param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated + attributes (Python 3.6 and later only) from the class body. In this case, you **must** annotate every field. If ``attrs`` encounters a field that is set to an `attr.ib` but lacks a type @@ -915,6 +1193,46 @@ def attrs( default value are additionally available as a tuple in the ``args`` attribute, - the value of *str* is ignored leaving ``__str__`` to base classes. 
+ :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs`` + collects attributes from base classes. The default behavior is + incorrect in certain cases of multiple inheritance. It should be on by + default but is kept off for backward-compatability. + + See issue `#428 `_ for + more details. + + :param Optional[bool] getstate_setstate: + .. note:: + This is usually only interesting for slotted classes and you should + probably just set *auto_detect* to `True`. + + If `True`, ``__getstate__`` and + ``__setstate__`` are generated and attached to the class. This is + necessary for slotted classes to be pickleable. If left `None`, it's + `True` by default for slotted classes and ``False`` for dict classes. + + If *auto_detect* is `True`, and *getstate_setstate* is left `None`, + and **either** ``__getstate__`` or ``__setstate__`` is detected directly + on the class (i.e. not inherited), it is set to `False` (this is usually + what you want). + + :param on_setattr: A callable that is run whenever the user attempts to set + an attribute (either by assignment like ``i.x = 42`` or by using + `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments + as validators: the instance, the attribute that is being modified, and + the new value. + + If no exception is raised, the attribute is set to the return value of + the callable. + + If a list of callables is passed, they're automatically wrapped in an + `attr.setters.pipe`. + + :param Optional[callable] field_transformer: + A function that is called with the original class object and all + fields right before ``attrs`` finalizes the class. You can use + this, e.g., to automatically add converters or validators to + fields based on their types. See `transform-fields` for more details. .. versionadded:: 16.0.0 *slots* .. versionadded:: 16.1.0 *frozen* @@ -940,37 +1258,86 @@ def attrs( .. versionadded:: 19.1.0 *auto_exc* .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. .. 
versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *auto_detect* + .. versionadded:: 20.1.0 *collect_by_mro* + .. versionadded:: 20.1.0 *getstate_setstate* + .. versionadded:: 20.1.0 *on_setattr* + .. versionadded:: 20.3.0 *field_transformer* """ - eq, order = _determine_eq_order(cmp, eq, order) + if auto_detect and PY2: + raise PythonTooOldError( + "auto_detect only works on Python 3 and later." + ) + + eq_, order_ = _determine_eq_order(cmp, eq, order, None) + hash_ = hash # work around the lack of nonlocal + + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) def wrap(cls): if getattr(cls, "__class__", None) is None: raise TypeError("attrs only works with new-style classes.") + is_frozen = frozen or _has_frozen_base_class(cls) is_exc = auto_exc is True and issubclass(cls, BaseException) + has_own_setattr = auto_detect and _has_own_attribute( + cls, "__setattr__" + ) + + if has_own_setattr and is_frozen: + raise ValueError("Can't freeze a class with a custom __setattr__.") builder = _ClassBuilder( cls, these, slots, - frozen, + is_frozen, weakref_slot, + _determine_whether_to_implement( + cls, + getstate_setstate, + auto_detect, + ("__getstate__", "__setstate__"), + default=slots, + ), auto_attribs, kw_only, cache_hash, is_exc, + collect_by_mro, + on_setattr, + has_own_setattr, + field_transformer, ) - - if repr is True: + if _determine_whether_to_implement( + cls, repr, auto_detect, ("__repr__",) + ): builder.add_repr(repr_ns) if str is True: builder.add_str() - if eq is True and not is_exc: + + eq = _determine_whether_to_implement( + cls, eq_, auto_detect, ("__eq__", "__ne__") + ) + if not is_exc and eq is True: builder.add_eq() - if order is True and not is_exc: + if not is_exc and _determine_whether_to_implement( + cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") + ): builder.add_order() + builder.add_setattr() + + if ( + hash_ is None + and auto_detect is True + and _has_own_attribute(cls, 
"__hash__") + ): + hash = False + else: + hash = hash_ if hash is not True and hash is not False and hash is not None: # Can't use `hash in` because 1 == True for example. raise TypeError( @@ -985,7 +1352,9 @@ def wrap(cls): " hashing must be either explicitly or implicitly " "enabled." ) - elif hash is True or (hash is None and eq is True and frozen is True): + elif hash is True or ( + hash is None and eq is True and is_frozen is True + ): # Build a __hash__ if told so, or if it's safe. builder.add_hash() else: @@ -998,7 +1367,9 @@ def wrap(cls): ) builder.make_unhashable() - if init is True: + if _determine_whether_to_implement( + cls, init, auto_detect, ("__init__",) + ): builder.add_init() else: if cache_hash: @@ -1095,7 +1466,23 @@ def _make_hash(cls, attrs, frozen, cache_hash): unique_filename = _generate_unique_filename(cls, "hash") type_hash = hash(unique_filename) - method_lines = ["def __hash__(self):"] + hash_def = "def __hash__(self" + hash_func = "hash((" + closing_braces = "))" + if not cache_hash: + hash_def += "):" + else: + if not PY2: + hash_def += ", *" + + hash_def += ( + ", _cache_wrapper=" + + "__import__('attr._make')._make._CacheHashWrapper):" + ) + hash_func = "_cache_wrapper(" + hash_func + closing_braces += ")" + + method_lines = [hash_def] def append_hash_computation_lines(prefix, indent): """ @@ -1103,14 +1490,18 @@ def append_hash_computation_lines(prefix, indent): Below this will either be returned directly or used to compute a value which is then cached, depending on the value of cache_hash """ + method_lines.extend( - [indent + prefix + "hash((", indent + " %d," % (type_hash,)] + [ + indent + prefix + hash_func, + indent + " %d," % (type_hash,), + ] ) for a in attrs: method_lines.append(indent + " self.%s," % a.name) - method_lines.append(indent + " ))") + method_lines.append(indent + " " + closing_braces) if cache_hash: method_lines.append(tab + "if self.%s is None:" % _hash_cache_field) @@ -1153,19 +1544,29 @@ def _add_hash(cls, 
attrs): return cls -def __ne__(self, other): +def _make_ne(): """ - Check equality and either forward a NotImplemented or return the result - negated. + Create __ne__ method. """ - result = self.__eq__(other) - if result is NotImplemented: - return NotImplemented - return not result + def __ne__(self, other): + """ + Check equality and either forward a NotImplemented or + return the result negated. + """ + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + + return not result + + return __ne__ def _make_eq(cls, attrs): + """ + Create __eq__ method for *cls* with *attrs*. + """ attrs = [a for a in attrs if a.eq] unique_filename = _generate_unique_filename(cls, "eq") @@ -1201,10 +1602,13 @@ def _make_eq(cls, attrs): script.splitlines(True), unique_filename, ) - return locs["__eq__"], __ne__ + return locs["__eq__"] def _make_order(cls, attrs): + """ + Create ordering methods for *cls* with *attrs*. + """ attrs = [a for a in attrs if a.order] def attrs_to_tuple(obj): @@ -1259,7 +1663,8 @@ def _add_eq(cls, attrs=None): if attrs is None: attrs = cls.__attrs_attrs__ - cls.__eq__, cls.__ne__ = _make_eq(cls, attrs) + cls.__eq__ = _make_eq(cls, attrs) + cls.__ne__ = _make_ne() return cls @@ -1337,43 +1742,6 @@ def _add_repr(cls, ns=None, attrs=None): return cls -def _make_init( - cls, attrs, post_init, frozen, slots, cache_hash, base_attr_map, is_exc -): - attrs = [a for a in attrs if a.init or a.default is not NOTHING] - - unique_filename = _generate_unique_filename(cls, "init") - - script, globs, annotations = _attrs_to_init_script( - attrs, frozen, slots, post_init, cache_hash, base_attr_map, is_exc - ) - locs = {} - bytecode = compile(script, unique_filename, "exec") - attr_dict = dict((a.name, a) for a in attrs) - globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) - - if frozen is True: - # Save the lookup overhead in __init__ if we need to circumvent - # immutability. 
- globs["_cached_setattr"] = _obj_setattr - - eval(bytecode, globs, locs) - - # In order of debuggers like PDB being able to step through the code, - # we add a fake linecache entry. - linecache.cache[unique_filename] = ( - len(script), - None, - script.splitlines(True), - unique_filename, - ) - - __init__ = locs["__init__"] - __init__.__annotations__ = annotations - - return __init__ - - def fields(cls): """ Return the tuple of ``attrs`` attributes for a class. @@ -1458,8 +1826,191 @@ def _is_slot_attr(a_name, base_attr_map): return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) +def _make_init( + cls, + attrs, + post_init, + frozen, + slots, + cache_hash, + base_attr_map, + is_exc, + has_global_on_setattr, +): + if frozen and has_global_on_setattr: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = cache_hash or frozen + filtered_attrs = [] + attr_dict = {} + for a in attrs: + if not a.init and a.default is NOTHING: + continue + + filtered_attrs.append(a) + attr_dict[a.name] = a + + if a.on_setattr is not None: + if frozen is True: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = True + elif ( + has_global_on_setattr and a.on_setattr is not setters.NO_OP + ) or _is_slot_attr(a.name, base_attr_map): + needs_cached_setattr = True + + unique_filename = _generate_unique_filename(cls, "init") + + script, globs, annotations = _attrs_to_init_script( + filtered_attrs, + frozen, + slots, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_global_on_setattr, + ) + locs = {} + bytecode = compile(script, unique_filename, "exec") + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if needs_cached_setattr: + # Save the lookup overhead in __init__ if we need to circumvent + # setattr hooks. 
+ globs["_cached_setattr"] = _obj_setattr + + eval(bytecode, globs, locs) + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + linecache.cache[unique_filename] = ( + len(script), + None, + script.splitlines(True), + unique_filename, + ) + + __init__ = locs["__init__"] + __init__.__annotations__ = annotations + + return __init__ + + +def _setattr(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*. + """ + return "_setattr('%s', %s)" % (attr_name, value_var) + + +def _setattr_with_converter(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*, but run + its converter first. + """ + return "_setattr('%s', %s(%s))" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _assign(attr_name, value, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise + relegate to _setattr. + """ + if has_on_setattr: + return _setattr(attr_name, value, True) + + return "self.%s = %s" % (attr_name, value) + + +def _assign_with_converter(attr_name, value_var, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment after + conversion. Otherwise relegate to _setattr_with_converter. + """ + if has_on_setattr: + return _setattr_with_converter(attr_name, value_var, True) + + return "self.%s = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +if PY2: + + def _unpack_kw_only_py2(attr_name, default=None): + """ + Unpack *attr_name* from _kw_only dict. + """ + if default is not None: + arg_default = ", %s" % default + else: + arg_default = "" + return "%s = _kw_only.pop('%s'%s)" % ( + attr_name, + attr_name, + arg_default, + ) + + def _unpack_kw_only_lines_py2(kw_only_args): + """ + Unpack all *kw_only_args* from _kw_only dict and handle errors. 
+ + Given a list of strings "{attr_name}" and "{attr_name}={default}" + generates list of lines of code that pop attrs from _kw_only dict and + raise TypeError similar to builtin if required attr is missing or + extra key is passed. + + >>> print("\n".join(_unpack_kw_only_lines_py2(["a", "b=42"]))) + try: + a = _kw_only.pop('a') + b = _kw_only.pop('b', 42) + except KeyError as _key_error: + raise TypeError( + ... + if _kw_only: + raise TypeError( + ... + """ + lines = ["try:"] + lines.extend( + " " + _unpack_kw_only_py2(*arg.split("=")) + for arg in kw_only_args + ) + lines += """\ +except KeyError as _key_error: + raise TypeError( + '__init__() missing required keyword-only argument: %s' % _key_error + ) +if _kw_only: + raise TypeError( + '__init__() got an unexpected keyword argument %r' + % next(iter(_kw_only)) + ) +""".split( + "\n" + ) + return lines + + def _attrs_to_init_script( - attrs, frozen, slots, post_init, cache_hash, base_attr_map, is_exc + attrs, + frozen, + slots, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_global_on_setattr, ): """ Return a script of an initializer for *attrs* and a dict of globals. @@ -1470,85 +2021,49 @@ def _attrs_to_init_script( a cached ``object.__setattr__``. """ lines = [] - any_slot_ancestors = any( - _is_slot_attr(a.name, base_attr_map) for a in attrs - ) + if needs_cached_setattr: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup per + # assignment. + # Note _setattr will be used again below if cache_hash is True + "_setattr = _cached_setattr.__get__(self, self.__class__)" + ) + if frozen is True: if slots is True: - lines.append( - # Circumvent the __setattr__ descriptor to save one lookup per - # assignment. 
- # Note _setattr will be used again below if cache_hash is True - "_setattr = _cached_setattr.__get__(self, self.__class__)" - ) - - def fmt_setter(attr_name, value_var): - return "_setattr('%(attr_name)s', %(value_var)s)" % { - "attr_name": attr_name, - "value_var": value_var, - } - - def fmt_setter_with_converter(attr_name, value_var): - conv_name = _init_converter_pat.format(attr_name) - return "_setattr('%(attr_name)s', %(conv)s(%(value_var)s))" % { - "attr_name": attr_name, - "value_var": value_var, - "conv": conv_name, - } - + fmt_setter = _setattr + fmt_setter_with_converter = _setattr_with_converter else: # Dict frozen classes assign directly to __dict__. # But only if the attribute doesn't come from an ancestor slot # class. # Note _inst_dict will be used again below if cache_hash is True lines.append("_inst_dict = self.__dict__") - if any_slot_ancestors: - lines.append( - # Circumvent the __setattr__ descriptor to save one lookup - # per assignment. - "_setattr = _cached_setattr.__get__(self, self.__class__)" - ) - def fmt_setter(attr_name, value_var): - if _is_slot_attr(attr_name, base_attr_map): - res = "_setattr('%(attr_name)s', %(value_var)s)" % { - "attr_name": attr_name, - "value_var": value_var, - } - else: - res = "_inst_dict['%(attr_name)s'] = %(value_var)s" % { - "attr_name": attr_name, - "value_var": value_var, - } - return res - - def fmt_setter_with_converter(attr_name, value_var): - conv_name = _init_converter_pat.format(attr_name) + def fmt_setter(attr_name, value_var, has_on_setattr): if _is_slot_attr(attr_name, base_attr_map): - tmpl = "_setattr('%(attr_name)s', %(c)s(%(value_var)s))" - else: - tmpl = "_inst_dict['%(attr_name)s'] = %(c)s(%(value_var)s)" - return tmpl % { - "attr_name": attr_name, - "value_var": value_var, - "c": conv_name, - } + return _setattr(attr_name, value_var, has_on_setattr) + + return "_inst_dict['%s'] = %s" % (attr_name, value_var) + + def fmt_setter_with_converter( + attr_name, value_var, has_on_setattr + ): + 
if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): + return _setattr_with_converter( + attr_name, value_var, has_on_setattr + ) + + return "_inst_dict['%s'] = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) else: # Not frozen. - def fmt_setter(attr_name, value): - return "self.%(attr_name)s = %(value)s" % { - "attr_name": attr_name, - "value": value, - } - - def fmt_setter_with_converter(attr_name, value_var): - conv_name = _init_converter_pat.format(attr_name) - return "self.%(attr_name)s = %(conv)s(%(value_var)s)" % { - "attr_name": attr_name, - "value_var": value_var, - "conv": conv_name, - } + fmt_setter = _assign + fmt_setter_with_converter = _assign_with_converter args = [] kw_only_args = [] @@ -1562,13 +2077,19 @@ def fmt_setter_with_converter(attr_name, value_var): for a in attrs: if a.validator: attrs_to_validate.append(a) + attr_name = a.name + has_on_setattr = a.on_setattr is not None or ( + a.on_setattr is not setters.NO_OP and has_global_on_setattr + ) arg_name = a.name.lstrip("_") + has_factory = isinstance(a.default, Factory) if has_factory and a.default.takes_self: maybe_self = "self" else: maybe_self = "" + if a.init is False: if has_factory: init_factory_name = _init_factory_pat.format(a.name) @@ -1576,16 +2097,18 @@ def fmt_setter_with_converter(attr_name, value_var): lines.append( fmt_setter_with_converter( attr_name, - init_factory_name + "({0})".format(maybe_self), + init_factory_name + "(%s)" % (maybe_self,), + has_on_setattr, ) ) - conv_name = _init_converter_pat.format(a.name) + conv_name = _init_converter_pat % (a.name,) names_for_globals[conv_name] = a.converter else: lines.append( fmt_setter( attr_name, - init_factory_name + "({0})".format(maybe_self), + init_factory_name + "(%s)" % (maybe_self,), + has_on_setattr, ) ) names_for_globals[init_factory_name] = a.default.factory @@ -1594,70 +2117,78 @@ def fmt_setter_with_converter(attr_name, value_var): lines.append( fmt_setter_with_converter( 
attr_name, - "attr_dict['{attr_name}'].default".format( - attr_name=attr_name - ), + "attr_dict['%s'].default" % (attr_name,), + has_on_setattr, ) ) - conv_name = _init_converter_pat.format(a.name) + conv_name = _init_converter_pat % (a.name,) names_for_globals[conv_name] = a.converter else: lines.append( fmt_setter( attr_name, - "attr_dict['{attr_name}'].default".format( - attr_name=attr_name - ), + "attr_dict['%s'].default" % (attr_name,), + has_on_setattr, ) ) elif a.default is not NOTHING and not has_factory: - arg = "{arg_name}=attr_dict['{attr_name}'].default".format( - arg_name=arg_name, attr_name=attr_name - ) + arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name) if a.kw_only: kw_only_args.append(arg) else: args.append(arg) + if a.converter is not None: - lines.append(fmt_setter_with_converter(attr_name, arg_name)) + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) names_for_globals[ - _init_converter_pat.format(a.name) + _init_converter_pat % (a.name,) ] = a.converter else: - lines.append(fmt_setter(attr_name, arg_name)) + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + elif has_factory: - arg = "{arg_name}=NOTHING".format(arg_name=arg_name) + arg = "%s=NOTHING" % (arg_name,) if a.kw_only: kw_only_args.append(arg) else: args.append(arg) - lines.append( - "if {arg_name} is not NOTHING:".format(arg_name=arg_name) - ) + lines.append("if %s is not NOTHING:" % (arg_name,)) + init_factory_name = _init_factory_pat.format(a.name) if a.converter is not None: lines.append( - " " + fmt_setter_with_converter(attr_name, arg_name) + " " + + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) ) lines.append("else:") lines.append( " " + fmt_setter_with_converter( attr_name, - init_factory_name + "({0})".format(maybe_self), + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, ) ) names_for_globals[ - _init_converter_pat.format(a.name) + _init_converter_pat % (a.name,) ] = a.converter 
else: - lines.append(" " + fmt_setter(attr_name, arg_name)) + lines.append( + " " + fmt_setter(attr_name, arg_name, has_on_setattr) + ) lines.append("else:") lines.append( " " + fmt_setter( attr_name, - init_factory_name + "({0})".format(maybe_self), + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, ) ) names_for_globals[init_factory_name] = a.default.factory @@ -1666,13 +2197,18 @@ def fmt_setter_with_converter(attr_name, value_var): kw_only_args.append(arg_name) else: args.append(arg_name) + if a.converter is not None: - lines.append(fmt_setter_with_converter(attr_name, arg_name)) + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) names_for_globals[ - _init_converter_pat.format(a.name) + _init_converter_pat % (a.name,) ] = a.converter else: - lines.append(fmt_setter(attr_name, arg_name)) + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) if a.init is True and a.converter is None and a.type is not None: annotations[arg_name] = a.type @@ -1681,13 +2217,14 @@ def fmt_setter_with_converter(attr_name, value_var): names_for_globals["_config"] = _config lines.append("if _config._run_validators is True:") for a in attrs_to_validate: - val_name = "__attr_validator_{}".format(a.name) - attr_name = "__attr_{}".format(a.name) + val_name = "__attr_validator_" + a.name + attr_name = "__attr_" + a.name lines.append( - " {}(self, {}, self.{})".format(val_name, attr_name, a.name) + " %s(self, %s, self.%s)" % (val_name, attr_name, a.name) ) names_for_globals[val_name] = a.validator names_for_globals[attr_name] = a + if post_init: lines.append("self.__attrs_post_init__()") @@ -1718,14 +2255,14 @@ def fmt_setter_with_converter(attr_name, value_var): args = ", ".join(args) if kw_only_args: if PY2: - raise PythonTooOldError( - "Keyword-only arguments only work on Python 3 and later." 
- ) + lines = _unpack_kw_only_lines_py2(kw_only_args) + lines - args += "{leading_comma}*, {kw_only_args}".format( - leading_comma=", " if args else "", - kw_only_args=", ".join(kw_only_args), - ) + args += "%s**_kw_only" % (", " if args else "",) # leading comma + else: + args += "%s*, %s" % ( + ", " if args else "", # leading comma + ", ".join(kw_only_args), # kw_only args + ) return ( """\ def __init__(self, {args}): @@ -1742,12 +2279,26 @@ class Attribute(object): """ *Read-only* representation of an attribute. + Instances of this class are frequently used for introspection purposes + like: + + - `fields` returns a tuple of them. + - Validators get them passed as the first argument. + - The *field transformer* hook receives a list of them. + :attribute name: The name of the attribute. + :attribute inherited: Whether or not that attribute has been inherited from + a base class. Plus *all* arguments of `attr.ib` (except for ``factory`` which is only syntactic sugar for ``default=Factory(...)``. - For the version history of the fields, see `attr.ib`. + .. versionadded:: 20.1.0 *inherited* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.2.0 *inherited* is not taken into account for + equality checks and hashing anymore. + + For the full version history of the fields, see `attr.ib`. """ __slots__ = ( @@ -1763,6 +2314,8 @@ class Attribute(object): "type", "converter", "kw_only", + "inherited", + "on_setattr", ) def __init__( @@ -1774,14 +2327,16 @@ def __init__( cmp, # XXX: unused, remove along with other cmp code. hash, init, + inherited, metadata=None, type=None, converter=None, kw_only=False, eq=None, order=None, + on_setattr=None, ): - eq, order = _determine_eq_order(cmp, eq, order) + eq, order = _determine_eq_order(cmp, eq, order, True) # Cache this descriptor here to speed things up later. 
bound_setattr = _obj_setattr.__get__(self, Attribute) @@ -1807,6 +2362,8 @@ def __init__( ) bound_setattr("type", type) bound_setattr("kw_only", kw_only) + bound_setattr("inherited", inherited) + bound_setattr("on_setattr", on_setattr) def __setattr__(self, name, value): raise FrozenInstanceError() @@ -1829,6 +2386,7 @@ def from_counting_attr(cls, name, ca, type=None): "validator", "default", "type", + "inherited", ) # exclude methods and deprecated alias } return cls( @@ -1837,6 +2395,7 @@ def from_counting_attr(cls, name, ca, type=None): default=ca._default, type=type, cmp=None, + inherited=False, **inst_dict ) @@ -1849,10 +2408,17 @@ def cmp(self): return self.eq and self.order - # Don't use attr.assoc since fields(Attribute) doesn't work - def _assoc(self, **changes): + # Don't use attr.evolve since fields(Attribute) doesn't work + def evolve(self, **changes): """ Copy *self* and apply *changes*. + + This works similarly to `attr.evolve` but that function does not work + with ``Attribute``. + + It is mainly meant to be used for `transform-fields`. + + .. 
versionadded:: 20.3.0 """ new = copy.copy(self) @@ -1901,13 +2467,17 @@ def _setattrs(self, name_values_pairs): order=False, hash=(name != "metadata"), init=True, + inherited=False, ) for name in Attribute.__slots__ ] Attribute = _add_hash( - _add_eq(_add_repr(Attribute, attrs=_a), attrs=_a), - attrs=[a for a in _a if a.hash], + _add_eq( + _add_repr(Attribute, attrs=_a), + attrs=[a for a in _a if a.name != "inherited"], + ), + attrs=[a for a in _a if a.hash and a.name != "inherited"], ) @@ -1933,6 +2503,7 @@ class _CountingAttr(object): "converter", "type", "kw_only", + "on_setattr", ) __attrs_attrs__ = tuple( Attribute( @@ -1946,6 +2517,8 @@ class _CountingAttr(object): kw_only=False, eq=True, order=False, + inherited=False, + on_setattr=None, ) for name in ( "counter", @@ -1955,6 +2528,7 @@ class _CountingAttr(object): "order", "hash", "init", + "on_setattr", ) ) + ( Attribute( @@ -1968,6 +2542,8 @@ class _CountingAttr(object): kw_only=False, eq=True, order=False, + inherited=False, + on_setattr=None, ), ) cls_counter = 0 @@ -1986,24 +2562,22 @@ def __init__( kw_only, eq, order, + on_setattr, ): _CountingAttr.cls_counter += 1 self.counter = _CountingAttr.cls_counter self._default = default - # If validator is a list/tuple, wrap it using helper validator. - if validator and isinstance(validator, (list, tuple)): - self._validator = and_(*validator) - else: - self._validator = validator + self._validator = validator + self.converter = converter self.repr = repr self.eq = eq self.order = order self.hash = hash self.init = init - self.converter = converter self.metadata = metadata self.type = type self.kw_only = kw_only + self.on_setattr = on_setattr def validator(self, meth): """ @@ -2072,8 +2646,7 @@ def make_class(name, attrs, bases=(object,), **attributes_arguments): """ A quick way to create a new class called *name* with *attrs*. - :param name: The name for the new class. - :type name: str + :param str name: The name for the new class. 
:param attrs: A list of names or a dictionary of mappings of names to attributes. @@ -2120,17 +2693,21 @@ def make_class(name, attrs, bases=(object,), **attributes_arguments): # We do it here for proper warnings with meaningful stacklevel. cmp = attributes_arguments.pop("cmp", None) - attributes_arguments["eq"], attributes_arguments[ - "order" - ] = _determine_eq_order( - cmp, attributes_arguments.get("eq"), attributes_arguments.get("order") + ( + attributes_arguments["eq"], + attributes_arguments["order"], + ) = _determine_eq_order( + cmp, + attributes_arguments.get("eq"), + attributes_arguments.get("order"), + True, ) return _attrs(these=cls_dict, **attributes_arguments)(type_) # These are required by within this module so we define them here and merely -# import into .validators. +# import into .validators / .converters. @attrs(slots=True, hash=True) @@ -2152,8 +2729,7 @@ def and_(*validators): When called on a value, it runs all wrapped validators. - :param validators: Arbitrary number of validators. - :type validators: callables + :param callables validators: Arbitrary number of validators. .. versionadded:: 17.1.0 """ @@ -2166,3 +2742,24 @@ def and_(*validators): ) return _AndValidator(tuple(vals)) + + +def pipe(*converters): + """ + A converter that composes multiple converters into one. + + When called on a value, it runs all wrapped converters, returning the + *last* value. + + :param callables converters: Arbitrary number of converters. + + .. versionadded:: 20.1.0 + """ + + def pipe_converter(val): + for converter in converters: + val = converter(val) + + return val + + return pipe_converter diff --git a/pipenv/vendor/attr/_next_gen.py b/pipenv/vendor/attr/_next_gen.py new file mode 100644 index 0000000000..2b5565c569 --- /dev/null +++ b/pipenv/vendor/attr/_next_gen.py @@ -0,0 +1,160 @@ +""" +This is a Python 3.6 and later-only, keyword-only, and **provisional** API that +calls `attr.s` with different default values. 
+ +Provisional APIs that shall become "import attrs" one glorious day. +""" + +from functools import partial + +from attr.exceptions import UnannotatedAttributeError + +from . import setters +from ._make import NOTHING, _frozen_setattrs, attrib, attrs + + +def define( + maybe_cls=None, + *, + these=None, + repr=None, + hash=None, + init=None, + slots=True, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=None, + kw_only=False, + cache_hash=False, + auto_exc=True, + eq=None, + order=False, + auto_detect=True, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, +): + r""" + The only behavioral differences are the handling of the *auto_attribs* + option: + + :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves + exactly like `attr.s`. If left `None`, `attr.s` will try to guess: + + 1. If all attributes are annotated and no `attr.ib` is found, it assumes + *auto_attribs=True*. + 2. Otherwise it assumes *auto_attribs=False* and tries to collect + `attr.ib`\ s. + + and that mutable classes (``frozen=False``) validate on ``__setattr__``. + + .. versionadded:: 20.1.0 + """ + + def do_it(cls, auto_attribs): + return attrs( + maybe_cls=cls, + these=these, + repr=repr, + hash=hash, + init=init, + slots=slots, + frozen=frozen, + weakref_slot=weakref_slot, + str=str, + auto_attribs=auto_attribs, + kw_only=kw_only, + cache_hash=cache_hash, + auto_exc=auto_exc, + eq=eq, + order=order, + auto_detect=auto_detect, + collect_by_mro=True, + getstate_setstate=getstate_setstate, + on_setattr=on_setattr, + field_transformer=field_transformer, + ) + + def wrap(cls): + """ + Making this a wrapper ensures this code runs during class creation. + + We also ensure that frozen-ness of classes is inherited. + """ + nonlocal frozen, on_setattr + + had_on_setattr = on_setattr not in (None, setters.NO_OP) + + # By default, mutable classes validate on setattr. 
+ if frozen is False and on_setattr is None: + on_setattr = setters.validate + + # However, if we subclass a frozen class, we inherit the immutability + # and disable on_setattr. + for base_cls in cls.__bases__: + if base_cls.__setattr__ is _frozen_setattrs: + if had_on_setattr: + raise ValueError( + "Frozen classes can't use on_setattr " + "(frozen-ness was inherited)." + ) + + on_setattr = setters.NO_OP + break + + if auto_attribs is not None: + return do_it(cls, auto_attribs) + + try: + return do_it(cls, True) + except UnannotatedAttributeError: + return do_it(cls, False) + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +mutable = define +frozen = partial(define, frozen=True, on_setattr=None) + + +def field( + *, + default=NOTHING, + validator=None, + repr=True, + hash=None, + init=True, + metadata=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, +): + """ + Identical to `attr.ib`, except keyword-only and with some arguments + removed. + + .. 
versionadded:: 20.1.0 + """ + return attrib( + default=default, + validator=validator, + repr=repr, + hash=hash, + init=init, + metadata=metadata, + converter=converter, + factory=factory, + kw_only=kw_only, + eq=eq, + order=order, + on_setattr=on_setattr, + ) diff --git a/pipenv/vendor/attr/converters.py b/pipenv/vendor/attr/converters.py index 8592897847..715ce17859 100644 --- a/pipenv/vendor/attr/converters.py +++ b/pipenv/vendor/attr/converters.py @@ -4,7 +4,14 @@ from __future__ import absolute_import, division, print_function -from ._make import NOTHING, Factory +from ._make import NOTHING, Factory, pipe + + +__all__ = [ + "pipe", + "optional", + "default_if_none", +] def optional(converter): diff --git a/pipenv/vendor/attr/converters.pyi b/pipenv/vendor/attr/converters.pyi index 63b2a3866e..7b0caa14f0 100644 --- a/pipenv/vendor/attr/converters.pyi +++ b/pipenv/vendor/attr/converters.pyi @@ -3,10 +3,9 @@ from . import _ConverterType _T = TypeVar("_T") -def optional( - converter: _ConverterType[_T] -) -> _ConverterType[Optional[_T]]: ... +def pipe(*validators: _ConverterType) -> _ConverterType: ... +def optional(converter: _ConverterType) -> _ConverterType: ... @overload -def default_if_none(default: _T) -> _ConverterType[_T]: ... +def default_if_none(default: _T) -> _ConverterType: ... @overload -def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType[_T]: ... +def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ... diff --git a/pipenv/vendor/attr/exceptions.py b/pipenv/vendor/attr/exceptions.py index d1b76185c9..fcd89106f1 100644 --- a/pipenv/vendor/attr/exceptions.py +++ b/pipenv/vendor/attr/exceptions.py @@ -1,20 +1,37 @@ from __future__ import absolute_import, division, print_function -class FrozenInstanceError(AttributeError): +class FrozenError(AttributeError): """ - A frozen/immutable instance has been attempted to be modified. + A frozen/immutable instance or attribute haave been attempted to be + modified. 
It mirrors the behavior of ``namedtuples`` by using the same error message and subclassing `AttributeError`. - .. versionadded:: 16.1.0 + .. versionadded:: 20.1.0 """ msg = "can't set attribute" args = [msg] +class FrozenInstanceError(FrozenError): + """ + A frozen instance has been attempted to be modified. + + .. versionadded:: 16.1.0 + """ + + +class FrozenAttributeError(FrozenError): + """ + A frozen attribute has been attempted to be modified. + + .. versionadded:: 20.1.0 + """ + + class AttrsAttributeNotFoundError(ValueError): """ An ``attrs`` function couldn't find an attribute that the user asked for. @@ -51,7 +68,8 @@ class UnannotatedAttributeError(RuntimeError): class PythonTooOldError(RuntimeError): """ - An ``attrs`` feature requiring a more recent python version has been used. + It was attempted to use an ``attrs`` feature that requires a newer Python + version. .. versionadded:: 18.2.0 """ diff --git a/pipenv/vendor/attr/exceptions.pyi b/pipenv/vendor/attr/exceptions.pyi index 736fde2e1d..f2680118b4 100644 --- a/pipenv/vendor/attr/exceptions.pyi +++ b/pipenv/vendor/attr/exceptions.pyi @@ -1,8 +1,10 @@ from typing import Any -class FrozenInstanceError(AttributeError): +class FrozenError(AttributeError): msg: str = ... +class FrozenInstanceError(FrozenError): ... +class FrozenAttributeError(FrozenError): ... class AttrsAttributeNotFoundError(ValueError): ... class NotAnAttrsClassError(ValueError): ... class DefaultAlreadySetError(RuntimeError): ... diff --git a/pipenv/vendor/attr/setters.py b/pipenv/vendor/attr/setters.py new file mode 100644 index 0000000000..240014b3c1 --- /dev/null +++ b/pipenv/vendor/attr/setters.py @@ -0,0 +1,77 @@ +""" +Commonly used hooks for on_setattr. +""" + +from __future__ import absolute_import, division, print_function + +from . import _config +from .exceptions import FrozenAttributeError + + +def pipe(*setters): + """ + Run all *setters* and return the return value of the last one. + + .. 
versionadded:: 20.1.0 + """ + + def wrapped_pipe(instance, attrib, new_value): + rv = new_value + + for setter in setters: + rv = setter(instance, attrib, rv) + + return rv + + return wrapped_pipe + + +def frozen(_, __, ___): + """ + Prevent an attribute to be modified. + + .. versionadded:: 20.1.0 + """ + raise FrozenAttributeError() + + +def validate(instance, attrib, new_value): + """ + Run *attrib*'s validator on *new_value* if it has one. + + .. versionadded:: 20.1.0 + """ + if _config._run_validators is False: + return new_value + + v = attrib.validator + if not v: + return new_value + + v(instance, attrib, new_value) + + return new_value + + +def convert(instance, attrib, new_value): + """ + Run *attrib*'s converter -- if it has one -- on *new_value* and return the + result. + + .. versionadded:: 20.1.0 + """ + c = attrib.converter + if c: + return c(new_value) + + return new_value + + +NO_OP = object() +""" +Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. + +Does not work in `pipe` or within lists. + +.. versionadded:: 20.1.0 +""" diff --git a/pipenv/vendor/attr/setters.pyi b/pipenv/vendor/attr/setters.pyi new file mode 100644 index 0000000000..19bc33fd1e --- /dev/null +++ b/pipenv/vendor/attr/setters.pyi @@ -0,0 +1,18 @@ +from . import _OnSetAttrType, Attribute +from typing import TypeVar, Any, NewType, NoReturn, cast + +_T = TypeVar("_T") + +def frozen( + instance: Any, attribute: Attribute, new_value: Any +) -> NoReturn: ... +def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ... +def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ... + +# convert is allowed to return Any, because they can be chained using pipe. +def convert( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> Any: ... 
+ +_NoOpType = NewType("_NoOpType", object) +NO_OP: _NoOpType diff --git a/pipenv/vendor/attr/validators.py b/pipenv/vendor/attr/validators.py index 839d310c38..b9a73054e9 100644 --- a/pipenv/vendor/attr/validators.py +++ b/pipenv/vendor/attr/validators.py @@ -67,7 +67,7 @@ def instance_of(type): return _InstanceOfValidator(type) -@attrs(repr=False, frozen=True) +@attrs(repr=False, frozen=True, slots=True) class _MatchesReValidator(object): regex = attrib() flags = attrib() @@ -171,7 +171,8 @@ def provides(interface): performed using ``interface.providedBy(value)`` (see `zope.interface `_). - :param zope.interface.Interface interface: The interface to check for. + :param interface: The interface to check for. + :type interface: ``zope.interface.Interface`` :raises TypeError: With a human readable error message, the attribute (of type `attr.Attribute`), the expected interface, and the diff --git a/pipenv/vendor/cached-property.LICENSE b/pipenv/vendor/cached_property.LICENSE similarity index 100% rename from pipenv/vendor/cached-property.LICENSE rename to pipenv/vendor/cached_property.LICENSE diff --git a/pipenv/vendor/cached_property.py b/pipenv/vendor/cached_property.py index 125f619588..3135871bfb 100644 --- a/pipenv/vendor/cached_property.py +++ b/pipenv/vendor/cached_property.py @@ -2,9 +2,10 @@ __author__ = "Daniel Greenfeld" __email__ = "pydanny@gmail.com" -__version__ = "1.5.1" +__version__ = "1.5.2" __license__ = "BSD" +from functools import wraps from time import time import threading @@ -36,7 +37,7 @@ def __get__(self, obj, cls): return value def _wrap_in_coroutine(self, obj): - + @wraps(obj) @asyncio.coroutine def wrapper(): future = asyncio.ensure_future(self.func(obj)) diff --git a/pipenv/vendor/certifi/__init__.py b/pipenv/vendor/certifi/__init__.py index 1e2dfac7db..4e5133b261 100644 --- a/pipenv/vendor/certifi/__init__.py +++ b/pipenv/vendor/certifi/__init__.py @@ -1,3 +1,3 @@ from .core import contents, where -__version__ = "2020.04.05.1" 
+__version__ = "2020.11.08" diff --git a/pipenv/vendor/certifi/cacert.pem b/pipenv/vendor/certifi/cacert.pem index ece147c9dc..a1072085ce 100644 --- a/pipenv/vendor/certifi/cacert.pem +++ b/pipenv/vendor/certifi/cacert.pem @@ -58,38 +58,6 @@ AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== -----END CERTIFICATE----- -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Label: "Verisign Class 3 Public Primary Certification Authority - G3" -# Serial: 206684696279472310254277870180966723415 -# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 -# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 -# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b -N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t 
-KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu -kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm -CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ -Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu -imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te -2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe -DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC -/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p -F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt -TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== ------END CERTIFICATE----- - # Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited # Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited # Label: "Entrust.net Premium 2048 Secure Server CA" @@ -152,39 +120,6 @@ ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp -----END CERTIFICATE----- -# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Label: "AddTrust External Root" -# Serial: 1 -# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f -# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 -# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 ------BEGIN CERTIFICATE----- -MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs -IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 -MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux 
-FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h -bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v -dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt -H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 -uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX -mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX -a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN -E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 -WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD -VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 -Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU -cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx -IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN -AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH -YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 -6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC -Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX -c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a -mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= ------END CERTIFICATE----- - # Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. # Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
# Label: "Entrust Root Certification Authority" @@ -640,46 +575,6 @@ VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= -----END CERTIFICATE----- -# Issuer: O=Government Root Certification Authority -# Subject: O=Government Root Certification Authority -# Label: "Taiwan GRCA" -# Serial: 42023070807708724159991140556527066870 -# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e -# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 -# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 ------BEGIN CERTIFICATE----- -MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ -MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow -PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB -AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR -IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q -gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy -yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts -F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 -jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx -ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC -VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK -YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH -EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN -Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud -DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE -MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK -UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ 
-TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf -qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK -ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE -JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 -hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 -EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm -nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX -udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz -ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe -LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl -pYYsfPQS ------END CERTIFICATE----- - # Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com # Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com # Label: "DigiCert Assured ID Root CA" @@ -1127,38 +1022,6 @@ fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= -----END CERTIFICATE----- -# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GA CA" -# Serial: 86718877871133159090080555911823548314 -# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 -# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 -# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 ------BEGIN CERTIFICATE----- -MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB -ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly -aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl -ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w -NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G 
-A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD -VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX -SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR -VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 -w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF -mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg -4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 -4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw -EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx -SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 -ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 -vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa -hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi -Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ -/L7fCg0= ------END CERTIFICATE----- - # Issuer: CN=Certigna O=Dhimyotis # Subject: CN=Certigna O=Dhimyotis # Label: "Certigna" @@ -1499,47 +1362,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= -----END CERTIFICATE----- -# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA - G2" -# Serial: 10000012 -# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a -# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 -# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f ------BEGIN CERTIFICATE----- -MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh 
-dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX -DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl -ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv -b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 -qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp -uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU -Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE -pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp -5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M -UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN -GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy -5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv -6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK -eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 -B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ -BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov -L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG -SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS -CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen -5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 -IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK -gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL -+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL -vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm -bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk -N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC -Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z -ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== ------END CERTIFICATE----- - # Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post # 
Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post # Label: "Hongkong Post Root CA 1" @@ -2391,38 +2213,6 @@ e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p TpPDpFQUWw== -----END CERTIFICATE----- -# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Label: "EE Certification Centre Root CA" -# Serial: 112324828676200291871926431888494945866 -# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f -# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 -# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 ------BEGIN CERTIFICATE----- -MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 -MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 -czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG -CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy -MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl -ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS -b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB -AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy -euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO -bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw -WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d -MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE -1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD -VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ -zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB -BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF -BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV -v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG 
-E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u -uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW -iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v -GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= ------END CERTIFICATE----- - # Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH # Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH # Label: "D-TRUST Root Class 3 CA 2 2009" @@ -3788,47 +3578,6 @@ CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW 1KyLa2tJElMzrdfkviT8tQp21KW8EA== -----END CERTIFICATE----- -# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. -# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. -# Label: "LuxTrust Global Root 2" -# Serial: 59914338225734147123941058376788110305822489521 -# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c -# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f -# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 ------BEGIN CERTIFICATE----- -MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL -BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV -BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw -MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B -LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F -ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem -hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 -EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn -Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 -zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ -96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m -j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g -DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ 
-8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j -X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH -hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB -KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 -Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT -+Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL -BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 -BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO -jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 -loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c -qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ -2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ -JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre -zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf -LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ -x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 -oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr ------END CERTIFICATE----- - # Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM # Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM # Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" @@ -4639,3 +4388,219 @@ IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk 5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== -----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 
136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 
+IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. 
+# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF 
+dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB 
+aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global 
ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- diff --git a/pipenv/vendor/certifi/core.py b/pipenv/vendor/certifi/core.py index 56b52a3c8f..5d2b8cd32f 100644 --- a/pipenv/vendor/certifi/core.py +++ b/pipenv/vendor/certifi/core.py @@ -9,7 +9,36 @@ import os try: - from importlib.resources import read_text + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where(): + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a 
zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + except ImportError: # This fallback will work for Python versions prior to 3.7 that lack the # importlib.resources module but relies on the existing `where` function @@ -19,11 +48,12 @@ def read_text(_module, _path, encoding="ascii"): with open(where(), "r", encoding=encoding) as data: return data.read() + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. 
+ def where(): + f = os.path.dirname(__file__) -def where(): - f = os.path.dirname(__file__) - - return os.path.join(f, "cacert.pem") + return os.path.join(f, "cacert.pem") def contents(): diff --git a/pipenv/vendor/colorama/__init__.py b/pipenv/vendor/colorama/__init__.py index 34c263cc8b..b149ed79b0 100644 --- a/pipenv/vendor/colorama/__init__.py +++ b/pipenv/vendor/colorama/__init__.py @@ -3,4 +3,4 @@ from .ansi import Fore, Back, Style, Cursor from .ansitowin32 import AnsiToWin32 -__version__ = '0.4.3' +__version__ = '0.4.4' diff --git a/pipenv/vendor/colorama/ansi.py b/pipenv/vendor/colorama/ansi.py index 78776588db..11ec695ff7 100644 --- a/pipenv/vendor/colorama/ansi.py +++ b/pipenv/vendor/colorama/ansi.py @@ -6,7 +6,7 @@ CSI = '\033[' OSC = '\033]' -BEL = '\007' +BEL = '\a' def code_to_chars(code): diff --git a/pipenv/vendor/colorama/ansitowin32.py b/pipenv/vendor/colorama/ansitowin32.py index 359c92be50..6039a05432 100644 --- a/pipenv/vendor/colorama/ansitowin32.py +++ b/pipenv/vendor/colorama/ansitowin32.py @@ -3,7 +3,7 @@ import sys import os -from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style +from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL from .winterm import WinTerm, WinColor, WinStyle from .win32 import windll, winapi_test @@ -68,7 +68,7 @@ class AnsiToWin32(object): win32 function calls. 
''' ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer - ANSI_OSC_RE = re.compile('\001?\033\\]((?:.|;)*?)(\x07)\002?') # Operating System Command + ANSI_OSC_RE = re.compile('\001?\033\\]([^\a]*)(\a)\002?') # Operating System Command def __init__(self, wrapped, convert=None, strip=None, autoreset=False): # The wrapped stream (normally sys.stdout or sys.stderr) @@ -247,11 +247,12 @@ def convert_osc(self, text): start, end = match.span() text = text[:start] + text[end:] paramstring, command = match.groups() - if command in '\x07': # \x07 = BEL - params = paramstring.split(";") - # 0 - change title and icon (we will only change title) - # 1 - change icon (we don't support this) - # 2 - change title - if params[0] in '02': - winterm.set_title(params[1]) + if command == BEL: + if paramstring.count(";") == 1: + params = paramstring.split(";") + # 0 - change title and icon (we will only change title) + # 1 - change icon (we don't support this) + # 2 - change title + if params[0] in '02': + winterm.set_title(params[1]) return text diff --git a/pipenv/vendor/distlib/__init__.py b/pipenv/vendor/distlib/__init__.py index e19aebdc4c..63d916e345 100644 --- a/pipenv/vendor/distlib/__init__.py +++ b/pipenv/vendor/distlib/__init__.py @@ -6,7 +6,7 @@ # import logging -__version__ = '0.3.0' +__version__ = '0.3.1' class DistlibException(Exception): pass diff --git a/pipenv/vendor/distlib/_backport/shutil.py b/pipenv/vendor/distlib/_backport/shutil.py index 159e49ee8c..10ed362539 100644 --- a/pipenv/vendor/distlib/_backport/shutil.py +++ b/pipenv/vendor/distlib/_backport/shutil.py @@ -14,7 +14,10 @@ import stat from os.path import abspath import fnmatch -import collections +try: + from collections.abc import Callable +except ImportError: + from collections import Callable import errno from . 
import tarfile @@ -528,7 +531,7 @@ def register_archive_format(name, function, extra_args=None, description=''): """ if extra_args is None: extra_args = [] - if not isinstance(function, collections.Callable): + if not isinstance(function, Callable): raise TypeError('The %s object is not callable' % function) if not isinstance(extra_args, (tuple, list)): raise TypeError('extra_args needs to be a sequence') @@ -621,7 +624,7 @@ def _check_unpack_options(extensions, function, extra_args): raise RegistryError(msg % (extension, existing_extensions[extension])) - if not isinstance(function, collections.Callable): + if not isinstance(function, Callable): raise TypeError('The registered function must be a callable') diff --git a/pipenv/vendor/distlib/compat.py b/pipenv/vendor/distlib/compat.py index ff328c8ee4..c316fd973a 100644 --- a/pipenv/vendor/distlib/compat.py +++ b/pipenv/vendor/distlib/compat.py @@ -319,7 +319,7 @@ def python_implementation(): try: callable = callable except NameError: # pragma: no cover - from collections import Callable + from collections.abc import Callable def callable(obj): return isinstance(obj, Callable) diff --git a/pipenv/vendor/distlib/database.py b/pipenv/vendor/distlib/database.py index c16c0c8d9e..0a90c300ba 100644 --- a/pipenv/vendor/distlib/database.py +++ b/pipenv/vendor/distlib/database.py @@ -550,7 +550,7 @@ def __init__(self, path, metadata=None, env=None): r = finder.find(WHEEL_METADATA_FILENAME) # Temporary - for legacy support if r is None: - r = finder.find('METADATA') + r = finder.find(LEGACY_METADATA_FILENAME) if r is None: raise ValueError('no %s found in %s' % (METADATA_FILENAME, path)) diff --git a/pipenv/vendor/distlib/metadata.py b/pipenv/vendor/distlib/metadata.py index 2d61378e99..6d5e236090 100644 --- a/pipenv/vendor/distlib/metadata.py +++ b/pipenv/vendor/distlib/metadata.py @@ -5,7 +5,7 @@ # """Implementation of the Metadata for Python packages PEPs. 
-Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental). +Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and withdrawn 2.0). """ from __future__ import unicode_literals @@ -194,38 +194,12 @@ def _has_marker(keys, markers): return '2.0' +# This follows the rules about transforming keys as described in +# https://www.python.org/dev/peps/pep-0566/#id17 _ATTR2FIELD = { - 'metadata_version': 'Metadata-Version', - 'name': 'Name', - 'version': 'Version', - 'platform': 'Platform', - 'supported_platform': 'Supported-Platform', - 'summary': 'Summary', - 'description': 'Description', - 'keywords': 'Keywords', - 'home_page': 'Home-page', - 'author': 'Author', - 'author_email': 'Author-email', - 'maintainer': 'Maintainer', - 'maintainer_email': 'Maintainer-email', - 'license': 'License', - 'classifier': 'Classifier', - 'download_url': 'Download-URL', - 'obsoletes_dist': 'Obsoletes-Dist', - 'provides_dist': 'Provides-Dist', - 'requires_dist': 'Requires-Dist', - 'setup_requires_dist': 'Setup-Requires-Dist', - 'requires_python': 'Requires-Python', - 'requires_external': 'Requires-External', - 'requires': 'Requires', - 'provides': 'Provides', - 'obsoletes': 'Obsoletes', - 'project_url': 'Project-URL', - 'private_version': 'Private-Version', - 'obsoleted_by': 'Obsoleted-By', - 'extension': 'Extension', - 'provides_extra': 'Provides-Extra', + name.lower().replace("-", "_"): name for name in _ALL_FIELDS } +_FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()} _PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') _VERSIONS_FIELDS = ('Requires-Python',) @@ -262,7 +236,7 @@ def _get_name_and_version(name, version, for_filename=False): class LegacyMetadata(object): """The legacy metadata of a release. - Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can + Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). 
You can instantiate the class with one of these arguments (or none): - *path*, the path to a metadata file - *fileobj* give a file-like object with metadata as content @@ -381,6 +355,11 @@ def read_file(self, fileob): value = msg[field] if value is not None and value != 'UNKNOWN': self.set(field, value) + + # PEP 566 specifies that the body be used for the description, if + # available + body = msg.get_payload() + self["Description"] = body if body else self["Description"] # logger.debug('Attempting to set metadata for %s', self) # self.set_metadata_version() @@ -567,57 +546,21 @@ def todict(self, skip_missing=False): Field names will be converted to use the underscore-lowercase style instead of hyphen-mixed case (i.e. home_page instead of Home-page). + This is as per https://www.python.org/dev/peps/pep-0566/#id17. """ self.set_metadata_version() - mapping_1_0 = ( - ('metadata_version', 'Metadata-Version'), - ('name', 'Name'), - ('version', 'Version'), - ('summary', 'Summary'), - ('home_page', 'Home-page'), - ('author', 'Author'), - ('author_email', 'Author-email'), - ('license', 'License'), - ('description', 'Description'), - ('keywords', 'Keywords'), - ('platform', 'Platform'), - ('classifiers', 'Classifier'), - ('download_url', 'Download-URL'), - ) + fields = _version2fieldlist(self['Metadata-Version']) data = {} - for key, field_name in mapping_1_0: + + for field_name in fields: if not skip_missing or field_name in self._fields: - data[key] = self[field_name] - - if self['Metadata-Version'] == '1.2': - mapping_1_2 = ( - ('requires_dist', 'Requires-Dist'), - ('requires_python', 'Requires-Python'), - ('requires_external', 'Requires-External'), - ('provides_dist', 'Provides-Dist'), - ('obsoletes_dist', 'Obsoletes-Dist'), - ('project_url', 'Project-URL'), - ('maintainer', 'Maintainer'), - ('maintainer_email', 'Maintainer-email'), - ) - for key, field_name in mapping_1_2: - if not skip_missing or field_name in self._fields: - if key != 'project_url': - data[key] = 
self[field_name] - else: - data[key] = [','.join(u) for u in self[field_name]] - - elif self['Metadata-Version'] == '1.1': - mapping_1_1 = ( - ('provides', 'Provides'), - ('requires', 'Requires'), - ('obsoletes', 'Obsoletes'), - ) - for key, field_name in mapping_1_1: - if not skip_missing or field_name in self._fields: + key = _FIELD2ATTR[field_name] + if key != 'project_url': data[key] = self[field_name] + else: + data[key] = [','.join(u) for u in self[field_name]] return data @@ -1003,10 +946,14 @@ def _from_legacy(self): LEGACY_MAPPING = { 'name': 'Name', 'version': 'Version', - 'license': 'License', + ('extensions', 'python.details', 'license'): 'License', 'summary': 'Summary', 'description': 'Description', - 'classifiers': 'Classifier', + ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page', + ('extensions', 'python.project', 'contacts', 0, 'name'): 'Author', + ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email', + 'source_url': 'Download-URL', + ('extensions', 'python.details', 'classifiers'): 'Classifier', } def _to_legacy(self): @@ -1034,16 +981,29 @@ def process_entries(entries): assert self._data and not self._legacy result = LegacyMetadata() nmd = self._data + # import pdb; pdb.set_trace() for nk, ok in self.LEGACY_MAPPING.items(): - if nk in nmd: - result[ok] = nmd[nk] + if not isinstance(nk, tuple): + if nk in nmd: + result[ok] = nmd[nk] + else: + d = nmd + found = True + for k in nk: + try: + d = d[k] + except (KeyError, IndexError): + found = False + break + if found: + result[ok] = d r1 = process_entries(self.run_requires + self.meta_requires) r2 = process_entries(self.build_requires + self.dev_requires) if self.extras: result['Provides-Extra'] = sorted(self.extras) result['Requires-Dist'] = sorted(r1) result['Setup-Requires-Dist'] = sorted(r2) - # TODO: other fields such as contacts + # TODO: any other fields wanted return result def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): diff 
--git a/pipenv/vendor/distlib/scripts.py b/pipenv/vendor/distlib/scripts.py index 5185974186..03f8f21e0f 100644 --- a/pipenv/vendor/distlib/scripts.py +++ b/pipenv/vendor/distlib/scripts.py @@ -48,7 +48,7 @@ ''' -def _enquote_executable(executable): +def enquote_executable(executable): if ' ' in executable: # make sure we quote only the executable in case of env # for example /usr/bin/env "/dir with spaces/bin/jython" @@ -63,6 +63,8 @@ def _enquote_executable(executable): executable = '"%s"' % executable return executable +# Keep the old name around (for now), as there is at least one project using it! +_enquote_executable = enquote_executable class ScriptMaker(object): """ @@ -88,6 +90,7 @@ def __init__(self, source_dir, target_dir, add_launchers=True, self._is_nt = os.name == 'nt' or ( os.name == 'java' and os._name == 'nt') + self.version_info = sys.version_info def _get_alternate_executable(self, executable, options): if options.get('gui', False) and self._is_nt: # pragma: no cover @@ -185,7 +188,7 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): # If the user didn't specify an executable, it may be necessary to # cater for executable paths with spaces (not uncommon on Windows) if enquote: - executable = _enquote_executable(executable) + executable = enquote_executable(executable) # Issue #51: don't use fsencode, since we later try to # check that the shebang is decodable using utf-8. 
executable = executable.encode('utf-8') @@ -293,10 +296,10 @@ def _make_script(self, entry, filenames, options=None): if '' in self.variants: scriptnames.add(name) if 'X' in self.variants: - scriptnames.add('%s%s' % (name, sys.version_info[0])) + scriptnames.add('%s%s' % (name, self.version_info[0])) if 'X.Y' in self.variants: - scriptnames.add('%s-%s.%s' % (name, sys.version_info[0], - sys.version_info[1])) + scriptnames.add('%s-%s.%s' % (name, self.version_info[0], + self.version_info[1])) if options and options.get('gui', False): ext = 'pyw' else: diff --git a/pipenv/vendor/distlib/wheel.py b/pipenv/vendor/distlib/wheel.py index bd179383ac..1e2c7a020c 100644 --- a/pipenv/vendor/distlib/wheel.py +++ b/pipenv/vendor/distlib/wheel.py @@ -26,7 +26,8 @@ from . import __version__, DistlibException from .compat import sysconfig, ZipFile, fsdecode, text_type, filter from .database import InstalledDistribution -from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME +from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME) from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, cached_property, get_cache_base, read_exports, tempdir) from .version import NormalizedVersion, UnsupportedVersionError @@ -221,10 +222,12 @@ def metadata(self): wheel_metadata = self.get_wheel_metadata(zf) wv = wheel_metadata['Wheel-Version'].split('.', 1) file_version = tuple([int(i) for i in wv]) - if file_version < (1, 1): - fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, 'METADATA'] - else: - fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + # if file_version < (1, 1): + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, + # LEGACY_METADATA_FILENAME] + # else: + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME] result = None for fn in fns: try: @@ -299,10 +302,9 @@ def get_hash(self, data, hash_kind=None): return hash_kind, result def 
write_record(self, records, record_path, base): - records = list(records) # make a copy for sorting + records = list(records) # make a copy, as mutated p = to_posix(os.path.relpath(record_path, base)) records.append((p, '', '')) - records.sort() with CSVWriter(record_path) as writer: for row in records: writer.writerow(row) @@ -425,6 +427,18 @@ def build(self, paths, tags=None, wheel_version=None): ap = to_posix(os.path.join(info_dir, 'WHEEL')) archive_paths.append((ap, p)) + # sort the entries by archive path. Not needed by any spec, but it + # keeps the archive listing and RECORD tidier than they would otherwise + # be. Use the number of path segments to keep directory entries together, + # and keep the dist-info stuff at the end. + def sorter(t): + ap = t[0] + n = ap.count('/') + if '.dist-info' in ap: + n += 10000 + return (n, ap) + archive_paths = sorted(archive_paths, key=sorter) + # Now, at last, RECORD. # Paths in here are archive paths - nothing else makes sense. self.write_records((distinfo, info_dir), libdir, archive_paths) @@ -476,7 +490,7 @@ def install(self, paths, maker, **kwargs): data_dir = '%s.data' % name_ver info_dir = '%s.dist-info' % name_ver - metadata_name = posixpath.join(info_dir, METADATA_FILENAME) + metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') record_name = posixpath.join(info_dir, 'RECORD') @@ -619,7 +633,7 @@ def install(self, paths, maker, **kwargs): for v in epdata[k].values(): s = '%s:%s' % (v.prefix, v.suffix) if v.flags: - s += ' %s' % v.flags + s += ' [%s]' % ','.join(v.flags) d[v.name] = s except Exception: logger.warning('Unable to read legacy script ' @@ -773,7 +787,7 @@ def verify(self): data_dir = '%s.data' % name_ver info_dir = '%s.dist-info' % name_ver - metadata_name = posixpath.join(info_dir, METADATA_FILENAME) + metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') 
record_name = posixpath.join(info_dir, 'RECORD') @@ -842,7 +856,7 @@ def update(self, modifier, dest_dir=None, **kwargs): def get_version(path_map, info_dir): version = path = None - key = '%s/%s' % (info_dir, METADATA_FILENAME) + key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME) if key not in path_map: key = '%s/PKG-INFO' % info_dir if key in path_map: @@ -868,7 +882,7 @@ def update_version(version, path): if updated: md = Metadata(path=path) md.version = updated - legacy = not path.endswith(METADATA_FILENAME) + legacy = path.endswith(LEGACY_METADATA_FILENAME) md.write(path=path, legacy=legacy) logger.debug('Version updated from %r to %r', version, updated) diff --git a/pipenv/vendor/dotenv/cli.py b/pipenv/vendor/dotenv/cli.py index d2a021a52b..e17d248f32 100644 --- a/pipenv/vendor/dotenv/cli.py +++ b/pipenv/vendor/dotenv/cli.py @@ -9,7 +9,7 @@ 'Run pip install "python-dotenv[cli]" to fix this.') sys.exit(1) -from .compat import IS_TYPE_CHECKING +from .compat import IS_TYPE_CHECKING, to_env from .main import dotenv_values, get_key, set_key, unset_key from .version import __version__ @@ -19,19 +19,23 @@ @click.group() @click.option('-f', '--file', default=os.path.join(os.getcwd(), '.env'), - type=click.Path(exists=True), + type=click.Path(file_okay=True), help="Location of the .env file, defaults to .env file in current working directory.") @click.option('-q', '--quote', default='always', type=click.Choice(['always', 'never', 'auto']), help="Whether to quote or not the variable values. Default mode is always. 
This does not affect parsing.") +@click.option('-e', '--export', default=False, + type=click.BOOL, + help="Whether to write the dot file as an executable bash script.") @click.version_option(version=__version__) @click.pass_context -def cli(ctx, file, quote): - # type: (click.Context, Any, Any) -> None +def cli(ctx, file, quote, export): + # type: (click.Context, Any, Any, Any) -> None '''This script is used to set, get or unset values from a .env file.''' ctx.obj = {} - ctx.obj['FILE'] = file ctx.obj['QUOTE'] = quote + ctx.obj['EXPORT'] = export + ctx.obj['FILE'] = file @cli.command() @@ -40,6 +44,11 @@ def list(ctx): # type: (click.Context) -> None '''Display all the stored key/value.''' file = ctx.obj['FILE'] + if not os.path.isfile(file): + raise click.BadParameter( + 'Path "%s" does not exist.' % (file), + ctx=ctx + ) dotenv_as_dict = dotenv_values(file) for k, v in dotenv_as_dict.items(): click.echo('%s=%s' % (k, v)) @@ -54,7 +63,8 @@ def set(ctx, key, value): '''Store the given key/value.''' file = ctx.obj['FILE'] quote = ctx.obj['QUOTE'] - success, key, value = set_key(file, key, value, quote) + export = ctx.obj['EXPORT'] + success, key, value = set_key(file, key, value, quote, export) if success: click.echo('%s=%s' % (key, value)) else: @@ -68,6 +78,11 @@ def get(ctx, key): # type: (click.Context, Any) -> None '''Retrieve the value for the given key.''' file = ctx.obj['FILE'] + if not os.path.isfile(file): + raise click.BadParameter( + 'Path "%s" does not exist.' % (file), + ctx=ctx + ) stored_value = get_key(file, key) if stored_value: click.echo('%s=%s' % (key, stored_value)) @@ -97,11 +112,17 @@ def run(ctx, commandline): # type: (click.Context, List[str]) -> None """Run command with environment variables present.""" file = ctx.obj['FILE'] - dotenv_as_dict = dotenv_values(file) + if not os.path.isfile(file): + raise click.BadParameter( + 'Invalid value for \'-f\' "%s" does not exist.' 
% (file), + ctx=ctx + ) + dotenv_as_dict = {to_env(k): to_env(v) for (k, v) in dotenv_values(file).items() if v is not None} + if not commandline: click.echo('No command given.') exit(1) - ret = run_command(commandline, dotenv_as_dict) # type: ignore + ret = run_command(commandline, dotenv_as_dict) exit(ret) diff --git a/pipenv/vendor/dotenv/compat.py b/pipenv/vendor/dotenv/compat.py index 61f555df93..f8089bf4c0 100644 --- a/pipenv/vendor/dotenv/compat.py +++ b/pipenv/vendor/dotenv/compat.py @@ -12,7 +12,7 @@ def is_type_checking(): # type: () -> bool try: from typing import TYPE_CHECKING - except ImportError: # pragma: no cover + except ImportError: return False return TYPE_CHECKING diff --git a/pipenv/vendor/dotenv/main.py b/pipenv/vendor/dotenv/main.py index 06a210e197..58a23f3dfd 100644 --- a/pipenv/vendor/dotenv/main.py +++ b/pipenv/vendor/dotenv/main.py @@ -2,21 +2,23 @@ from __future__ import absolute_import, print_function, unicode_literals import io +import logging import os import re import shutil import sys import tempfile -import warnings from collections import OrderedDict from contextlib import contextmanager -from .compat import StringIO, PY2, to_env, IS_TYPE_CHECKING -from .parser import parse_stream +from .compat import IS_TYPE_CHECKING, PY2, StringIO, to_env +from .parser import Binding, parse_stream + +logger = logging.getLogger(__name__) if IS_TYPE_CHECKING: from typing import ( - Dict, Iterator, Match, Optional, Pattern, Union, Text, IO, Tuple + Dict, Iterable, Iterator, Match, Optional, Pattern, Union, Text, IO, Tuple ) if sys.version_info >= (3, 6): _PathLike = os.PathLike @@ -28,17 +30,39 @@ else: _StringIO = StringIO[Text] -__posix_variable = re.compile(r'\$\{[^\}]*\}') # type: Pattern[Text] +__posix_variable = re.compile( + r""" + \$\{ + (?P[^\}:]*) + (?::- + (?P[^\}]*) + )? 
+ \} + """, + re.VERBOSE, +) # type: Pattern[Text] + + +def with_warn_for_invalid_lines(mappings): + # type: (Iterator[Binding]) -> Iterator[Binding] + for mapping in mappings: + if mapping.error: + logger.warning( + "Python-dotenv could not parse statement starting at line %s", + mapping.original.line, + ) + yield mapping class DotEnv(): - def __init__(self, dotenv_path, verbose=False, encoding=None): - # type: (Union[Text, _PathLike, _StringIO], bool, Union[None, Text]) -> None + def __init__(self, dotenv_path, verbose=False, encoding=None, interpolate=True): + # type: (Union[Text, _PathLike, _StringIO], bool, Union[None, Text], bool) -> None self.dotenv_path = dotenv_path # type: Union[Text,_PathLike, _StringIO] - self._dict = None # type: Optional[Dict[Text, Text]] + self._dict = None # type: Optional[Dict[Text, Optional[Text]]] self.verbose = verbose # type: bool self.encoding = encoding # type: Union[None, Text] + self.interpolate = interpolate # type: bool @contextmanager def _get_stream(self): @@ -50,24 +74,28 @@ def _get_stream(self): yield stream else: if self.verbose: - warnings.warn("File doesn't exist {}".format(self.dotenv_path)) # type: ignore + logger.info("Python-dotenv could not find configuration file %s.", self.dotenv_path or '.env') yield StringIO('') def dict(self): - # type: () -> Dict[Text, Text] + # type: () -> Dict[Text, Optional[Text]] """Return dotenv as dict""" if self._dict: return self._dict - values = OrderedDict(self.parse()) - self._dict = resolve_nested_variables(values) - return self._dict + if self.interpolate: + values = resolve_nested_variables(self.parse()) + else: + values = OrderedDict(self.parse()) + + self._dict = values + return values def parse(self): - # type: () -> Iterator[Tuple[Text, Text]] + # type: () -> Iterator[Tuple[Text, Optional[Text]]] with self._get_stream() as stream: - for mapping in parse_stream(stream): - if mapping.key is not None and mapping.value is not None: + for mapping in 
with_warn_for_invalid_lines(parse_stream(stream)): + if mapping.key is not None: yield mapping.key, mapping.value def set_as_environment_variables(self, override=False): @@ -78,7 +106,8 @@ def set_as_environment_variables(self, override=False): for k, v in self.dict().items(): if k in os.environ and not override: continue - os.environ[to_env(k)] = to_env(v) + if v is not None: + os.environ[to_env(k)] = to_env(v) return True @@ -92,7 +121,7 @@ def get(self, key): return data[key] if self.verbose: - warnings.warn("key %s not found in %s." % (key, self.dotenv_path)) # type: ignore + logger.warning("Key %s not found in %s.", key, self.dotenv_path) return None @@ -111,6 +140,9 @@ def get_key(dotenv_path, key_to_get): def rewrite(path): # type: (_PathLike) -> Iterator[Tuple[IO[Text], IO[Text]]] try: + if not os.path.isfile(path): + with io.open(path, "w+") as source: + source.write("") with tempfile.NamedTemporaryFile(mode="w+", delete=False) as dest: with io.open(path) as source: yield (source, dest) # type: ignore @@ -122,8 +154,8 @@ def rewrite(path): shutil.move(dest.name, path) -def set_key(dotenv_path, key_to_set, value_to_set, quote_mode="always"): - # type: (_PathLike, Text, Text, Text) -> Tuple[Optional[bool], Text, Text] +def set_key(dotenv_path, key_to_set, value_to_set, quote_mode="always", export=False): + # type: (_PathLike, Text, Text, Text, bool) -> Tuple[Optional[bool], Text, Text] """ Adds or Updates a key/value to the given .env @@ -131,24 +163,27 @@ def set_key(dotenv_path, key_to_set, value_to_set, quote_mode="always"): an orphan .env somewhere in the filesystem """ value_to_set = value_to_set.strip("'").strip('"') - if not os.path.exists(dotenv_path): - warnings.warn("can't write to %s - it doesn't exist." 
% dotenv_path) # type: ignore - return None, key_to_set, value_to_set if " " in value_to_set: quote_mode = "always" - line_template = '{}="{}"\n' if quote_mode == "always" else '{}={}\n' - line_out = line_template.format(key_to_set, value_to_set) + if quote_mode == "always": + value_out = '"{}"'.format(value_to_set.replace('"', '\\"')) + else: + value_out = value_to_set + if export: + line_out = 'export {}={}\n'.format(key_to_set, value_out) + else: + line_out = "{}={}\n".format(key_to_set, value_out) with rewrite(dotenv_path) as (source, dest): replaced = False - for mapping in parse_stream(source): + for mapping in with_warn_for_invalid_lines(parse_stream(source)): if mapping.key == key_to_set: dest.write(line_out) replaced = True else: - dest.write(mapping.original) + dest.write(mapping.original.string) if not replaced: dest.write(line_out) @@ -164,48 +199,45 @@ def unset_key(dotenv_path, key_to_unset, quote_mode="always"): If the given key doesn't exist in the .env, fails """ if not os.path.exists(dotenv_path): - warnings.warn("can't delete from %s - it doesn't exist." % dotenv_path) # type: ignore + logger.warning("Can't delete from %s - it doesn't exist.", dotenv_path) return None, key_to_unset removed = False with rewrite(dotenv_path) as (source, dest): - for mapping in parse_stream(source): + for mapping in with_warn_for_invalid_lines(parse_stream(source)): if mapping.key == key_to_unset: removed = True else: - dest.write(mapping.original) + dest.write(mapping.original.string) if not removed: - warnings.warn("key %s not removed from %s - key doesn't exist." % (key_to_unset, dotenv_path)) # type: ignore + logger.warning("Key %s not removed from %s - key doesn't exist.", key_to_unset, dotenv_path) return None, key_to_unset return removed, key_to_unset def resolve_nested_variables(values): - # type: (Dict[Text, Text]) -> Dict[Text, Text] - def _replacement(name): - # type: (Text) -> Text - """ - get appropriate value for a variable name. 
- first search in environ, if not found, - then look into the dotenv variables - """ - ret = os.getenv(name, new_values.get(name, "")) - return ret - - def _re_sub_callback(match_object): + # type: (Iterable[Tuple[Text, Optional[Text]]]) -> Dict[Text, Optional[Text]] + def _replacement(name, default): + # type: (Text, Optional[Text]) -> Text + default = default if default is not None else "" + ret = new_values.get(name, os.getenv(name, default)) + return ret # type: ignore + + def _re_sub_callback(match): # type: (Match[Text]) -> Text """ From a match object gets the variable name and returns the correct replacement """ - return _replacement(match_object.group()[2:-1]) + matches = match.groupdict() + return _replacement(name=matches["name"], default=matches["default"]) # type: ignore new_values = {} - for k, v in values.items(): - new_values[k] = __posix_variable.sub(_re_sub_callback, v) + for (k, v) in values: + new_values[k] = __posix_variable.sub(_re_sub_callback, v) if v is not None else None return new_values @@ -242,7 +274,7 @@ def _is_interactive(): main = __import__('__main__', None, None, fromlist=['__file__']) return not hasattr(main, '__file__') - if usecwd or _is_interactive(): + if usecwd or _is_interactive() or getattr(sys, 'frozen', False): # Should work without __file__, e.g. in REPL or IPython notebook. 
path = os.getcwd() else: @@ -257,6 +289,7 @@ def _is_interactive(): current_file = __file__ while frame.f_code.co_filename == current_file: + assert frame.f_back is not None frame = frame.f_back frame_filename = frame.f_code.co_filename path = os.path.dirname(os.path.abspath(frame_filename)) @@ -272,8 +305,8 @@ def _is_interactive(): return '' -def load_dotenv(dotenv_path=None, stream=None, verbose=False, override=False, **kwargs): - # type: (Union[Text, _PathLike, None], Optional[_StringIO], bool, bool, Union[None, Text]) -> bool +def load_dotenv(dotenv_path=None, stream=None, verbose=False, override=False, interpolate=True, **kwargs): + # type: (Union[Text, _PathLike, None], Optional[_StringIO], bool, bool, bool, Union[None, Text]) -> bool """Parse a .env file and then load all the variables found as environment variables. - *dotenv_path*: absolute or relative path to .env file. @@ -283,10 +316,10 @@ def load_dotenv(dotenv_path=None, stream=None, verbose=False, override=False, ** Defaults to `False`. 
""" f = dotenv_path or stream or find_dotenv() - return DotEnv(f, verbose=verbose, **kwargs).set_as_environment_variables(override=override) + return DotEnv(f, verbose=verbose, interpolate=interpolate, **kwargs).set_as_environment_variables(override=override) -def dotenv_values(dotenv_path=None, stream=None, verbose=False, **kwargs): - # type: (Union[Text, _PathLike, None], Optional[_StringIO], bool, Union[None, Text]) -> Dict[Text, Text] +def dotenv_values(dotenv_path=None, stream=None, verbose=False, interpolate=True, **kwargs): + # type: (Union[Text, _PathLike, None], Optional[_StringIO], bool, bool, Union[None, Text]) -> Dict[Text, Optional[Text]] # noqa: E501 f = dotenv_path or stream or find_dotenv() - return DotEnv(f, verbose=verbose, **kwargs).dict() + return DotEnv(f, verbose=verbose, interpolate=interpolate, **kwargs).dict() diff --git a/pipenv/vendor/dotenv/parser.py b/pipenv/vendor/dotenv/parser.py index 034ebfded8..5cb1cdfa65 100644 --- a/pipenv/vendor/dotenv/parser.py +++ b/pipenv/vendor/dotenv/parser.py @@ -1,8 +1,7 @@ import codecs import re -from .compat import to_text, IS_TYPE_CHECKING - +from .compat import IS_TYPE_CHECKING, to_text if IS_TYPE_CHECKING: from typing import ( # noqa:F401 @@ -16,16 +15,18 @@ def make_regex(string, extra_flags=0): return re.compile(to_text(string), re.UNICODE | extra_flags) -_whitespace = make_regex(r"\s*", extra_flags=re.MULTILINE) +_newline = make_regex(r"(\r\n|\n|\r)") +_multiline_whitespace = make_regex(r"\s*", extra_flags=re.MULTILINE) +_whitespace = make_regex(r"[^\S\r\n]*") _export = make_regex(r"(?:export[^\S\r\n]+)?") _single_quoted_key = make_regex(r"'([^']+)'") _unquoted_key = make_regex(r"([^=\#\s]+)") -_equal_sign = make_regex(r"[^\S\r\n]*=[^\S\r\n]*") +_equal_sign = make_regex(r"(=[^\S\r\n]*)") _single_quoted_value = make_regex(r"'((?:\\'|[^'])*)'") _double_quoted_value = make_regex(r'"((?:\\"|[^"])*)"') -_unquoted_value_part = make_regex(r"([^ \r\n]*)") -_comment = make_regex(r"(?:\s*#[^\r\n]*)?") 
-_end_of_line = make_regex(r"[^\S\r\n]*(?:\r\n|\n|\r)?") +_unquoted_value = make_regex(r"([^\r\n]*)") +_comment = make_regex(r"(?:[^\S\r\n]*#[^\r\n]*)?") +_end_of_line = make_regex(r"[^\S\r\n]*(?:\r\n|\n|\r|$)") _rest_of_line = make_regex(r"[^\r\n]*(?:\r|\n|\r\n)?") _double_quote_escapes = make_regex(r"\\[\\'\"abfnrtv]") _single_quote_escapes = make_regex(r"\\[\\']") @@ -36,14 +37,64 @@ def make_regex(string, extra_flags=0): # when we are type checking, and the linter is upset if we # re-import import typing - Binding = typing.NamedTuple("Binding", [("key", typing.Optional[typing.Text]), - ("value", typing.Optional[typing.Text]), - ("original", typing.Text)]) -except ImportError: # pragma: no cover + + Original = typing.NamedTuple( + "Original", + [ + ("string", typing.Text), + ("line", int), + ], + ) + + Binding = typing.NamedTuple( + "Binding", + [ + ("key", typing.Optional[typing.Text]), + ("value", typing.Optional[typing.Text]), + ("original", Original), + ("error", bool), + ], + ) +except (ImportError, AttributeError): from collections import namedtuple - Binding = namedtuple("Binding", ["key", # type: ignore - "value", - "original"]) # type: Tuple[Optional[Text], Optional[Text], Text] + Original = namedtuple( # type: ignore + "Original", + [ + "string", + "line", + ], + ) + Binding = namedtuple( # type: ignore + "Binding", + [ + "key", + "value", + "original", + "error", + ], + ) + + +class Position: + def __init__(self, chars, line): + # type: (int, int) -> None + self.chars = chars + self.line = line + + @classmethod + def start(cls): + # type: () -> Position + return cls(chars=0, line=1) + + def set(self, other): + # type: (Position) -> None + self.chars = other.chars + self.line = other.line + + def advance(self, string): + # type: (Text) -> None + self.chars += len(string) + self.line += len(re.findall(_newline, string)) class Error(Exception): @@ -54,39 +105,42 @@ class Reader: def __init__(self, stream): # type: (IO[Text]) -> None self.string = 
stream.read() - self.position = 0 - self.mark = 0 + self.position = Position.start() + self.mark = Position.start() def has_next(self): # type: () -> bool - return self.position < len(self.string) + return self.position.chars < len(self.string) def set_mark(self): # type: () -> None - self.mark = self.position + self.mark.set(self.position) def get_marked(self): - # type: () -> Text - return self.string[self.mark:self.position] + # type: () -> Original + return Original( + string=self.string[self.mark.chars:self.position.chars], + line=self.mark.line, + ) def peek(self, count): # type: (int) -> Text - return self.string[self.position:self.position + count] + return self.string[self.position.chars:self.position.chars + count] def read(self, count): # type: (int) -> Text - result = self.string[self.position:self.position + count] + result = self.string[self.position.chars:self.position.chars + count] if len(result) < count: raise Error("read: End of string") - self.position += count + self.position.advance(result) return result def read_regex(self, regex): # type: (Pattern[Text]) -> Sequence[Text] - match = regex.match(self.string, self.position) + match = regex.match(self.string, self.position.chars) if match is None: raise Error("read_regex: Pattern not found") - self.position = match.end() + self.position.advance(self.string[match.start():match.end()]) return match.groups() @@ -100,9 +154,11 @@ def decode_match(match): def parse_key(reader): - # type: (Reader) -> Text + # type: (Reader) -> Optional[Text] char = reader.peek(1) - if char == "'": + if char == "#": + return None + elif char == "'": (key,) = reader.read_regex(_single_quoted_key) else: (key,) = reader.read_regex(_unquoted_key) @@ -111,14 +167,8 @@ def parse_key(reader): def parse_unquoted_value(reader): # type: (Reader) -> Text - value = u"" - while True: - (part,) = reader.read_regex(_unquoted_value_part) - value += part - after = reader.peek(2) - if len(after) < 2 or after[0] in u"\r\n" or after[1] in 
u" #\r\n": - return value - value += reader.read(2) + (part,) = reader.read_regex(_unquoted_value) + return re.sub(r"\s+#.*", "", part).rstrip() def parse_value(reader): @@ -140,24 +190,42 @@ def parse_binding(reader): # type: (Reader) -> Binding reader.set_mark() try: - reader.read_regex(_whitespace) + reader.read_regex(_multiline_whitespace) + if not reader.has_next(): + return Binding( + key=None, + value=None, + original=reader.get_marked(), + error=False, + ) reader.read_regex(_export) key = parse_key(reader) - reader.read_regex(_equal_sign) - value = parse_value(reader) + reader.read_regex(_whitespace) + if reader.peek(1) == "=": + reader.read_regex(_equal_sign) + value = parse_value(reader) # type: Optional[Text] + else: + value = None reader.read_regex(_comment) reader.read_regex(_end_of_line) - return Binding(key=key, value=value, original=reader.get_marked()) + return Binding( + key=key, + value=value, + original=reader.get_marked(), + error=False, + ) except Error: reader.read_regex(_rest_of_line) - return Binding(key=None, value=None, original=reader.get_marked()) + return Binding( + key=None, + value=None, + original=reader.get_marked(), + error=True, + ) def parse_stream(stream): - # type:(IO[Text]) -> Iterator[Binding] + # type: (IO[Text]) -> Iterator[Binding] reader = Reader(stream) while reader.has_next(): - try: - yield parse_binding(reader) - except Error: - return + yield parse_binding(reader) diff --git a/pipenv/vendor/dotenv/version.py b/pipenv/vendor/dotenv/version.py index b2385cb400..9da2f8fcca 100644 --- a/pipenv/vendor/dotenv/version.py +++ b/pipenv/vendor/dotenv/version.py @@ -1 +1 @@ -__version__ = "0.10.3" +__version__ = "0.15.0" diff --git a/pipenv/vendor/first.py b/pipenv/vendor/first.py index 479e0bad7e..8cf9d2d1c9 100644 --- a/pipenv/vendor/first.py +++ b/pipenv/vendor/first.py @@ -33,10 +33,10 @@ """ __title__ = 'first' -__version__ = '2.0.1' +__version__ = '2.0.2' __author__ = 'Hynek Schlawack' __license__ = 'MIT' -__copyright__ 
= 'Copyright 2012–2013 Hynek Schlawack' +__copyright__ = 'Copyright 2012 Hynek Schlawack' def first(iterable, default=None, key=None): diff --git a/pipenv/vendor/idna/core.py b/pipenv/vendor/idna/core.py index 9c3bba2ad7..41ec5c711d 100644 --- a/pipenv/vendor/idna/core.py +++ b/pipenv/vendor/idna/core.py @@ -300,6 +300,8 @@ def ulabel(label): label = label.lower() if label.startswith(_alabel_prefix): label = label[len(_alabel_prefix):] + if not label: + raise IDNAError('Malformed A-label, no Punycode eligible content found') if label.decode('ascii')[-1] == '-': raise IDNAError('A-label must not end with a hyphen') else: diff --git a/pipenv/vendor/idna/idnadata.py b/pipenv/vendor/idna/idnadata.py index 2b81c522cf..a284e4c84a 100644 --- a/pipenv/vendor/idna/idnadata.py +++ b/pipenv/vendor/idna/idnadata.py @@ -1,6 +1,6 @@ # This file is automatically generated by tools/idna-data -__version__ = "12.1.0" +__version__ = "13.0.0" scripts = { 'Greek': ( 0x37000000374, @@ -48,16 +48,18 @@ 0x300700003008, 0x30210000302a, 0x30380000303c, - 0x340000004db6, - 0x4e0000009ff0, + 0x340000004dc0, + 0x4e0000009ffd, 0xf9000000fa6e, 0xfa700000fada, - 0x200000002a6d7, + 0x16ff000016ff2, + 0x200000002a6de, 0x2a7000002b735, 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, 0x2f8000002fa1e, + 0x300000003134b, ), 'Hebrew': ( 0x591000005c8, @@ -389,9 +391,9 @@ 0x853: 68, 0x854: 82, 0x855: 68, - 0x856: 85, - 0x857: 85, - 0x858: 85, + 0x856: 82, + 0x857: 82, + 0x858: 82, 0x860: 68, 0x861: 85, 0x862: 68, @@ -432,6 +434,16 @@ 0x8bb: 68, 0x8bc: 68, 0x8bd: 68, + 0x8be: 68, + 0x8bf: 68, + 0x8c0: 68, + 0x8c1: 68, + 0x8c2: 68, + 0x8c3: 68, + 0x8c4: 68, + 0x8c5: 68, + 0x8c6: 68, + 0x8c7: 68, 0x8e2: 85, 0x1806: 85, 0x1807: 68, @@ -756,6 +768,34 @@ 0x10f52: 68, 0x10f53: 68, 0x10f54: 82, + 0x10fb0: 68, + 0x10fb1: 85, + 0x10fb2: 68, + 0x10fb3: 68, + 0x10fb4: 82, + 0x10fb5: 82, + 0x10fb6: 82, + 0x10fb7: 85, + 0x10fb8: 68, + 0x10fb9: 82, + 0x10fba: 82, + 0x10fbb: 68, + 0x10fbc: 68, + 0x10fbd: 82, + 
0x10fbe: 68, + 0x10fbf: 68, + 0x10fc0: 85, + 0x10fc1: 68, + 0x10fc2: 82, + 0x10fc3: 82, + 0x10fc4: 68, + 0x10fc5: 85, + 0x10fc6: 85, + 0x10fc7: 85, + 0x10fc8: 85, + 0x10fc9: 82, + 0x10fca: 68, + 0x10fcb: 76, 0x110bd: 85, 0x110cd: 85, 0x1e900: 68, @@ -1129,7 +1169,7 @@ 0x8400000085c, 0x8600000086b, 0x8a0000008b5, - 0x8b6000008be, + 0x8b6000008c8, 0x8d3000008e2, 0x8e300000958, 0x96000000964, @@ -1188,7 +1228,7 @@ 0xb3c00000b45, 0xb4700000b49, 0xb4b00000b4e, - 0xb5600000b58, + 0xb5500000b58, 0xb5f00000b64, 0xb6600000b70, 0xb7100000b72, @@ -1233,8 +1273,7 @@ 0xce000000ce4, 0xce600000cf0, 0xcf100000cf3, - 0xd0000000d04, - 0xd0500000d0d, + 0xd0000000d0d, 0xd0e00000d11, 0xd1200000d45, 0xd4600000d49, @@ -1243,7 +1282,7 @@ 0xd5f00000d64, 0xd6600000d70, 0xd7a00000d80, - 0xd8200000d84, + 0xd8100000d84, 0xd8500000d97, 0xd9a00000db2, 0xdb300000dbc, @@ -1358,6 +1397,7 @@ 0x1a9000001a9a, 0x1aa700001aa8, 0x1ab000001abe, + 0x1abf00001ac1, 0x1b0000001b4c, 0x1b5000001b5a, 0x1b6b00001b74, @@ -1609,10 +1649,10 @@ 0x30a1000030fb, 0x30fc000030ff, 0x310500003130, - 0x31a0000031bb, + 0x31a0000031c0, 0x31f000003200, - 0x340000004db6, - 0x4e0000009ff0, + 0x340000004dc0, + 0x4e0000009ffd, 0xa0000000a48d, 0xa4d00000a4fe, 0xa5000000a60d, @@ -1727,8 +1767,11 @@ 0xa7bd0000a7be, 0xa7bf0000a7c0, 0xa7c30000a7c4, - 0xa7f70000a7f8, + 0xa7c80000a7c9, + 0xa7ca0000a7cb, + 0xa7f60000a7f8, 0xa7fa0000a828, + 0xa82c0000a82d, 0xa8400000a874, 0xa8800000a8c6, 0xa8d00000a8da, @@ -1753,7 +1796,7 @@ 0xab200000ab27, 0xab280000ab2f, 0xab300000ab5b, - 0xab600000ab68, + 0xab600000ab6a, 0xabc00000abeb, 0xabec0000abee, 0xabf00000abfa, @@ -1827,9 +1870,13 @@ 0x10cc000010cf3, 0x10d0000010d28, 0x10d3000010d3a, + 0x10e8000010eaa, + 0x10eab00010ead, + 0x10eb000010eb2, 0x10f0000010f1d, 0x10f2700010f28, 0x10f3000010f51, + 0x10fb000010fc5, 0x10fe000010ff7, 0x1100000011047, 0x1106600011070, @@ -1838,12 +1885,12 @@ 0x110f0000110fa, 0x1110000011135, 0x1113600011140, - 0x1114400011147, + 0x1114400011148, 0x1115000011174, 
0x1117600011177, 0x11180000111c5, 0x111c9000111cd, - 0x111d0000111db, + 0x111ce000111db, 0x111dc000111dd, 0x1120000011212, 0x1121300011238, @@ -1872,7 +1919,7 @@ 0x1137000011375, 0x114000001144b, 0x114500001145a, - 0x1145e00011460, + 0x1145e00011462, 0x11480000114c6, 0x114c7000114c8, 0x114d0000114da, @@ -1889,7 +1936,14 @@ 0x117300001173a, 0x118000001183b, 0x118c0000118ea, - 0x118ff00011900, + 0x118ff00011907, + 0x119090001190a, + 0x1190c00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193b00011944, + 0x119500001195a, 0x119a0000119a8, 0x119aa000119d8, 0x119da000119e2, @@ -1920,6 +1974,7 @@ 0x11d9300011d99, 0x11da000011daa, 0x11ee000011ef7, + 0x11fb000011fb1, 0x120000001239a, 0x1248000012544, 0x130000001342f, @@ -1939,9 +1994,11 @@ 0x16f4f00016f88, 0x16f8f00016fa0, 0x16fe000016fe2, - 0x16fe300016fe4, + 0x16fe300016fe5, + 0x16ff000016ff2, 0x17000000187f8, - 0x1880000018af3, + 0x1880000018cd6, + 0x18d0000018d09, 0x1b0000001b11f, 0x1b1500001b153, 0x1b1640001b168, @@ -1971,11 +2028,13 @@ 0x1e8d00001e8d7, 0x1e9220001e94c, 0x1e9500001e95a, - 0x200000002a6d7, + 0x1fbf00001fbfa, + 0x200000002a6de, 0x2a7000002b735, 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, + 0x300000003134b, ), 'CONTEXTJ': ( 0x200c0000200e, diff --git a/pipenv/vendor/idna/package_data.py b/pipenv/vendor/idna/package_data.py index b5d8216558..ce1c521d23 100644 --- a/pipenv/vendor/idna/package_data.py +++ b/pipenv/vendor/idna/package_data.py @@ -1,2 +1,2 @@ -__version__ = '2.9' +__version__ = '2.10' diff --git a/pipenv/vendor/idna/uts46data.py b/pipenv/vendor/idna/uts46data.py index 2711136d7d..3766dd49f6 100644 --- a/pipenv/vendor/idna/uts46data.py +++ b/pipenv/vendor/idna/uts46data.py @@ -4,7 +4,7 @@ """IDNA Mapping Table from UTS46.""" -__version__ = "12.1.0" +__version__ = "13.0.0" def _seg_0(): return [ (0x0, '3'), @@ -1074,7 +1074,7 @@ def _seg_10(): (0x8A0, 'V'), (0x8B5, 'X'), (0x8B6, 'V'), - (0x8BE, 'X'), + (0x8C8, 'X'), (0x8D3, 'V'), (0x8E2, 'X'), (0x8E3, 'V'), @@ 
-1205,7 +1205,7 @@ def _seg_11(): (0xB49, 'X'), (0xB4B, 'V'), (0xB4E, 'X'), - (0xB56, 'V'), + (0xB55, 'V'), (0xB58, 'X'), (0xB5C, 'M', u'ଡ଼'), (0xB5D, 'M', u'ଢ଼'), @@ -1299,8 +1299,6 @@ def _seg_12(): (0xCF1, 'V'), (0xCF3, 'X'), (0xD00, 'V'), - (0xD04, 'X'), - (0xD05, 'V'), (0xD0D, 'X'), (0xD0E, 'V'), (0xD11, 'X'), @@ -1314,7 +1312,7 @@ def _seg_12(): (0xD64, 'X'), (0xD66, 'V'), (0xD80, 'X'), - (0xD82, 'V'), + (0xD81, 'V'), (0xD84, 'X'), (0xD85, 'V'), (0xD97, 'X'), @@ -1355,12 +1353,12 @@ def _seg_12(): (0xEA5, 'V'), (0xEA6, 'X'), (0xEA7, 'V'), + (0xEB3, 'M', u'ໍາ'), + (0xEB4, 'V'), ] def _seg_13(): return [ - (0xEB3, 'M', u'ໍາ'), - (0xEB4, 'V'), (0xEBE, 'X'), (0xEC0, 'V'), (0xEC5, 'X'), @@ -1459,12 +1457,12 @@ def _seg_13(): (0x12C8, 'V'), (0x12D7, 'X'), (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), ] def _seg_14(): return [ - (0x1311, 'X'), - (0x1312, 'V'), (0x1316, 'X'), (0x1318, 'V'), (0x135B, 'X'), @@ -1553,7 +1551,7 @@ def _seg_14(): (0x1AA0, 'V'), (0x1AAE, 'X'), (0x1AB0, 'V'), - (0x1ABF, 'X'), + (0x1AC1, 'X'), (0x1B00, 'V'), (0x1B4C, 'X'), (0x1B50, 'V'), @@ -1563,12 +1561,12 @@ def _seg_14(): (0x1BFC, 'V'), (0x1C38, 'X'), (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), ] def _seg_15(): return [ - (0x1C4A, 'X'), - (0x1C4D, 'V'), (0x1C80, 'M', u'в'), (0x1C81, 'M', u'д'), (0x1C82, 'M', u'о'), @@ -1667,12 +1665,12 @@ def _seg_15(): (0x1D4E, 'V'), (0x1D4F, 'M', u'k'), (0x1D50, 'M', u'm'), + (0x1D51, 'M', u'ŋ'), + (0x1D52, 'M', u'o'), ] def _seg_16(): return [ - (0x1D51, 'M', u'ŋ'), - (0x1D52, 'M', u'o'), (0x1D53, 'M', u'ɔ'), (0x1D54, 'M', u'ᴖ'), (0x1D55, 'M', u'ᴗ'), @@ -1771,12 +1769,12 @@ def _seg_16(): (0x1E1C, 'M', u'ḝ'), (0x1E1D, 'V'), (0x1E1E, 'M', u'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', u'ḡ'), ] def _seg_17(): return [ - (0x1E1F, 'V'), - (0x1E20, 'M', u'ḡ'), (0x1E21, 'V'), (0x1E22, 'M', u'ḣ'), (0x1E23, 'V'), @@ -1875,12 +1873,12 @@ def _seg_17(): (0x1E80, 'M', u'ẁ'), (0x1E81, 'V'), (0x1E82, 'M', u'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', u'ẅ'), ] def 
_seg_18(): return [ - (0x1E83, 'V'), - (0x1E84, 'M', u'ẅ'), (0x1E85, 'V'), (0x1E86, 'M', u'ẇ'), (0x1E87, 'V'), @@ -1979,12 +1977,12 @@ def _seg_18(): (0x1EE9, 'V'), (0x1EEA, 'M', u'ừ'), (0x1EEB, 'V'), + (0x1EEC, 'M', u'ử'), + (0x1EED, 'V'), ] def _seg_19(): return [ - (0x1EEC, 'M', u'ử'), - (0x1EED, 'V'), (0x1EEE, 'M', u'ữ'), (0x1EEF, 'V'), (0x1EF0, 'M', u'ự'), @@ -2083,12 +2081,12 @@ def _seg_19(): (0x1F80, 'M', u'ἀι'), (0x1F81, 'M', u'ἁι'), (0x1F82, 'M', u'ἂι'), + (0x1F83, 'M', u'ἃι'), + (0x1F84, 'M', u'ἄι'), ] def _seg_20(): return [ - (0x1F83, 'M', u'ἃι'), - (0x1F84, 'M', u'ἄι'), (0x1F85, 'M', u'ἅι'), (0x1F86, 'M', u'ἆι'), (0x1F87, 'M', u'ἇι'), @@ -2187,12 +2185,12 @@ def _seg_20(): (0x1FEE, '3', u' ̈́'), (0x1FEF, '3', u'`'), (0x1FF0, 'X'), + (0x1FF2, 'M', u'ὼι'), + (0x1FF3, 'M', u'ωι'), ] def _seg_21(): return [ - (0x1FF2, 'M', u'ὼι'), - (0x1FF3, 'M', u'ωι'), (0x1FF4, 'M', u'ώι'), (0x1FF5, 'X'), (0x1FF6, 'V'), @@ -2291,12 +2289,12 @@ def _seg_21(): (0x20C0, 'X'), (0x20D0, 'V'), (0x20F1, 'X'), + (0x2100, '3', u'a/c'), + (0x2101, '3', u'a/s'), ] def _seg_22(): return [ - (0x2100, '3', u'a/c'), - (0x2101, '3', u'a/s'), (0x2102, 'M', u'c'), (0x2103, 'M', u'°c'), (0x2104, 'V'), @@ -2395,12 +2393,12 @@ def _seg_22(): (0x2175, 'M', u'vi'), (0x2176, 'M', u'vii'), (0x2177, 'M', u'viii'), + (0x2178, 'M', u'ix'), + (0x2179, 'M', u'x'), ] def _seg_23(): return [ - (0x2178, 'M', u'ix'), - (0x2179, 'M', u'x'), (0x217A, 'M', u'xi'), (0x217B, 'M', u'xii'), (0x217C, 'M', u'l'), @@ -2499,12 +2497,12 @@ def _seg_23(): (0x24B5, '3', u'(z)'), (0x24B6, 'M', u'a'), (0x24B7, 'M', u'b'), + (0x24B8, 'M', u'c'), + (0x24B9, 'M', u'd'), ] def _seg_24(): return [ - (0x24B8, 'M', u'c'), - (0x24B9, 'M', u'd'), (0x24BA, 'M', u'e'), (0x24BB, 'M', u'f'), (0x24BC, 'M', u'g'), @@ -2566,7 +2564,7 @@ def _seg_24(): (0x2B74, 'X'), (0x2B76, 'V'), (0x2B96, 'X'), - (0x2B98, 'V'), + (0x2B97, 'V'), (0x2C00, 'M', u'ⰰ'), (0x2C01, 'M', u'ⰱ'), (0x2C02, 'M', u'ⰲ'), @@ -2603,12 +2601,12 @@ def _seg_24(): 
(0x2C21, 'M', u'ⱑ'), (0x2C22, 'M', u'ⱒ'), (0x2C23, 'M', u'ⱓ'), + (0x2C24, 'M', u'ⱔ'), + (0x2C25, 'M', u'ⱕ'), ] def _seg_25(): return [ - (0x2C24, 'M', u'ⱔ'), - (0x2C25, 'M', u'ⱕ'), (0x2C26, 'M', u'ⱖ'), (0x2C27, 'M', u'ⱗ'), (0x2C28, 'M', u'ⱘ'), @@ -2707,12 +2705,12 @@ def _seg_25(): (0x2CBA, 'M', u'ⲻ'), (0x2CBB, 'V'), (0x2CBC, 'M', u'ⲽ'), + (0x2CBD, 'V'), + (0x2CBE, 'M', u'ⲿ'), ] def _seg_26(): return [ - (0x2CBD, 'V'), - (0x2CBE, 'M', u'ⲿ'), (0x2CBF, 'V'), (0x2CC0, 'M', u'ⳁ'), (0x2CC1, 'V'), @@ -2787,7 +2785,7 @@ def _seg_26(): (0x2DD8, 'V'), (0x2DDF, 'X'), (0x2DE0, 'V'), - (0x2E50, 'X'), + (0x2E53, 'X'), (0x2E80, 'V'), (0x2E9A, 'X'), (0x2E9B, 'V'), @@ -2811,12 +2809,12 @@ def _seg_26(): (0x2F0D, 'M', u'冖'), (0x2F0E, 'M', u'冫'), (0x2F0F, 'M', u'几'), + (0x2F10, 'M', u'凵'), + (0x2F11, 'M', u'刀'), ] def _seg_27(): return [ - (0x2F10, 'M', u'凵'), - (0x2F11, 'M', u'刀'), (0x2F12, 'M', u'力'), (0x2F13, 'M', u'勹'), (0x2F14, 'M', u'匕'), @@ -2915,12 +2913,12 @@ def _seg_27(): (0x2F71, 'M', u'禸'), (0x2F72, 'M', u'禾'), (0x2F73, 'M', u'穴'), + (0x2F74, 'M', u'立'), + (0x2F75, 'M', u'竹'), ] def _seg_28(): return [ - (0x2F74, 'M', u'立'), - (0x2F75, 'M', u'竹'), (0x2F76, 'M', u'米'), (0x2F77, 'M', u'糸'), (0x2F78, 'M', u'缶'), @@ -3019,12 +3017,12 @@ def _seg_28(): (0x2FD5, 'M', u'龠'), (0x2FD6, 'X'), (0x3000, '3', u' '), + (0x3001, 'V'), + (0x3002, 'M', u'.'), ] def _seg_29(): return [ - (0x3001, 'V'), - (0x3002, 'M', u'.'), (0x3003, 'V'), (0x3036, 'M', u'〒'), (0x3037, 'V'), @@ -3123,12 +3121,12 @@ def _seg_29(): (0x317C, 'M', u'ᄯ'), (0x317D, 'M', u'ᄲ'), (0x317E, 'M', u'ᄶ'), + (0x317F, 'M', u'ᅀ'), + (0x3180, 'M', u'ᅇ'), ] def _seg_30(): return [ - (0x317F, 'M', u'ᅀ'), - (0x3180, 'M', u'ᅇ'), (0x3181, 'M', u'ᅌ'), (0x3182, 'M', u'ᇱ'), (0x3183, 'M', u'ᇲ'), @@ -3160,8 +3158,6 @@ def _seg_30(): (0x319E, 'M', u'地'), (0x319F, 'M', u'人'), (0x31A0, 'V'), - (0x31BB, 'X'), - (0x31C0, 'V'), (0x31E4, 'X'), (0x31F0, 'V'), (0x3200, '3', u'(ᄀ)'), @@ -3227,14 +3223,14 @@ def _seg_30(): (0x323C, '3', 
u'(監)'), (0x323D, '3', u'(企)'), (0x323E, '3', u'(資)'), - ] - -def _seg_31(): - return [ (0x323F, '3', u'(協)'), (0x3240, '3', u'(祭)'), (0x3241, '3', u'(休)'), (0x3242, '3', u'(自)'), + ] + +def _seg_31(): + return [ (0x3243, '3', u'(至)'), (0x3244, 'M', u'問'), (0x3245, 'M', u'幼'), @@ -3331,14 +3327,14 @@ def _seg_31(): (0x32A7, 'M', u'左'), (0x32A8, 'M', u'右'), (0x32A9, 'M', u'医'), - ] - -def _seg_32(): - return [ (0x32AA, 'M', u'宗'), (0x32AB, 'M', u'学'), (0x32AC, 'M', u'監'), (0x32AD, 'M', u'企'), + ] + +def _seg_32(): + return [ (0x32AE, 'M', u'資'), (0x32AF, 'M', u'協'), (0x32B0, 'M', u'夜'), @@ -3435,14 +3431,14 @@ def _seg_32(): (0x330B, 'M', u'カイリ'), (0x330C, 'M', u'カラット'), (0x330D, 'M', u'カロリー'), - ] - -def _seg_33(): - return [ (0x330E, 'M', u'ガロン'), (0x330F, 'M', u'ガンマ'), (0x3310, 'M', u'ギガ'), (0x3311, 'M', u'ギニー'), + ] + +def _seg_33(): + return [ (0x3312, 'M', u'キュリー'), (0x3313, 'M', u'ギルダー'), (0x3314, 'M', u'キロ'), @@ -3539,14 +3535,14 @@ def _seg_33(): (0x336F, 'M', u'23点'), (0x3370, 'M', u'24点'), (0x3371, 'M', u'hpa'), - ] - -def _seg_34(): - return [ (0x3372, 'M', u'da'), (0x3373, 'M', u'au'), (0x3374, 'M', u'bar'), (0x3375, 'M', u'ov'), + ] + +def _seg_34(): + return [ (0x3376, 'M', u'pc'), (0x3377, 'M', u'dm'), (0x3378, 'M', u'dm2'), @@ -3643,14 +3639,14 @@ def _seg_34(): (0x33D3, 'M', u'lx'), (0x33D4, 'M', u'mb'), (0x33D5, 'M', u'mil'), - ] - -def _seg_35(): - return [ (0x33D6, 'M', u'mol'), (0x33D7, 'M', u'ph'), (0x33D8, 'X'), (0x33D9, 'M', u'ppm'), + ] + +def _seg_35(): + return [ (0x33DA, 'M', u'pr'), (0x33DB, 'M', u'sr'), (0x33DC, 'M', u'sv'), @@ -3690,9 +3686,7 @@ def _seg_35(): (0x33FE, 'M', u'31日'), (0x33FF, 'M', u'gal'), (0x3400, 'V'), - (0x4DB6, 'X'), - (0x4DC0, 'V'), - (0x9FF0, 'X'), + (0x9FFD, 'X'), (0xA000, 'V'), (0xA48D, 'X'), (0xA490, 'V'), @@ -3747,16 +3741,16 @@ def _seg_35(): (0xA66D, 'V'), (0xA680, 'M', u'ꚁ'), (0xA681, 'V'), - ] - -def _seg_36(): - return [ (0xA682, 'M', u'ꚃ'), (0xA683, 'V'), (0xA684, 'M', u'ꚅ'), (0xA685, 'V'), (0xA686, 
'M', u'ꚇ'), (0xA687, 'V'), + ] + +def _seg_36(): + return [ (0xA688, 'M', u'ꚉ'), (0xA689, 'V'), (0xA68A, 'M', u'ꚋ'), @@ -3851,16 +3845,16 @@ def _seg_36(): (0xA766, 'M', u'ꝧ'), (0xA767, 'V'), (0xA768, 'M', u'ꝩ'), - ] - -def _seg_37(): - return [ (0xA769, 'V'), (0xA76A, 'M', u'ꝫ'), (0xA76B, 'V'), (0xA76C, 'M', u'ꝭ'), (0xA76D, 'V'), (0xA76E, 'M', u'ꝯ'), + ] + +def _seg_37(): + return [ (0xA76F, 'V'), (0xA770, 'M', u'ꝯ'), (0xA771, 'V'), @@ -3935,12 +3929,17 @@ def _seg_37(): (0xA7C4, 'M', u'ꞔ'), (0xA7C5, 'M', u'ʂ'), (0xA7C6, 'M', u'ᶎ'), - (0xA7C7, 'X'), - (0xA7F7, 'V'), + (0xA7C7, 'M', u'ꟈ'), + (0xA7C8, 'V'), + (0xA7C9, 'M', u'ꟊ'), + (0xA7CA, 'V'), + (0xA7CB, 'X'), + (0xA7F5, 'M', u'ꟶ'), + (0xA7F6, 'V'), (0xA7F8, 'M', u'ħ'), (0xA7F9, 'M', u'œ'), (0xA7FA, 'V'), - (0xA82C, 'X'), + (0xA82D, 'X'), (0xA830, 'V'), (0xA83A, 'X'), (0xA840, 'V'), @@ -3955,11 +3954,11 @@ def _seg_37(): (0xA97D, 'X'), (0xA980, 'V'), (0xA9CE, 'X'), + (0xA9CF, 'V'), ] def _seg_38(): return [ - (0xA9CF, 'V'), (0xA9DA, 'X'), (0xA9DE, 'V'), (0xA9FF, 'X'), @@ -3989,7 +3988,9 @@ def _seg_38(): (0xAB5E, 'M', u'ɫ'), (0xAB5F, 'M', u'ꭒ'), (0xAB60, 'V'), - (0xAB68, 'X'), + (0xAB69, 'M', u'ʍ'), + (0xAB6A, 'V'), + (0xAB6C, 'X'), (0xAB70, 'M', u'Ꭰ'), (0xAB71, 'M', u'Ꭱ'), (0xAB72, 'M', u'Ꭲ'), @@ -4058,11 +4059,11 @@ def _seg_38(): (0xABB1, 'M', u'Ꮱ'), (0xABB2, 'M', u'Ꮲ'), (0xABB3, 'M', u'Ꮳ'), - (0xABB4, 'M', u'Ꮴ'), ] def _seg_39(): return [ + (0xABB4, 'M', u'Ꮴ'), (0xABB5, 'M', u'Ꮵ'), (0xABB6, 'M', u'Ꮶ'), (0xABB7, 'M', u'Ꮷ'), @@ -4162,11 +4163,11 @@ def _seg_39(): (0xF94C, 'M', u'樓'), (0xF94D, 'M', u'淚'), (0xF94E, 'M', u'漏'), - (0xF94F, 'M', u'累'), ] def _seg_40(): return [ + (0xF94F, 'M', u'累'), (0xF950, 'M', u'縷'), (0xF951, 'M', u'陋'), (0xF952, 'M', u'勒'), @@ -4266,11 +4267,11 @@ def _seg_40(): (0xF9B0, 'M', u'聆'), (0xF9B1, 'M', u'鈴'), (0xF9B2, 'M', u'零'), - (0xF9B3, 'M', u'靈'), ] def _seg_41(): return [ + (0xF9B3, 'M', u'靈'), (0xF9B4, 'M', u'領'), (0xF9B5, 'M', u'例'), (0xF9B6, 'M', u'禮'), @@ -4370,11 +4371,11 
@@ def _seg_41(): (0xFA16, 'M', u'猪'), (0xFA17, 'M', u'益'), (0xFA18, 'M', u'礼'), - (0xFA19, 'M', u'神'), ] def _seg_42(): return [ + (0xFA19, 'M', u'神'), (0xFA1A, 'M', u'祥'), (0xFA1B, 'M', u'福'), (0xFA1C, 'M', u'靖'), @@ -4474,11 +4475,11 @@ def _seg_42(): (0xFA7F, 'M', u'奔'), (0xFA80, 'M', u'婢'), (0xFA81, 'M', u'嬨'), - (0xFA82, 'M', u'廒'), ] def _seg_43(): return [ + (0xFA82, 'M', u'廒'), (0xFA83, 'M', u'廙'), (0xFA84, 'M', u'彩'), (0xFA85, 'M', u'徭'), @@ -4578,11 +4579,11 @@ def _seg_43(): (0xFB14, 'M', u'մե'), (0xFB15, 'M', u'մի'), (0xFB16, 'M', u'վն'), - (0xFB17, 'M', u'մխ'), ] def _seg_44(): return [ + (0xFB17, 'M', u'մխ'), (0xFB18, 'X'), (0xFB1D, 'M', u'יִ'), (0xFB1E, 'V'), @@ -4682,11 +4683,11 @@ def _seg_44(): (0xFBEE, 'M', u'ئو'), (0xFBF0, 'M', u'ئۇ'), (0xFBF2, 'M', u'ئۆ'), - (0xFBF4, 'M', u'ئۈ'), ] def _seg_45(): return [ + (0xFBF4, 'M', u'ئۈ'), (0xFBF6, 'M', u'ئې'), (0xFBF9, 'M', u'ئى'), (0xFBFC, 'M', u'ی'), @@ -4786,11 +4787,11 @@ def _seg_45(): (0xFC5D, 'M', u'ىٰ'), (0xFC5E, '3', u' ٌّ'), (0xFC5F, '3', u' ٍّ'), - (0xFC60, '3', u' َّ'), ] def _seg_46(): return [ + (0xFC60, '3', u' َّ'), (0xFC61, '3', u' ُّ'), (0xFC62, '3', u' ِّ'), (0xFC63, '3', u' ّٰ'), @@ -4890,11 +4891,11 @@ def _seg_46(): (0xFCC1, 'M', u'فم'), (0xFCC2, 'M', u'قح'), (0xFCC3, 'M', u'قم'), - (0xFCC4, 'M', u'كج'), ] def _seg_47(): return [ + (0xFCC4, 'M', u'كج'), (0xFCC5, 'M', u'كح'), (0xFCC6, 'M', u'كخ'), (0xFCC7, 'M', u'كل'), @@ -4994,11 +4995,11 @@ def _seg_47(): (0xFD25, 'M', u'شج'), (0xFD26, 'M', u'شح'), (0xFD27, 'M', u'شخ'), - (0xFD28, 'M', u'شم'), ] def _seg_48(): return [ + (0xFD28, 'M', u'شم'), (0xFD29, 'M', u'شر'), (0xFD2A, 'M', u'سر'), (0xFD2B, 'M', u'صر'), @@ -5098,11 +5099,11 @@ def _seg_48(): (0xFDAC, 'M', u'لجي'), (0xFDAD, 'M', u'لمي'), (0xFDAE, 'M', u'يحي'), - (0xFDAF, 'M', u'يجي'), ] def _seg_49(): return [ + (0xFDAF, 'M', u'يجي'), (0xFDB0, 'M', u'يمي'), (0xFDB1, 'M', u'ممي'), (0xFDB2, 'M', u'قمي'), @@ -5202,11 +5203,11 @@ def _seg_49(): (0xFE64, '3', u'<'), (0xFE65, '3', 
u'>'), (0xFE66, '3', u'='), - (0xFE67, 'X'), ] def _seg_50(): return [ + (0xFE67, 'X'), (0xFE68, '3', u'\\'), (0xFE69, '3', u'$'), (0xFE6A, '3', u'%'), @@ -5306,11 +5307,11 @@ def _seg_50(): (0xFF21, 'M', u'a'), (0xFF22, 'M', u'b'), (0xFF23, 'M', u'c'), - (0xFF24, 'M', u'd'), ] def _seg_51(): return [ + (0xFF24, 'M', u'd'), (0xFF25, 'M', u'e'), (0xFF26, 'M', u'f'), (0xFF27, 'M', u'g'), @@ -5410,11 +5411,11 @@ def _seg_51(): (0xFF85, 'M', u'ナ'), (0xFF86, 'M', u'ニ'), (0xFF87, 'M', u'ヌ'), - (0xFF88, 'M', u'ネ'), ] def _seg_52(): return [ + (0xFF88, 'M', u'ネ'), (0xFF89, 'M', u'ノ'), (0xFF8A, 'M', u'ハ'), (0xFF8B, 'M', u'ヒ'), @@ -5514,11 +5515,11 @@ def _seg_52(): (0x10000, 'V'), (0x1000C, 'X'), (0x1000D, 'V'), - (0x10027, 'X'), ] def _seg_53(): return [ + (0x10027, 'X'), (0x10028, 'V'), (0x1003B, 'X'), (0x1003C, 'V'), @@ -5536,7 +5537,7 @@ def _seg_53(): (0x10137, 'V'), (0x1018F, 'X'), (0x10190, 'V'), - (0x1019C, 'X'), + (0x1019D, 'X'), (0x101A0, 'V'), (0x101A1, 'X'), (0x101D0, 'V'), @@ -5618,11 +5619,11 @@ def _seg_53(): (0x104BC, 'M', u'𐓤'), (0x104BD, 'M', u'𐓥'), (0x104BE, 'M', u'𐓦'), - (0x104BF, 'M', u'𐓧'), ] def _seg_54(): return [ + (0x104BF, 'M', u'𐓧'), (0x104C0, 'M', u'𐓨'), (0x104C1, 'M', u'𐓩'), (0x104C2, 'M', u'𐓪'), @@ -5722,11 +5723,11 @@ def _seg_54(): (0x10B9D, 'X'), (0x10BA9, 'V'), (0x10BB0, 'X'), - (0x10C00, 'V'), ] def _seg_55(): return [ + (0x10C00, 'V'), (0x10C49, 'X'), (0x10C80, 'M', u'𐳀'), (0x10C81, 'M', u'𐳁'), @@ -5788,10 +5789,18 @@ def _seg_55(): (0x10D3A, 'X'), (0x10E60, 'V'), (0x10E7F, 'X'), + (0x10E80, 'V'), + (0x10EAA, 'X'), + (0x10EAB, 'V'), + (0x10EAE, 'X'), + (0x10EB0, 'V'), + (0x10EB2, 'X'), (0x10F00, 'V'), (0x10F28, 'X'), (0x10F30, 'V'), (0x10F5A, 'X'), + (0x10FB0, 'V'), + (0x10FCC, 'X'), (0x10FE0, 'V'), (0x10FF7, 'X'), (0x11000, 'V'), @@ -5809,17 +5818,19 @@ def _seg_55(): (0x11100, 'V'), (0x11135, 'X'), (0x11136, 'V'), - (0x11147, 'X'), + (0x11148, 'X'), (0x11150, 'V'), (0x11177, 'X'), (0x11180, 'V'), - (0x111CE, 'X'), - (0x111D0, 'V'), 
(0x111E0, 'X'), (0x111E1, 'V'), (0x111F5, 'X'), (0x11200, 'V'), (0x11212, 'X'), + ] + +def _seg_56(): + return [ (0x11213, 'V'), (0x1123F, 'X'), (0x11280, 'V'), @@ -5827,10 +5838,6 @@ def _seg_55(): (0x11288, 'V'), (0x11289, 'X'), (0x1128A, 'V'), - ] - -def _seg_56(): - return [ (0x1128E, 'X'), (0x1128F, 'V'), (0x1129E, 'X'), @@ -5871,11 +5878,9 @@ def _seg_56(): (0x11370, 'V'), (0x11375, 'X'), (0x11400, 'V'), - (0x1145A, 'X'), - (0x1145B, 'V'), (0x1145C, 'X'), (0x1145D, 'V'), - (0x11460, 'X'), + (0x11462, 'X'), (0x11480, 'V'), (0x114C8, 'X'), (0x114D0, 'V'), @@ -5926,22 +5931,36 @@ def _seg_56(): (0x118B5, 'M', u'𑣕'), (0x118B6, 'M', u'𑣖'), (0x118B7, 'M', u'𑣗'), + ] + +def _seg_57(): + return [ (0x118B8, 'M', u'𑣘'), (0x118B9, 'M', u'𑣙'), (0x118BA, 'M', u'𑣚'), (0x118BB, 'M', u'𑣛'), (0x118BC, 'M', u'𑣜'), - ] - -def _seg_57(): - return [ (0x118BD, 'M', u'𑣝'), (0x118BE, 'M', u'𑣞'), (0x118BF, 'M', u'𑣟'), (0x118C0, 'V'), (0x118F3, 'X'), (0x118FF, 'V'), - (0x11900, 'X'), + (0x11907, 'X'), + (0x11909, 'V'), + (0x1190A, 'X'), + (0x1190C, 'V'), + (0x11914, 'X'), + (0x11915, 'V'), + (0x11917, 'X'), + (0x11918, 'V'), + (0x11936, 'X'), + (0x11937, 'V'), + (0x11939, 'X'), + (0x1193B, 'V'), + (0x11947, 'X'), + (0x11950, 'V'), + (0x1195A, 'X'), (0x119A0, 'V'), (0x119A8, 'X'), (0x119AA, 'V'), @@ -5996,6 +6015,8 @@ def _seg_57(): (0x11DAA, 'X'), (0x11EE0, 'V'), (0x11EF9, 'X'), + (0x11FB0, 'V'), + (0x11FB1, 'X'), (0x11FC0, 'V'), (0x11FF2, 'X'), (0x11FFF, 'V'), @@ -6014,6 +6035,10 @@ def _seg_57(): (0x16A39, 'X'), (0x16A40, 'V'), (0x16A5F, 'X'), + ] + +def _seg_58(): + return [ (0x16A60, 'V'), (0x16A6A, 'X'), (0x16A6E, 'V'), @@ -6035,10 +6060,6 @@ def _seg_57(): (0x16E40, 'M', u'𖹠'), (0x16E41, 'M', u'𖹡'), (0x16E42, 'M', u'𖹢'), - ] - -def _seg_58(): - return [ (0x16E43, 'M', u'𖹣'), (0x16E44, 'M', u'𖹤'), (0x16E45, 'M', u'𖹥'), @@ -6077,11 +6098,15 @@ def _seg_58(): (0x16F8F, 'V'), (0x16FA0, 'X'), (0x16FE0, 'V'), - (0x16FE4, 'X'), + (0x16FE5, 'X'), + (0x16FF0, 'V'), + (0x16FF2, 'X'), 
(0x17000, 'V'), (0x187F8, 'X'), (0x18800, 'V'), - (0x18AF3, 'X'), + (0x18CD6, 'X'), + (0x18D00, 'V'), + (0x18D09, 'X'), (0x1B000, 'V'), (0x1B11F, 'X'), (0x1B150, 'V'), @@ -6114,6 +6139,10 @@ def _seg_58(): (0x1D163, 'M', u'𝅘𝅥𝅱'), (0x1D164, 'M', u'𝅘𝅥𝅲'), (0x1D165, 'V'), + ] + +def _seg_59(): + return [ (0x1D173, 'X'), (0x1D17B, 'V'), (0x1D1BB, 'M', u'𝆹𝅥'), @@ -6139,10 +6168,6 @@ def _seg_58(): (0x1D404, 'M', u'e'), (0x1D405, 'M', u'f'), (0x1D406, 'M', u'g'), - ] - -def _seg_59(): - return [ (0x1D407, 'M', u'h'), (0x1D408, 'M', u'i'), (0x1D409, 'M', u'j'), @@ -6218,6 +6243,10 @@ def _seg_59(): (0x1D44F, 'M', u'b'), (0x1D450, 'M', u'c'), (0x1D451, 'M', u'd'), + ] + +def _seg_60(): + return [ (0x1D452, 'M', u'e'), (0x1D453, 'M', u'f'), (0x1D454, 'M', u'g'), @@ -6243,10 +6272,6 @@ def _seg_59(): (0x1D468, 'M', u'a'), (0x1D469, 'M', u'b'), (0x1D46A, 'M', u'c'), - ] - -def _seg_60(): - return [ (0x1D46B, 'M', u'd'), (0x1D46C, 'M', u'e'), (0x1D46D, 'M', u'f'), @@ -6322,6 +6347,10 @@ def _seg_60(): (0x1D4B6, 'M', u'a'), (0x1D4B7, 'M', u'b'), (0x1D4B8, 'M', u'c'), + ] + +def _seg_61(): + return [ (0x1D4B9, 'M', u'd'), (0x1D4BA, 'X'), (0x1D4BB, 'M', u'f'), @@ -6347,10 +6376,6 @@ def _seg_60(): (0x1D4CF, 'M', u'z'), (0x1D4D0, 'M', u'a'), (0x1D4D1, 'M', u'b'), - ] - -def _seg_61(): - return [ (0x1D4D2, 'M', u'c'), (0x1D4D3, 'M', u'd'), (0x1D4D4, 'M', u'e'), @@ -6426,6 +6451,10 @@ def _seg_61(): (0x1D51B, 'M', u'x'), (0x1D51C, 'M', u'y'), (0x1D51D, 'X'), + ] + +def _seg_62(): + return [ (0x1D51E, 'M', u'a'), (0x1D51F, 'M', u'b'), (0x1D520, 'M', u'c'), @@ -6451,10 +6480,6 @@ def _seg_61(): (0x1D534, 'M', u'w'), (0x1D535, 'M', u'x'), (0x1D536, 'M', u'y'), - ] - -def _seg_62(): - return [ (0x1D537, 'M', u'z'), (0x1D538, 'M', u'a'), (0x1D539, 'M', u'b'), @@ -6530,6 +6555,10 @@ def _seg_62(): (0x1D581, 'M', u'v'), (0x1D582, 'M', u'w'), (0x1D583, 'M', u'x'), + ] + +def _seg_63(): + return [ (0x1D584, 'M', u'y'), (0x1D585, 'M', u'z'), (0x1D586, 'M', u'a'), @@ -6555,10 +6584,6 @@ def 
_seg_62(): (0x1D59A, 'M', u'u'), (0x1D59B, 'M', u'v'), (0x1D59C, 'M', u'w'), - ] - -def _seg_63(): - return [ (0x1D59D, 'M', u'x'), (0x1D59E, 'M', u'y'), (0x1D59F, 'M', u'z'), @@ -6634,6 +6659,10 @@ def _seg_63(): (0x1D5E5, 'M', u'r'), (0x1D5E6, 'M', u's'), (0x1D5E7, 'M', u't'), + ] + +def _seg_64(): + return [ (0x1D5E8, 'M', u'u'), (0x1D5E9, 'M', u'v'), (0x1D5EA, 'M', u'w'), @@ -6659,10 +6688,6 @@ def _seg_63(): (0x1D5FE, 'M', u'q'), (0x1D5FF, 'M', u'r'), (0x1D600, 'M', u's'), - ] - -def _seg_64(): - return [ (0x1D601, 'M', u't'), (0x1D602, 'M', u'u'), (0x1D603, 'M', u'v'), @@ -6738,6 +6763,10 @@ def _seg_64(): (0x1D649, 'M', u'n'), (0x1D64A, 'M', u'o'), (0x1D64B, 'M', u'p'), + ] + +def _seg_65(): + return [ (0x1D64C, 'M', u'q'), (0x1D64D, 'M', u'r'), (0x1D64E, 'M', u's'), @@ -6763,10 +6792,6 @@ def _seg_64(): (0x1D662, 'M', u'm'), (0x1D663, 'M', u'n'), (0x1D664, 'M', u'o'), - ] - -def _seg_65(): - return [ (0x1D665, 'M', u'p'), (0x1D666, 'M', u'q'), (0x1D667, 'M', u'r'), @@ -6842,6 +6867,10 @@ def _seg_65(): (0x1D6AE, 'M', u'η'), (0x1D6AF, 'M', u'θ'), (0x1D6B0, 'M', u'ι'), + ] + +def _seg_66(): + return [ (0x1D6B1, 'M', u'κ'), (0x1D6B2, 'M', u'λ'), (0x1D6B3, 'M', u'μ'), @@ -6867,10 +6896,6 @@ def _seg_65(): (0x1D6C7, 'M', u'ζ'), (0x1D6C8, 'M', u'η'), (0x1D6C9, 'M', u'θ'), - ] - -def _seg_66(): - return [ (0x1D6CA, 'M', u'ι'), (0x1D6CB, 'M', u'κ'), (0x1D6CC, 'M', u'λ'), @@ -6946,6 +6971,10 @@ def _seg_66(): (0x1D714, 'M', u'ω'), (0x1D715, 'M', u'∂'), (0x1D716, 'M', u'ε'), + ] + +def _seg_67(): + return [ (0x1D717, 'M', u'θ'), (0x1D718, 'M', u'κ'), (0x1D719, 'M', u'φ'), @@ -6971,10 +7000,6 @@ def _seg_66(): (0x1D72D, 'M', u'θ'), (0x1D72E, 'M', u'σ'), (0x1D72F, 'M', u'τ'), - ] - -def _seg_67(): - return [ (0x1D730, 'M', u'υ'), (0x1D731, 'M', u'φ'), (0x1D732, 'M', u'χ'), @@ -7050,6 +7075,10 @@ def _seg_67(): (0x1D779, 'M', u'κ'), (0x1D77A, 'M', u'λ'), (0x1D77B, 'M', u'μ'), + ] + +def _seg_68(): + return [ (0x1D77C, 'M', u'ν'), (0x1D77D, 'M', u'ξ'), (0x1D77E, 'M', 
u'ο'), @@ -7075,10 +7104,6 @@ def _seg_67(): (0x1D793, 'M', u'δ'), (0x1D794, 'M', u'ε'), (0x1D795, 'M', u'ζ'), - ] - -def _seg_68(): - return [ (0x1D796, 'M', u'η'), (0x1D797, 'M', u'θ'), (0x1D798, 'M', u'ι'), @@ -7154,6 +7179,10 @@ def _seg_68(): (0x1D7E1, 'M', u'9'), (0x1D7E2, 'M', u'0'), (0x1D7E3, 'M', u'1'), + ] + +def _seg_69(): + return [ (0x1D7E4, 'M', u'2'), (0x1D7E5, 'M', u'3'), (0x1D7E6, 'M', u'4'), @@ -7179,10 +7208,6 @@ def _seg_68(): (0x1D7FA, 'M', u'4'), (0x1D7FB, 'M', u'5'), (0x1D7FC, 'M', u'6'), - ] - -def _seg_69(): - return [ (0x1D7FD, 'M', u'7'), (0x1D7FE, 'M', u'8'), (0x1D7FF, 'M', u'9'), @@ -7258,6 +7283,10 @@ def _seg_69(): (0x1E95A, 'X'), (0x1E95E, 'V'), (0x1E960, 'X'), + ] + +def _seg_70(): + return [ (0x1EC71, 'V'), (0x1ECB5, 'X'), (0x1ED01, 'V'), @@ -7283,10 +7312,6 @@ def _seg_69(): (0x1EE12, 'M', u'ق'), (0x1EE13, 'M', u'ر'), (0x1EE14, 'M', u'ش'), - ] - -def _seg_70(): - return [ (0x1EE15, 'M', u'ت'), (0x1EE16, 'M', u'ث'), (0x1EE17, 'M', u'خ'), @@ -7362,6 +7387,10 @@ def _seg_70(): (0x1EE68, 'M', u'ط'), (0x1EE69, 'M', u'ي'), (0x1EE6A, 'M', u'ك'), + ] + +def _seg_71(): + return [ (0x1EE6B, 'X'), (0x1EE6C, 'M', u'م'), (0x1EE6D, 'M', u'ن'), @@ -7387,10 +7416,6 @@ def _seg_70(): (0x1EE81, 'M', u'ب'), (0x1EE82, 'M', u'ج'), (0x1EE83, 'M', u'د'), - ] - -def _seg_71(): - return [ (0x1EE84, 'M', u'ه'), (0x1EE85, 'M', u'و'), (0x1EE86, 'M', u'ز'), @@ -7466,10 +7491,13 @@ def _seg_71(): (0x1F106, '3', u'5,'), (0x1F107, '3', u'6,'), (0x1F108, '3', u'7,'), + ] + +def _seg_72(): + return [ (0x1F109, '3', u'8,'), (0x1F10A, '3', u'9,'), (0x1F10B, 'V'), - (0x1F10D, 'X'), (0x1F110, '3', u'(a)'), (0x1F111, '3', u'(b)'), (0x1F112, '3', u'(c)'), @@ -7491,10 +7519,6 @@ def _seg_71(): (0x1F122, '3', u'(s)'), (0x1F123, '3', u'(t)'), (0x1F124, '3', u'(u)'), - ] - -def _seg_72(): - return [ (0x1F125, '3', u'(v)'), (0x1F126, '3', u'(w)'), (0x1F127, '3', u'(x)'), @@ -7542,11 +7566,10 @@ def _seg_72(): (0x1F16A, 'M', u'mc'), (0x1F16B, 'M', u'md'), (0x1F16C, 'M', 
u'mr'), - (0x1F16D, 'X'), - (0x1F170, 'V'), + (0x1F16D, 'V'), (0x1F190, 'M', u'dj'), (0x1F191, 'V'), - (0x1F1AD, 'X'), + (0x1F1AE, 'X'), (0x1F1E6, 'V'), (0x1F200, 'M', u'ほか'), (0x1F201, 'M', u'ココ'), @@ -7572,6 +7595,10 @@ def _seg_72(): (0x1F221, 'M', u'終'), (0x1F222, 'M', u'生'), (0x1F223, 'M', u'販'), + ] + +def _seg_73(): + return [ (0x1F224, 'M', u'声'), (0x1F225, 'M', u'吹'), (0x1F226, 'M', u'演'), @@ -7595,10 +7622,6 @@ def _seg_72(): (0x1F238, 'M', u'申'), (0x1F239, 'M', u'割'), (0x1F23A, 'M', u'営'), - ] - -def _seg_73(): - return [ (0x1F23B, 'M', u'配'), (0x1F23C, 'X'), (0x1F240, 'M', u'〔本〕'), @@ -7617,11 +7640,11 @@ def _seg_73(): (0x1F260, 'V'), (0x1F266, 'X'), (0x1F300, 'V'), - (0x1F6D6, 'X'), + (0x1F6D8, 'X'), (0x1F6E0, 'V'), (0x1F6ED, 'X'), (0x1F6F0, 'V'), - (0x1F6FB, 'X'), + (0x1F6FD, 'X'), (0x1F700, 'V'), (0x1F774, 'X'), (0x1F780, 'V'), @@ -7638,32 +7661,51 @@ def _seg_73(): (0x1F888, 'X'), (0x1F890, 'V'), (0x1F8AE, 'X'), + (0x1F8B0, 'V'), + (0x1F8B2, 'X'), (0x1F900, 'V'), - (0x1F90C, 'X'), - (0x1F90D, 'V'), - (0x1F972, 'X'), - (0x1F973, 'V'), - (0x1F977, 'X'), + (0x1F979, 'X'), (0x1F97A, 'V'), - (0x1F9A3, 'X'), - (0x1F9A5, 'V'), - (0x1F9AB, 'X'), - (0x1F9AE, 'V'), - (0x1F9CB, 'X'), + (0x1F9CC, 'X'), (0x1F9CD, 'V'), (0x1FA54, 'X'), (0x1FA60, 'V'), (0x1FA6E, 'X'), (0x1FA70, 'V'), - (0x1FA74, 'X'), + (0x1FA75, 'X'), (0x1FA78, 'V'), (0x1FA7B, 'X'), (0x1FA80, 'V'), - (0x1FA83, 'X'), + (0x1FA87, 'X'), (0x1FA90, 'V'), - (0x1FA96, 'X'), + (0x1FAA9, 'X'), + (0x1FAB0, 'V'), + (0x1FAB7, 'X'), + (0x1FAC0, 'V'), + (0x1FAC3, 'X'), + (0x1FAD0, 'V'), + (0x1FAD7, 'X'), + (0x1FB00, 'V'), + (0x1FB93, 'X'), + (0x1FB94, 'V'), + (0x1FBCB, 'X'), + (0x1FBF0, 'M', u'0'), + (0x1FBF1, 'M', u'1'), + (0x1FBF2, 'M', u'2'), + (0x1FBF3, 'M', u'3'), + (0x1FBF4, 'M', u'4'), + (0x1FBF5, 'M', u'5'), + (0x1FBF6, 'M', u'6'), + (0x1FBF7, 'M', u'7'), + (0x1FBF8, 'M', u'8'), + (0x1FBF9, 'M', u'9'), + ] + +def _seg_74(): + return [ + (0x1FBFA, 'X'), (0x20000, 'V'), - (0x2A6D7, 'X'), + (0x2A6DE, 
'X'), (0x2A700, 'V'), (0x2B735, 'X'), (0x2B740, 'V'), @@ -7699,10 +7741,6 @@ def _seg_73(): (0x2F818, 'M', u'冤'), (0x2F819, 'M', u'仌'), (0x2F81A, 'M', u'冬'), - ] - -def _seg_74(): - return [ (0x2F81B, 'M', u'况'), (0x2F81C, 'M', u'𩇟'), (0x2F81D, 'M', u'凵'), @@ -7765,6 +7803,10 @@ def _seg_74(): (0x2F859, 'M', u'𡓤'), (0x2F85A, 'M', u'売'), (0x2F85B, 'M', u'壷'), + ] + +def _seg_75(): + return [ (0x2F85C, 'M', u'夆'), (0x2F85D, 'M', u'多'), (0x2F85E, 'M', u'夢'), @@ -7803,10 +7845,6 @@ def _seg_74(): (0x2F880, 'M', u'嵼'), (0x2F881, 'M', u'巡'), (0x2F882, 'M', u'巢'), - ] - -def _seg_75(): - return [ (0x2F883, 'M', u'㠯'), (0x2F884, 'M', u'巽'), (0x2F885, 'M', u'帨'), @@ -7869,6 +7907,10 @@ def _seg_75(): (0x2F8C0, 'M', u'揅'), (0x2F8C1, 'M', u'掩'), (0x2F8C2, 'M', u'㨮'), + ] + +def _seg_76(): + return [ (0x2F8C3, 'M', u'摩'), (0x2F8C4, 'M', u'摾'), (0x2F8C5, 'M', u'撝'), @@ -7907,10 +7949,6 @@ def _seg_75(): (0x2F8E6, 'M', u'椔'), (0x2F8E7, 'M', u'㮝'), (0x2F8E8, 'M', u'楂'), - ] - -def _seg_76(): - return [ (0x2F8E9, 'M', u'榣'), (0x2F8EA, 'M', u'槪'), (0x2F8EB, 'M', u'檨'), @@ -7973,6 +8011,10 @@ def _seg_76(): (0x2F924, 'M', u'犀'), (0x2F925, 'M', u'犕'), (0x2F926, 'M', u'𤜵'), + ] + +def _seg_77(): + return [ (0x2F927, 'M', u'𤠔'), (0x2F928, 'M', u'獺'), (0x2F929, 'M', u'王'), @@ -8011,10 +8053,6 @@ def _seg_76(): (0x2F94C, 'M', u'䂖'), (0x2F94D, 'M', u'𥐝'), (0x2F94E, 'M', u'硎'), - ] - -def _seg_77(): - return [ (0x2F94F, 'M', u'碌'), (0x2F950, 'M', u'磌'), (0x2F951, 'M', u'䃣'), @@ -8077,6 +8115,10 @@ def _seg_77(): (0x2F98B, 'M', u'舁'), (0x2F98C, 'M', u'舄'), (0x2F98D, 'M', u'辞'), + ] + +def _seg_78(): + return [ (0x2F98E, 'M', u'䑫'), (0x2F98F, 'M', u'芑'), (0x2F990, 'M', u'芋'), @@ -8115,10 +8157,6 @@ def _seg_77(): (0x2F9B1, 'M', u'𧃒'), (0x2F9B2, 'M', u'䕫'), (0x2F9B3, 'M', u'虐'), - ] - -def _seg_78(): - return [ (0x2F9B4, 'M', u'虜'), (0x2F9B5, 'M', u'虧'), (0x2F9B6, 'M', u'虩'), @@ -8181,6 +8219,10 @@ def _seg_78(): (0x2F9EF, 'M', u'䦕'), (0x2F9F0, 'M', u'閷'), (0x2F9F1, 'M', u'𨵷'), + ] + +def 
_seg_79(): + return [ (0x2F9F2, 'M', u'䧦'), (0x2F9F3, 'M', u'雃'), (0x2F9F4, 'M', u'嶲'), @@ -8219,16 +8261,14 @@ def _seg_78(): (0x2FA16, 'M', u'䵖'), (0x2FA17, 'M', u'黹'), (0x2FA18, 'M', u'黾'), - ] - -def _seg_79(): - return [ (0x2FA19, 'M', u'鼅'), (0x2FA1A, 'M', u'鼏'), (0x2FA1B, 'M', u'鼖'), (0x2FA1C, 'M', u'鼻'), (0x2FA1D, 'M', u'𪘀'), (0x2FA1E, 'X'), + (0x30000, 'V'), + (0x3134B, 'X'), (0xE0100, 'I'), (0xE01F0, 'X'), ] diff --git a/pipenv/vendor/importlib_metadata/__init__.py b/pipenv/vendor/importlib_metadata/__init__.py index 95a08ba013..7031323db7 100644 --- a/pipenv/vendor/importlib_metadata/__init__.py +++ b/pipenv/vendor/importlib_metadata/__init__.py @@ -29,6 +29,7 @@ email_message_from_string, PyPy_repr, unique_ordered, + str, ) from importlib import import_module from itertools import starmap @@ -54,6 +55,15 @@ class PackageNotFoundError(ModuleNotFoundError): """The package was not found.""" + def __str__(self): + tmpl = "No package metadata was found for {self.name}" + return tmpl.format(**locals()) + + @property + def name(self): + name, = self.args + return name + class EntryPoint( PyPy_repr, @@ -198,7 +208,7 @@ def from_name(cls, name): """ for resolver in cls._discover_resolvers(): dists = resolver(DistributionFinder.Context(name=name)) - dist = next(dists, None) + dist = next(iter(dists), None) if dist is not None: return dist else: @@ -241,6 +251,17 @@ def _discover_resolvers(): ) return filter(None, declared) + @classmethod + def _local(cls, root='.'): + from pep517 import build, meta + system = build.compat_system(root) + builder = functools.partial( + meta.build, + source_dir=root, + system=system, + ) + return PathDistribution(zipp.Path(meta.build_as_zip(builder))) + @property def metadata(self): """Return the parsed metadata for this Distribution. 
@@ -418,8 +439,8 @@ class FastPath: """ def __init__(self, root): - self.root = root - self.base = os.path.basename(root).lower() + self.root = str(root) + self.base = os.path.basename(self.root).lower() def joinpath(self, child): return pathlib.Path(self.root, child) @@ -597,6 +618,3 @@ def requires(distribution_name): packaging.requirement.Requirement. """ return distribution(distribution_name).requires - - -__version__ = version(__name__) diff --git a/pipenv/vendor/importlib_metadata/_compat.py b/pipenv/vendor/importlib_metadata/_compat.py index f59b57dbcb..303d4a22e8 100644 --- a/pipenv/vendor/importlib_metadata/_compat.py +++ b/pipenv/vendor/importlib_metadata/_compat.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import +from __future__ import absolute_import, unicode_literals import io import abc @@ -26,6 +26,8 @@ NotADirectoryError = IOError, OSError PermissionError = IOError, OSError +str = type('') + suppress = contextlib.suppress if sys.version_info > (3, 5): # pragma: nocover diff --git a/pipenv/vendor/importlib_metadata/docs/changelog.rst b/pipenv/vendor/importlib_metadata/docs/changelog.rst index d638e38d20..b7e93b5d7f 100644 --- a/pipenv/vendor/importlib_metadata/docs/changelog.rst +++ b/pipenv/vendor/importlib_metadata/docs/changelog.rst @@ -2,6 +2,38 @@ importlib_metadata NEWS ========================= +v2.0.0 +====== + +* ``importlib_metadata`` no longer presents a + ``__version__`` attribute. Consumers wishing to + resolve the version of the package should query it + directly with + ``importlib_metadata.version('importlib-metadata')``. + Closes #71. + +v1.7.0 +====== + +* ``PathNotFoundError`` now has a custom ``__str__`` + mentioning "package metadata" being missing to help + guide users to the cause when the package is installed + but no metadata is present. Closes #124. + +v1.6.1 +====== + +* Added ``Distribution._local()`` as a provisional + demonstration of how to load metadata for a local + package. 
Implicitly requires that + `pep517 `_ is + installed. Ref #42. +* Ensure inputs to FastPath are Unicode. Closes #121. +* Tests now rely on ``importlib.resources.files`` (and + backport) instead of the older ``path`` function. +* Support any iterable from ``find_distributions``. + Closes #122. + v1.6.0 ====== diff --git a/pipenv/vendor/importlib_metadata/docs/using.rst b/pipenv/vendor/importlib_metadata/docs/using.rst index d1ca765899..11965147f4 100644 --- a/pipenv/vendor/importlib_metadata/docs/using.rst +++ b/pipenv/vendor/importlib_metadata/docs/using.rst @@ -95,7 +95,7 @@ The ``group`` and ``name`` are arbitrary values defined by the package author and usually a client will wish to resolve all entry points for a particular group. Read `the setuptools docs `_ -for more information on entrypoints, their definition, and usage. +for more information on entry points, their definition, and usage. .. _metadata: @@ -236,7 +236,7 @@ method:: """ The ``DistributionFinder.Context`` object provides ``.path`` and ``.name`` -properties indicating the path to search and names to match and may +properties indicating the path to search and name to match and may supply other relevant context. 
What this means in practice is that to support finding distribution package diff --git a/pipenv/vendor/importlib_metadata/tests/fixtures.py b/pipenv/vendor/importlib_metadata/tests/fixtures.py index 218b699c0a..20982fa1c4 100644 --- a/pipenv/vendor/importlib_metadata/tests/fixtures.py +++ b/pipenv/vendor/importlib_metadata/tests/fixtures.py @@ -5,6 +5,7 @@ import shutil import tempfile import textwrap +import test.support from .._compat import pathlib, contextlib @@ -166,6 +167,21 @@ def setUp(self): build_files(EggInfoFile.files, prefix=self.site_dir) +class LocalPackage: + files = { + "setup.py": """ + import setuptools + setuptools.setup(name="local-pkg", version="2.0.1") + """, + } + + def setUp(self): + self.fixtures = contextlib.ExitStack() + self.addCleanup(self.fixtures.close) + self.fixtures.enter_context(tempdir_as_cwd()) + build_files(self.files) + + def build_files(file_defs, prefix=pathlib.Path()): """Build a set of files/directories, as described by the @@ -200,6 +216,12 @@ def build_files(file_defs, prefix=pathlib.Path()): f.write(DALS(contents)) +class FileBuilder: + def unicode_filename(self): + return test.support.FS_NONASCII or \ + self.skip("File system does not support non-ascii.") + + def DALS(str): "Dedent and left-strip" return textwrap.dedent(str).lstrip() diff --git a/pipenv/vendor/importlib_metadata/tests/test_api.py b/pipenv/vendor/importlib_metadata/tests/test_api.py index aa346ddb20..eb0ff53b3b 100644 --- a/pipenv/vendor/importlib_metadata/tests/test_api.py +++ b/pipenv/vendor/importlib_metadata/tests/test_api.py @@ -4,7 +4,7 @@ from . import fixtures from .. 
import ( - Distribution, PackageNotFoundError, __version__, distribution, + Distribution, PackageNotFoundError, distribution, entry_points, files, metadata, requires, version, ) @@ -68,7 +68,8 @@ def test_metadata_for_this_package(self): assert 'Topic :: Software Development :: Libraries' in classifiers def test_importlib_metadata_version(self): - assert re.match(self.version_pattern, __version__) + resolved = version('importlib-metadata') + assert re.match(self.version_pattern, resolved) @staticmethod def _test_files(files): diff --git a/pipenv/vendor/importlib_metadata/tests/test_integration.py b/pipenv/vendor/importlib_metadata/tests/test_integration.py index c881927d55..cbb940bd46 100644 --- a/pipenv/vendor/importlib_metadata/tests/test_integration.py +++ b/pipenv/vendor/importlib_metadata/tests/test_integration.py @@ -1,9 +1,14 @@ +# coding: utf-8 + +from __future__ import unicode_literals + import unittest import packaging.requirements import packaging.version from . import fixtures from .. 
import ( + Distribution, _compat, version, ) @@ -40,3 +45,10 @@ def __getattribute__(self, name): self.fixtures.enter_context( fixtures.install_finder(ModuleFreeFinder())) _compat.disable_stdlib_finder() + + +class LocalProjectTests(fixtures.LocalPackage, unittest.TestCase): + def test_find_local(self): + dist = Distribution._local() + assert dist.metadata['Name'] == 'local-pkg' + assert dist.version == '2.0.1' diff --git a/pipenv/vendor/importlib_metadata/tests/test_main.py b/pipenv/vendor/importlib_metadata/tests/test_main.py index 876203f6f2..4ffdd5d666 100644 --- a/pipenv/vendor/importlib_metadata/tests/test_main.py +++ b/pipenv/vendor/importlib_metadata/tests/test_main.py @@ -35,6 +35,18 @@ def test_for_name_does_not_exist(self): with self.assertRaises(PackageNotFoundError): Distribution.from_name('does-not-exist') + def test_package_not_found_mentions_metadata(self): + """ + When a package is not found, that could indicate that the + packgae is not installed or that it is installed without + metadata. Ensure the exception mentions metadata to help + guide users toward the cause. See #124. + """ + with self.assertRaises(PackageNotFoundError) as ctx: + Distribution.from_name('does-not-exist') + + assert "metadata" in str(ctx.exception) + def test_new_style_classes(self): self.assertIsInstance(Distribution, type) self.assertIsInstance(MetadataPathFinder, type) @@ -256,3 +268,18 @@ def test_module(self): def test_attr(self): assert self.ep.attr is None + + +class FileSystem( + fixtures.OnSysPath, fixtures.SiteDir, fixtures.FileBuilder, + unittest.TestCase): + def test_unicode_dir_on_sys_path(self): + """ + Ensure a Unicode subdirectory of a directory on sys.path + does not crash. 
+ """ + fixtures.build_files( + {self.unicode_filename(): {}}, + prefix=self.site_dir, + ) + list(distributions()) diff --git a/pipenv/vendor/importlib_metadata/tests/test_zip.py b/pipenv/vendor/importlib_metadata/tests/test_zip.py index 515f593dba..4aae933d9d 100644 --- a/pipenv/vendor/importlib_metadata/tests/test_zip.py +++ b/pipenv/vendor/importlib_metadata/tests/test_zip.py @@ -7,9 +7,11 @@ ) try: - from importlib.resources import path -except ImportError: - from importlib_resources import path + from importlib import resources + getattr(resources, 'files') + getattr(resources, 'as_file') +except (ImportError, AttributeError): + import importlib_resources as resources try: from contextlib import ExitStack @@ -20,15 +22,19 @@ class TestZip(unittest.TestCase): root = 'importlib_metadata.tests.data' + def _fixture_on_path(self, filename): + pkg_file = resources.files(self.root).joinpath(filename) + file = self.resources.enter_context(resources.as_file(pkg_file)) + assert file.name.startswith('example-'), file.name + sys.path.insert(0, str(file)) + self.resources.callback(sys.path.pop, 0) + def setUp(self): # Find the path to the example-*.whl so we can add it to the front of # sys.path, where we'll then try to find the metadata thereof. self.resources = ExitStack() self.addCleanup(self.resources.close) - wheel = self.resources.enter_context( - path(self.root, 'example-21.12-py3-none-any.whl')) - sys.path.insert(0, str(wheel)) - self.resources.callback(sys.path.pop, 0) + self._fixture_on_path('example-21.12-py3-none-any.whl') def test_zip_version(self): self.assertEqual(version('example'), '21.12') @@ -66,10 +72,7 @@ def setUp(self): # sys.path, where we'll then try to find the metadata thereof. 
self.resources = ExitStack() self.addCleanup(self.resources.close) - egg = self.resources.enter_context( - path(self.root, 'example-21.12-py3.6.egg')) - sys.path.insert(0, str(egg)) - self.resources.callback(sys.path.pop, 0) + self._fixture_on_path('example-21.12-py3.6.egg') def test_files(self): for file in files('example'): diff --git a/pipenv/vendor/importlib_resources/__init__.py b/pipenv/vendor/importlib_resources/__init__.py index 4bce94b0fb..f122f95e87 100644 --- a/pipenv/vendor/importlib_resources/__init__.py +++ b/pipenv/vendor/importlib_resources/__init__.py @@ -2,10 +2,12 @@ import sys -from ._compat import metadata -from ._common import as_file +from ._common import ( + as_file, files, + ) -# for compatibility. Ref #88 +# For compatibility. Ref #88. +# Also requires hook-importlib_resources.py (Ref #101). __import__('importlib_resources.trees') @@ -30,7 +32,6 @@ Package, Resource, contents, - files, is_resource, open_binary, open_text, @@ -42,7 +43,6 @@ else: from importlib_resources._py2 import ( contents, - files, is_resource, open_binary, open_text, @@ -51,6 +51,3 @@ read_text, ) del __all__[:3] - - -__version__ = metadata.version('importlib_resources') diff --git a/pipenv/vendor/importlib_resources/_common.py b/pipenv/vendor/importlib_resources/_common.py index 3a5b7e445c..a7c2bf815d 100644 --- a/pipenv/vendor/importlib_resources/_common.py +++ b/pipenv/vendor/importlib_resources/_common.py @@ -3,40 +3,85 @@ import os import tempfile import contextlib +import types +import importlib from ._compat import ( - Path, package_spec, FileNotFoundError, ZipPath, - singledispatch, suppress, + Path, FileNotFoundError, + singledispatch, package_spec, ) +if False: # TYPE_CHECKING + from typing import Union, Any, Optional + from .abc import ResourceReader + Package = Union[types.ModuleType, str] -def from_package(package): + +def files(package): """ - Return a Traversable object for the given package. 
+ Get a Traversable resource from a package + """ + return from_package(get_package(package)) + + +def normalize_path(path): + # type: (Any) -> str + """Normalize a path by ensuring it is a string. + If the resulting string contains path separators, an exception is raised. """ - spec = package_spec(package) - return from_traversable_resources(spec) or fallback_resources(spec) + str_path = str(path) + parent, file_name = os.path.split(str_path) + if parent: + raise ValueError('{!r} must be only a file name'.format(path)) + return file_name -def from_traversable_resources(spec): +def get_resource_reader(package): + # type: (types.ModuleType) -> Optional[ResourceReader] """ - If the spec.loader implements TraversableResources, - directly or implicitly, it will have a ``files()`` method. + Return the package's loader if it's a ResourceReader. """ - with suppress(AttributeError): - return spec.loader.files() + # We can't use + # a issubclass() check here because apparently abc.'s __subclasscheck__() + # hook wants to create a weak reference to the object, but + # zipimport.zipimporter does not support weak references, resulting in a + # TypeError. That seems terrible. + spec = package.__spec__ + reader = getattr(spec.loader, 'get_resource_reader', None) + if reader is None: + return None + return reader(spec.name) -def fallback_resources(spec): - package_directory = Path(spec.origin).parent - try: - archive_path = spec.loader.archive - rel_path = package_directory.relative_to(archive_path) - return ZipPath(archive_path, str(rel_path) + '/') - except Exception: - pass - return package_directory +def resolve(cand): + # type: (Package) -> types.ModuleType + return ( + cand if isinstance(cand, types.ModuleType) + else importlib.import_module(cand) + ) + + +def get_package(package): + # type: (Package) -> types.ModuleType + """Take a package name or module object and return the module. + + Raise an exception if the resolved module is not a package. 
+ """ + resolved = resolve(package) + if package_spec(resolved).submodule_search_locations is None: + raise TypeError('{!r} is not a package'.format(package)) + return resolved + + +def from_package(package): + """ + Return a Traversable object for the given package. + + """ + spec = package_spec(package) + reader = spec.loader.get_resource_reader(spec.name) + return reader.files() @contextlib.contextmanager @@ -48,6 +93,7 @@ def _tempfile(reader, suffix=''): try: os.write(fd, reader()) os.close(fd) + del reader yield Path(raw_path) finally: try: @@ -57,14 +103,12 @@ def _tempfile(reader, suffix=''): @singledispatch -@contextlib.contextmanager def as_file(path): """ Given a Traversable object, return that object as a path on the local file system in a context manager. """ - with _tempfile(path.read_bytes, suffix=path.name) as local: - yield local + return _tempfile(path.read_bytes, suffix=path.name) @as_file.register(Path) diff --git a/pipenv/vendor/importlib_resources/_compat.py b/pipenv/vendor/importlib_resources/_compat.py index 242a8487a3..70b0f6b4a4 100644 --- a/pipenv/vendor/importlib_resources/_compat.py +++ b/pipenv/vendor/importlib_resources/_compat.py @@ -1,16 +1,17 @@ from __future__ import absolute_import +import sys # flake8: noqa -try: +if sys.version_info > (3,5): from pathlib import Path, PurePath -except ImportError: +else: from pathlib2 import Path, PurePath # type: ignore -try: +if sys.version_info > (3,): from contextlib import suppress -except ImportError: +else: from contextlib2 import suppress # type: ignore @@ -36,9 +37,9 @@ class ABC(object): # type: ignore try: - from importlib import metadata -except ImportError: - import importlib_metadata as metadata # type: ignore + NotADirectoryError = NotADirectoryError # type: ignore +except NameError: + NotADirectoryError = OSError # type: ignore try: @@ -60,14 +61,79 @@ def runtime_checkable(cls): # type: ignore Protocol = ABC # type: ignore -class PackageSpec(object): - def __init__(self, 
**kwargs): - vars(self).update(kwargs) +__metaclass__ = type + + +class PackageSpec: + def __init__(self, **kwargs): + vars(self).update(kwargs) + + +class TraversableResourcesAdapter: + def __init__(self, spec): + self.spec = spec + self.loader = LoaderAdapter(spec) + + def __getattr__(self, name): + return getattr(self.spec, name) + + +class LoaderAdapter: + """ + Adapt loaders to provide TraversableResources and other + compatibility. + """ + def __init__(self, spec): + self.spec = spec + + @property + def path(self): + # Python < 3 + return self.spec.origin + + def get_resource_reader(self, name): + # Python < 3.9 + from . import readers + + def _zip_reader(spec): + with suppress(AttributeError): + return readers.ZipReader(spec.loader, spec.name) + + def _namespace_reader(spec): + with suppress(AttributeError, ValueError): + return readers.NamespaceReader(spec.submodule_search_locations) + + def _available_reader(spec): + with suppress(AttributeError): + return spec.loader.get_resource_reader(spec.name) + + def _native_reader(spec): + reader = _available_reader(spec) + return reader if hasattr(reader, 'files') else None + + return ( + # native reader if it supplies 'files' + _native_reader(self.spec) or + # local ZipReader if a zip module + _zip_reader(self.spec) or + # local NamespaceReader if a namespace module + _namespace_reader(self.spec) or + # local FileReader + readers.FileReader(self) + ) def package_spec(package): - return getattr(package, '__spec__', None) or \ - PackageSpec( - origin=package.__file__, - loader=getattr(package, '__loader__', None), - ) + """ + Construct a minimal package spec suitable for + matching the interfaces this library relies upon + in later Python versions. 
+ """ + spec = getattr(package, '__spec__', None) or \ + PackageSpec( + origin=package.__file__, + loader=getattr(package, '__loader__', None), + name=package.__name__, + submodule_search_locations=getattr(package, '__path__', None), + ) + return TraversableResourcesAdapter(spec) diff --git a/pipenv/vendor/importlib_resources/_py2.py b/pipenv/vendor/importlib_resources/_py2.py index 26ce45d282..dd8c7d627d 100644 --- a/pipenv/vendor/importlib_resources/_py2.py +++ b/pipenv/vendor/importlib_resources/_py2.py @@ -3,44 +3,13 @@ from . import _common from ._compat import FileNotFoundError -from importlib import import_module from io import BytesIO, TextIOWrapper, open as io_open -def _resolve(name): - """If name is a string, resolve to a module.""" - if not isinstance(name, basestring): # noqa: F821 - return name - return import_module(name) - - -def _get_package(package): - """Normalize a path by ensuring it is a string. - - If the resulting string contains path separators, an exception is raised. - """ - module = _resolve(package) - if not hasattr(module, '__path__'): - raise TypeError("{!r} is not a package".format(package)) - return module - - -def _normalize_path(path): - """Normalize a path by ensuring it is a string. - - If the resulting string contains path separators, an exception is raised. - """ - str_path = str(path) - parent, file_name = os.path.split(str_path) - if parent: - raise ValueError("{!r} must be only a file name".format(path)) - return file_name - - def open_binary(package, resource): """Return a file-like object opened for binary reading of the resource.""" - resource = _normalize_path(resource) - package = _get_package(package) + resource = _common.normalize_path(resource) + package = _common.get_package(package) # Using pathlib doesn't work well here due to the lack of 'strict' argument # for pathlib.Path.resolve() prior to Python 3.6. 
package_path = os.path.dirname(package.__file__) @@ -89,10 +58,6 @@ def read_text(package, resource, encoding='utf-8', errors='strict'): return fp.read() -def files(package): - return _common.from_package(_get_package(package)) - - def path(package, resource): """A context manager providing a file path object to the resource. @@ -102,7 +67,7 @@ def path(package, resource): raised if the file was deleted prior to the context manager exiting). """ - path = files(package).joinpath(_normalize_path(resource)) + path = _common.files(package).joinpath(_common.normalize_path(resource)) if not path.is_file(): raise FileNotFoundError(path) return _common.as_file(path) @@ -113,8 +78,8 @@ def is_resource(package, name): Directories are *not* resources. """ - package = _get_package(package) - _normalize_path(name) + package = _common.get_package(package) + _common.normalize_path(name) try: package_contents = set(contents(package)) except OSError as error: @@ -138,5 +103,5 @@ def contents(package): not considered resources. Use `is_resource()` on each entry returned here to check if it is a resource or not. """ - package = _get_package(package) + package = _common.get_package(package) return list(item.name for item in _common.from_package(package).iterdir()) diff --git a/pipenv/vendor/importlib_resources/_py3.py b/pipenv/vendor/importlib_resources/_py3.py index 8dedde4c06..ffeb616d6e 100644 --- a/pipenv/vendor/importlib_resources/_py3.py +++ b/pipenv/vendor/importlib_resources/_py3.py @@ -1,10 +1,8 @@ import os -import sys +import io -from . import abc as resources_abc from . 
import _common -from contextlib import contextmanager, suppress -from importlib import import_module +from contextlib import suppress from importlib.abc import ResourceLoader from io import BytesIO, TextIOWrapper from pathlib import Path @@ -12,92 +10,48 @@ from typing import Iterable, Iterator, Optional, Set, Union # noqa: F401 from typing import cast from typing.io import BinaryIO, TextIO +from collections.abc import Sequence +from functools import singledispatch if False: # TYPE_CHECKING from typing import ContextManager Package = Union[ModuleType, str] -if sys.version_info >= (3, 6): - Resource = Union[str, os.PathLike] # pragma: <=35 -else: - Resource = str # pragma: >=36 - - -def _resolve(name) -> ModuleType: - """If name is a string, resolve to a module.""" - if hasattr(name, '__spec__'): - return name - return import_module(name) - - -def _get_package(package) -> ModuleType: - """Take a package name or module object and return the module. - - If a name, the module is imported. If the resolved module - object is not a package, raise an exception. - """ - module = _resolve(package) - if module.__spec__.submodule_search_locations is None: - raise TypeError('{!r} is not a package'.format(package)) - return module - - -def _normalize_path(path) -> str: - """Normalize a path by ensuring it is a string. - - If the resulting string contains path separators, an exception is raised. - """ - str_path = str(path) - parent, file_name = os.path.split(str_path) - if parent: - raise ValueError('{!r} must be only a file name'.format(path)) - return file_name - - -def _get_resource_reader( - package: ModuleType) -> Optional[resources_abc.ResourceReader]: - # Return the package's loader if it's a ResourceReader. We can't use - # a issubclass() check here because apparently abc.'s __subclasscheck__() - # hook wants to create a weak reference to the object, but - # zipimport.zipimporter does not support weak references, resulting in a - # TypeError. That seems terrible. 
- spec = package.__spec__ - reader = getattr(spec.loader, 'get_resource_reader', None) - if reader is None: - return None - return cast(resources_abc.ResourceReader, reader(spec.name)) +Resource = Union[str, os.PathLike] def open_binary(package: Package, resource: Resource) -> BinaryIO: """Return a file-like object opened for binary reading of the resource.""" - resource = _normalize_path(resource) - package = _get_package(package) - reader = _get_resource_reader(package) + resource = _common.normalize_path(resource) + package = _common.get_package(package) + reader = _common.get_resource_reader(package) if reader is not None: return reader.open_resource(resource) # Using pathlib doesn't work well here due to the lack of 'strict' # argument for pathlib.Path.resolve() prior to Python 3.6. - absolute_package_path = os.path.abspath( - package.__spec__.origin or 'non-existent file') - package_path = os.path.dirname(absolute_package_path) - full_path = os.path.join(package_path, resource) - try: - return open(full_path, mode='rb') - except OSError: - # Just assume the loader is a resource loader; all the relevant - # importlib.machinery loaders are and an AttributeError for - # get_data() will make it clear what is needed from the loader. 
- loader = cast(ResourceLoader, package.__spec__.loader) - data = None - if hasattr(package.__spec__.loader, 'get_data'): - with suppress(OSError): - data = loader.get_data(full_path) - if data is None: - package_name = package.__spec__.name - message = '{!r} resource not found in {!r}'.format( - resource, package_name) - raise FileNotFoundError(message) - return BytesIO(data) + if package.__spec__.submodule_search_locations is not None: + paths = package.__spec__.submodule_search_locations + elif package.__spec__.origin is not None: + paths = [os.path.dirname(os.path.abspath(package.__spec__.origin))] + + for package_path in paths: + full_path = os.path.join(package_path, resource) + try: + return open(full_path, mode='rb') + except OSError: + # Just assume the loader is a resource loader; all the relevant + # importlib.machinery loaders are and an AttributeError for + # get_data() will make it clear what is needed from the loader. + loader = cast(ResourceLoader, package.__spec__.loader) + data = None + if hasattr(package.__spec__.loader, 'get_data'): + with suppress(OSError): + data = loader.get_data(full_path) + if data is not None: + return BytesIO(data) + + raise FileNotFoundError('{!r} resource not found in {!r}'.format( + resource, package.__spec__.name)) def open_text(package: Package, @@ -128,13 +82,6 @@ def read_text(package: Package, return fp.read() -def files(package: Package) -> resources_abc.Traversable: - """ - Get a Traversable resource from a package - """ - return _common.from_package(_get_package(package)) - - def path( package: Package, resource: Resource, ) -> 'ContextManager[Path]': @@ -146,23 +93,28 @@ def path( raised if the file was deleted prior to the context manager exiting). 
""" - reader = _get_resource_reader(_get_package(package)) + reader = _common.get_resource_reader(_common.get_package(package)) return ( - _path_from_reader(reader, resource) + _path_from_reader(reader, _common.normalize_path(resource)) if reader else - _common.as_file(files(package).joinpath(_normalize_path(resource))) + _common.as_file( + _common.files(package).joinpath(_common.normalize_path(resource))) ) -@contextmanager def _path_from_reader(reader, resource): - norm_resource = _normalize_path(resource) + return _path_from_resource_path(reader, resource) or \ + _path_from_open_resource(reader, resource) + + +def _path_from_resource_path(reader, resource): with suppress(FileNotFoundError): - yield Path(reader.resource_path(norm_resource)) - return - opener_reader = reader.open_resource(norm_resource) - with _common._tempfile(opener_reader.read, suffix=norm_resource) as res: - yield res + return Path(reader.resource_path(resource)) + + +def _path_from_open_resource(reader, resource): + saved = io.BytesIO(reader.open_resource(resource).read()) + return _common._tempfile(saved.read, suffix=resource) def is_resource(package: Package, name: str) -> bool: @@ -170,9 +122,9 @@ def is_resource(package: Package, name: str) -> bool: Directories are *not* resources. """ - package = _get_package(package) - _normalize_path(name) - reader = _get_resource_reader(package) + package = _common.get_package(package) + _common.normalize_path(name) + reader = _common.get_resource_reader(package) if reader is not None: return reader.is_resource(name) package_contents = set(contents(package)) @@ -188,16 +140,21 @@ def contents(package: Package) -> Iterable[str]: not considered resources. Use `is_resource()` on each entry returned here to check if it is a resource or not. 
""" - package = _get_package(package) - reader = _get_resource_reader(package) + package = _common.get_package(package) + reader = _common.get_resource_reader(package) if reader is not None: - return reader.contents() - # Is the package a namespace package? By definition, namespace packages - # cannot have resources. - namespace = ( - package.__spec__.origin is None or - package.__spec__.origin == 'namespace' - ) - if namespace or not package.__spec__.has_location: - return () - return list(item.name for item in _common.from_package(package).iterdir()) + return _ensure_sequence(reader.contents()) + transversable = _common.from_package(package) + if transversable.is_dir(): + return list(item.name for item in transversable.iterdir()) + return [] + + +@singledispatch +def _ensure_sequence(iterable): + return list(iterable) + + +@_ensure_sequence.register(Sequence) +def _(iterable): + return iterable diff --git a/pipenv/vendor/importlib_resources/abc.py b/pipenv/vendor/importlib_resources/abc.py index 1f2c25a74e..18bc4ef876 100644 --- a/pipenv/vendor/importlib_resources/abc.py +++ b/pipenv/vendor/importlib_resources/abc.py @@ -136,7 +136,7 @@ def resource_path(self, resource): raise FileNotFoundError(resource) def is_resource(self, path): - return self.files().joinpath(path).isfile() + return self.files().joinpath(path).is_file() def contents(self): return (item.name for item in self.files().iterdir()) diff --git a/pipenv/vendor/importlib_resources/py.typed b/pipenv/vendor/importlib_resources/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pipenv/vendor/importlib_resources/readers.py b/pipenv/vendor/importlib_resources/readers.py new file mode 100644 index 0000000000..ce9c0caec4 --- /dev/null +++ b/pipenv/vendor/importlib_resources/readers.py @@ -0,0 +1,123 @@ +import os.path + +from collections import OrderedDict + +from . 
import abc + +from ._compat import Path, ZipPath +from ._compat import FileNotFoundError, NotADirectoryError + + +class FileReader(abc.TraversableResources): + def __init__(self, loader): + self.path = Path(loader.path).parent + + def resource_path(self, resource): + """ + Return the file system path to prevent + `resources.path()` from creating a temporary + copy. + """ + return str(self.path.joinpath(resource)) + + def files(self): + return self.path + + +class ZipReader(abc.TraversableResources): + def __init__(self, loader, module): + _, _, name = module.rpartition('.') + self.prefix = loader.prefix.replace('\\', '/') + name + '/' + self.archive = loader.archive + + def open_resource(self, resource): + try: + return super().open_resource(resource) + except KeyError as exc: + raise FileNotFoundError(exc.args[0]) + + def is_resource(self, path): + # workaround for `zipfile.Path.is_file` returning true + # for non-existent paths. + target = self.files().joinpath(path) + return target.is_file() and target.exists() + + def files(self): + return ZipPath(self.archive, self.prefix) + + +class MultiplexedPath(abc.Traversable): + """ + Given a series of Traversable objects, implement a merged + version of the interface across all objects. Useful for + namespace packages which may be multihomed at a single + name. 
+ """ + def __init__(self, *paths): + paths = list(OrderedDict.fromkeys(paths)) # remove duplicates + self._paths = list(map(Path, paths)) + if not self._paths: + message = 'MultiplexedPath must contain at least one path' + raise FileNotFoundError(message) + if any(not path.is_dir() for path in self._paths): + raise NotADirectoryError( + 'MultiplexedPath only supports directories') + + def iterdir(self): + visited = [] + for path in self._paths: + for file in path.iterdir(): + if file.name in visited: + continue + visited.append(file.name) + yield file + + def read_bytes(self): + raise FileNotFoundError('{} is not a file'.format(self)) + + def read_text(self, *args, **kwargs): + raise FileNotFoundError('{} is not a file'.format(self)) + + def is_dir(self): + return True + + def is_file(self): + return False + + def joinpath(self, child): + # first try to find child in current paths + for file in self.iterdir(): + if file.name == child: + return file + # if it does not exist, construct it with the first path + return self._paths[0] / child + + __truediv__ = joinpath + + def open(self, *args, **kwargs): + raise FileNotFoundError('{} is not a file'.format(self)) + + def name(self): + return os.path.basename(self._paths[0]) + + def __repr__(self): + return 'MultiplexedPath({})'.format( + ', '.join("'{}'".format(path) for path in self._paths)) + + +class NamespaceReader(abc.TraversableResources): + def __init__(self, namespace_path): + if 'NamespacePath' not in str(namespace_path): + raise ValueError('Invalid path') + self.path = MultiplexedPath(*list(namespace_path)) + + def resource_path(self, resource): + """ + Return the file system path to prevent + `resources.path()` from creating a temporary + copy. 
+ """ + return str(self.path.joinpath(resource)) + + def files(self): + return self.path diff --git a/pipenv/vendor/packaging/__about__.py b/pipenv/vendor/packaging/__about__.py index 5161d141be..4d998578d7 100644 --- a/pipenv/vendor/packaging/__about__.py +++ b/pipenv/vendor/packaging/__about__.py @@ -18,10 +18,10 @@ __summary__ = "Core utilities for Python packages" __uri__ = "https://github.com/pypa/packaging" -__version__ = "20.3" +__version__ = "20.4" __author__ = "Donald Stufft and individual contributors" __email__ = "donald@stufft.io" -__license__ = "BSD or Apache License, Version 2.0" +__license__ = "BSD-2-Clause or Apache-2.0" __copyright__ = "Copyright 2014-2019 %s" % __author__ diff --git a/pipenv/vendor/packaging/_compat.py b/pipenv/vendor/packaging/_compat.py index a145f7eeb3..e54bd4ede8 100644 --- a/pipenv/vendor/packaging/_compat.py +++ b/pipenv/vendor/packaging/_compat.py @@ -5,9 +5,9 @@ import sys -from ._typing import MYPY_CHECK_RUNNING +from ._typing import TYPE_CHECKING -if MYPY_CHECK_RUNNING: # pragma: no cover +if TYPE_CHECKING: # pragma: no cover from typing import Any, Dict, Tuple, Type diff --git a/pipenv/vendor/packaging/_typing.py b/pipenv/vendor/packaging/_typing.py index dc6dfce7ad..77a8b9185a 100644 --- a/pipenv/vendor/packaging/_typing.py +++ b/pipenv/vendor/packaging/_typing.py @@ -18,22 +18,31 @@ In packaging, all static-typing related imports should be guarded as follows: - from packaging._typing import MYPY_CHECK_RUNNING + from packaging._typing import TYPE_CHECKING - if MYPY_CHECK_RUNNING: + if TYPE_CHECKING: from typing import ... Ref: https://github.com/python/mypy/issues/3216 """ -MYPY_CHECK_RUNNING = False +__all__ = ["TYPE_CHECKING", "cast"] -if MYPY_CHECK_RUNNING: # pragma: no cover - import typing - - cast = typing.cast +# The TYPE_CHECKING constant defined by the typing module is False at runtime +# but True while type checking. 
+if False: # pragma: no cover + from typing import TYPE_CHECKING +else: + TYPE_CHECKING = False + +# typing's cast syntax requires calling typing.cast at runtime, but we don't +# want to import typing at runtime. Here, we inform the type checkers that +# we're importing `typing.cast` as `cast` and re-implement typing.cast's +# runtime behavior in a block that is ignored by type checkers. +if TYPE_CHECKING: # pragma: no cover + # not executed at runtime + from typing import cast else: - # typing's cast() is needed at runtime, but we don't want to import typing. - # Thus, we use a dummy no-op version, which we tell mypy to ignore. - def cast(type_, value): # type: ignore + # executed at runtime + def cast(type_, value): # noqa return value diff --git a/pipenv/vendor/packaging/markers.py b/pipenv/vendor/packaging/markers.py index f017471139..87cd3f9585 100644 --- a/pipenv/vendor/packaging/markers.py +++ b/pipenv/vendor/packaging/markers.py @@ -13,10 +13,10 @@ from pyparsing import Literal as L # noqa from ._compat import string_types -from ._typing import MYPY_CHECK_RUNNING +from ._typing import TYPE_CHECKING from .specifiers import Specifier, InvalidSpecifier -if MYPY_CHECK_RUNNING: # pragma: no cover +if TYPE_CHECKING: # pragma: no cover from typing import Any, Callable, Dict, List, Optional, Tuple, Union Operator = Callable[[str, str], bool] diff --git a/pipenv/vendor/packaging/requirements.py b/pipenv/vendor/packaging/requirements.py index 1b547927df..91f81ede00 100644 --- a/pipenv/vendor/packaging/requirements.py +++ b/pipenv/vendor/packaging/requirements.py @@ -11,11 +11,11 @@ from pyparsing import Literal as L # noqa from six.moves.urllib import parse as urlparse -from ._typing import MYPY_CHECK_RUNNING +from ._typing import TYPE_CHECKING from .markers import MARKER_EXPR, Marker from .specifiers import LegacySpecifier, Specifier, SpecifierSet -if MYPY_CHECK_RUNNING: # pragma: no cover +if TYPE_CHECKING: # pragma: no cover from typing import List diff --git 
a/pipenv/vendor/packaging/specifiers.py b/pipenv/vendor/packaging/specifiers.py index 94987486d4..fe09bb1dbb 100644 --- a/pipenv/vendor/packaging/specifiers.py +++ b/pipenv/vendor/packaging/specifiers.py @@ -9,10 +9,11 @@ import re from ._compat import string_types, with_metaclass -from ._typing import MYPY_CHECK_RUNNING +from ._typing import TYPE_CHECKING +from .utils import canonicalize_version from .version import Version, LegacyVersion, parse -if MYPY_CHECK_RUNNING: # pragma: no cover +if TYPE_CHECKING: # pragma: no cover from typing import ( List, Dict, @@ -132,9 +133,14 @@ def __str__(self): # type: () -> str return "{0}{1}".format(*self._spec) + @property + def _canonical_spec(self): + # type: () -> Tuple[str, Union[Version, str]] + return self._spec[0], canonicalize_version(self._spec[1]) + def __hash__(self): # type: () -> int - return hash(self._spec) + return hash(self._canonical_spec) def __eq__(self, other): # type: (object) -> bool @@ -146,7 +152,7 @@ def __eq__(self, other): elif not isinstance(other, self.__class__): return NotImplemented - return self._spec == other._spec + return self._canonical_spec == other._canonical_spec def __ne__(self, other): # type: (object) -> bool @@ -510,12 +516,20 @@ def _compare_not_equal(self, prospective, spec): @_require_version_compare def _compare_less_than_equal(self, prospective, spec): # type: (ParsedVersion, str) -> bool - return prospective <= Version(spec) + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return Version(prospective.public) <= Version(spec) @_require_version_compare def _compare_greater_than_equal(self, prospective, spec): # type: (ParsedVersion, str) -> bool - return prospective >= Version(spec) + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. 
+ return Version(prospective.public) >= Version(spec) @_require_version_compare def _compare_less_than(self, prospective, spec_str): diff --git a/pipenv/vendor/packaging/tags.py b/pipenv/vendor/packaging/tags.py index 300faab847..9064910b8b 100644 --- a/pipenv/vendor/packaging/tags.py +++ b/pipenv/vendor/packaging/tags.py @@ -22,9 +22,9 @@ import sysconfig import warnings -from ._typing import MYPY_CHECK_RUNNING, cast +from ._typing import TYPE_CHECKING, cast -if MYPY_CHECK_RUNNING: # pragma: no cover +if TYPE_CHECKING: # pragma: no cover from typing import ( Dict, FrozenSet, @@ -58,6 +58,12 @@ class Tag(object): + """ + A representation of the tag triple for a wheel. + + Instances are considered immutable and thus are hashable. Equality checking + is also supported. + """ __slots__ = ["_interpreter", "_abi", "_platform"] @@ -108,6 +114,12 @@ def __repr__(self): def parse_tag(tag): # type: (str) -> FrozenSet[Tag] + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + + Returning a set is required due to the possibility that the tag is a + compressed tag set. 
+ """ tags = set() interpreters, abis, platforms = tag.split("-") for interpreter in interpreters.split("."): @@ -541,7 +553,7 @@ def __init__(self, file): def unpack(fmt): # type: (str) -> int try: - result, = struct.unpack( + (result,) = struct.unpack( fmt, file.read(struct.calcsize(fmt)) ) # type: (int, ) except struct.error: diff --git a/pipenv/vendor/packaging/utils.py b/pipenv/vendor/packaging/utils.py index 44f1bf9873..19579c1a0f 100644 --- a/pipenv/vendor/packaging/utils.py +++ b/pipenv/vendor/packaging/utils.py @@ -5,19 +5,22 @@ import re -from ._typing import MYPY_CHECK_RUNNING +from ._typing import TYPE_CHECKING, cast from .version import InvalidVersion, Version -if MYPY_CHECK_RUNNING: # pragma: no cover - from typing import Union +if TYPE_CHECKING: # pragma: no cover + from typing import NewType, Union + + NormalizedName = NewType("NormalizedName", str) _canonicalize_regex = re.compile(r"[-_.]+") def canonicalize_name(name): - # type: (str) -> str + # type: (str) -> NormalizedName # This is taken from PEP 503. 
- return _canonicalize_regex.sub("-", name).lower() + value = _canonicalize_regex.sub("-", name).lower() + return cast("NormalizedName", value) def canonicalize_version(_version): diff --git a/pipenv/vendor/packaging/version.py b/pipenv/vendor/packaging/version.py index f39a2a12a1..00371e86a8 100644 --- a/pipenv/vendor/packaging/version.py +++ b/pipenv/vendor/packaging/version.py @@ -8,9 +8,9 @@ import re from ._structures import Infinity, NegativeInfinity -from ._typing import MYPY_CHECK_RUNNING +from ._typing import TYPE_CHECKING -if MYPY_CHECK_RUNNING: # pragma: no cover +if TYPE_CHECKING: # pragma: no cover from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union from ._structures import InfinityType, NegativeInfinityType diff --git a/pipenv/vendor/parse.py b/pipenv/vendor/parse.py index 9c8cae7047..25bad537d1 100644 --- a/pipenv/vendor/parse.py +++ b/pipenv/vendor/parse.py @@ -9,30 +9,38 @@ From there it's a simple thing to parse a string: ->>> parse("It's {}, I love it!", "It's spam, I love it!") - ->>> _[0] -'spam' +.. code-block:: pycon + + >>> parse("It's {}, I love it!", "It's spam, I love it!") + + >>> _[0] + 'spam' Or to search a string for some pattern: ->>> search('Age: {:d}\n', 'Name: Rufus\nAge: 42\nColor: red\n') - +.. code-block:: pycon + + >>> search('Age: {:d}\n', 'Name: Rufus\nAge: 42\nColor: red\n') + Or find all the occurrences of some pattern in a string: ->>> ''.join(r[0] for r in findall(">{}<", "

the bold text

")) -'the bold text' +.. code-block:: pycon + + >>> ''.join(r[0] for r in findall(">{}<", "

the bold text

")) + 'the bold text' If you're going to use the same pattern to match lots of strings you can compile it once: ->>> from parse import compile ->>> p = compile("It's {}, I love it!") ->>> print(p) - ->>> p.parse("It's spam, I love it!") - +.. code-block:: pycon + + >>> from parse import compile + >>> p = compile("It's {}, I love it!") + >>> print(p) + + >>> p.parse("It's spam, I love it!") + ("compile" is not exported for ``import *`` usage as it would override the built-in ``compile()`` function) @@ -40,8 +48,10 @@ The default behaviour is to match strings case insensitively. You may match with case by specifying `case_sensitive=True`: ->>> parse('SPAM', 'spam', case_sensitive=True) is None -True +.. code-block:: pycon + + >>> parse('SPAM', 'spam', case_sensitive=True) is None + True Format Syntax @@ -64,40 +74,44 @@ Some simple parse() format string examples: ->>> parse("Bring me a {}", "Bring me a shrubbery") - ->>> r = parse("The {} who say {}", "The knights who say Ni!") ->>> print(r) - ->>> print(r.fixed) -('knights', 'Ni!') ->>> r = parse("Bring out the holy {item}", "Bring out the holy hand grenade") ->>> print(r) - ->>> print(r.named) -{'item': 'hand grenade'} ->>> print(r['item']) -hand grenade ->>> 'item' in r -True +.. code-block:: pycon + + >>> parse("Bring me a {}", "Bring me a shrubbery") + + >>> r = parse("The {} who say {}", "The knights who say Ni!") + >>> print(r) + + >>> print(r.fixed) + ('knights', 'Ni!') + >>> r = parse("Bring out the holy {item}", "Bring out the holy hand grenade") + >>> print(r) + + >>> print(r.named) + {'item': 'hand grenade'} + >>> print(r['item']) + hand grenade + >>> 'item' in r + True Note that `in` only works if you have named fields. 
Dotted names and indexes are possible though the application must make additional sense of the result: ->>> r = parse("Mmm, {food.type}, I love it!", "Mmm, spam, I love it!") ->>> print(r) - ->>> print(r.named) -{'food.type': 'spam'} ->>> print(r['food.type']) -spam ->>> r = parse("My quest is {quest[name]}", "My quest is to seek the holy grail!") ->>> print(r) - ->>> print(r['quest']) -{'name': 'to seek the holy grail!'} ->>> print(r['quest']['name']) -to seek the holy grail! +.. code-block:: pycon + + >>> r = parse("Mmm, {food.type}, I love it!", "Mmm, spam, I love it!") + >>> print(r) + + >>> print(r.named) + {'food.type': 'spam'} + >>> print(r['food.type']) + spam + >>> r = parse("My quest is {quest[name]}", "My quest is to seek the holy grail!") + >>> print(r) + + >>> print(r['quest']) + {'name': 'to seek the holy grail!'} + >>> print(r['quest']['name']) + to seek the holy grail! If the text you're matching has braces in it you can match those by including a double-brace ``{{`` or ``}}`` in your format string, just like format() does. @@ -174,18 +188,22 @@ Some examples of typed parsing with ``None`` returned if the typing does not match: ->>> parse('Our {:d} {:w} are...', 'Our 3 weapons are...') - ->>> parse('Our {:d} {:w} are...', 'Our three weapons are...') ->>> parse('Meet at {:tg}', 'Meet at 1/2/2011 11:00 PM') - +.. code-block:: pycon + + >>> parse('Our {:d} {:w} are...', 'Our 3 weapons are...') + + >>> parse('Our {:d} {:w} are...', 'Our three weapons are...') + >>> parse('Meet at {:tg}', 'Meet at 1/2/2011 11:00 PM') + And messing about with alignment: ->>> parse('with {:>} herring', 'with a herring') - ->>> parse('spam {:^} spam', 'spam lovely spam') - +.. code-block:: pycon + + >>> parse('with {:>} herring', 'with a herring') + + >>> parse('spam {:^} spam', 'spam lovely spam') + Note that the "center" alignment does not test to make sure the value is centered - it just strips leading and trailing whitespace. @@ -194,14 +212,16 @@ from the input. 
Width specifies a minimum size and precision specifies a maximum. For example: ->>> parse('{:.2}{:.2}', 'look') # specifying precision - ->>> parse('{:4}{:4}', 'look at that') # specifying width - ->>> parse('{:4}{:.4}', 'look at that') # specifying both - ->>> parse('{:2d}{:2d}', '0440') # parsing two contiguous numbers - +.. code-block:: pycon + + >>> parse('{:.2}{:.2}', 'look') # specifying precision + + >>> parse('{:4}{:4}', 'look at that') # specifying width + + >>> parse('{:4}{:.4}', 'look at that') # specifying both + + >>> parse('{:2d}{:2d}', '0440') # parsing two contiguous numbers + Some notes for the date and time types: @@ -246,18 +266,18 @@ The ``Result`` instance has three attributes: -fixed +``fixed`` A tuple of the fixed-position, anonymous fields extracted from the input. -named +``named`` A dictionary of the named fields extracted from the input. -spans +``spans`` A dictionary mapping the names and fixed position indices matched to a 2-tuple slice range of where the match occurred in the input. The span does not include any stripped padding (alignment or width). The ``Match`` instance has one method: -evaluate_result() +``evaluate_result()`` Generates and returns a ``Result`` instance for this ``Match`` object. @@ -273,56 +293,66 @@ will be substituted in the ``Result`` instance for that field. Your custom type conversions may override the builtin types if you supply one -with the same identifier. +with the same identifier: + +.. code-block:: pycon ->>> def shouty(string): -... return string.upper() -... ->>> parse('{:shouty} world', 'hello world', dict(shouty=shouty)) - + >>> def shouty(string): + ... return string.upper() + ... + >>> parse('{:shouty} world', 'hello world', dict(shouty=shouty)) + If the type converter has the optional ``pattern`` attribute, it is used as -regular expression for better pattern matching (instead of the default one). 
+regular expression for better pattern matching (instead of the default one): ->>> def parse_number(text): -... return int(text) ->>> parse_number.pattern = r'\d+' ->>> parse('Answer: {number:Number}', 'Answer: 42', dict(Number=parse_number)) - ->>> _ = parse('Answer: {:Number}', 'Answer: Alice', dict(Number=parse_number)) ->>> assert _ is None, "MISMATCH" +.. code-block:: pycon + + >>> def parse_number(text): + ... return int(text) + >>> parse_number.pattern = r'\d+' + >>> parse('Answer: {number:Number}', 'Answer: 42', dict(Number=parse_number)) + + >>> _ = parse('Answer: {:Number}', 'Answer: Alice', dict(Number=parse_number)) + >>> assert _ is None, "MISMATCH" You can also use the ``with_pattern(pattern)`` decorator to add this information to a type converter function: ->>> from parse import with_pattern ->>> @with_pattern(r'\d+') -... def parse_number(text): -... return int(text) ->>> parse('Answer: {number:Number}', 'Answer: 42', dict(Number=parse_number)) - +.. code-block:: pycon + + >>> from parse import with_pattern + >>> @with_pattern(r'\d+') + ... def parse_number(text): + ... return int(text) + >>> parse('Answer: {number:Number}', 'Answer: 42', dict(Number=parse_number)) + A more complete example of a custom type might be: ->>> yesno_mapping = { -... "yes": True, "no": False, -... "on": True, "off": False, -... "true": True, "false": False, -... } ->>> @with_pattern(r"|".join(yesno_mapping)) -... def parse_yesno(text): -... return yesno_mapping[text.lower()] +.. code-block:: pycon + + >>> yesno_mapping = { + ... "yes": True, "no": False, + ... "on": True, "off": False, + ... "true": True, "false": False, + ... } + >>> @with_pattern(r"|".join(yesno_mapping)) + ... def parse_yesno(text): + ... 
return yesno_mapping[text.lower()] If the type converter ``pattern`` uses regex-grouping (with parenthesis), you should indicate this by using the optional ``regex_group_count`` parameter in the ``with_pattern()`` decorator: ->>> @with_pattern(r'((\d+))', regex_group_count=2) -... def parse_number2(text): -... return int(text) ->>> parse('Answer: {:Number2} {:Number2}', 'Answer: 42 43', dict(Number2=parse_number2)) - +.. code-block:: pycon + + >>> @with_pattern(r'((\d+))', regex_group_count=2) + ... def parse_number2(text): + ... return int(text) + >>> parse('Answer: {:Number2} {:Number2}', 'Answer: 42 43', dict(Number2=parse_number2)) + Otherwise, this may cause parsing problems with unnamed/fixed parameters. @@ -330,22 +360,26 @@ Potential Gotchas ----------------- -`parse()` will always match the shortest text necessary (from left to right) +``parse()`` will always match the shortest text necessary (from left to right) to fulfil the parse pattern, so for example: ->>> pattern = '{dir1}/{dir2}' ->>> data = 'root/parent/subdir' ->>> sorted(parse(pattern, data).named.items()) -[('dir1', 'root'), ('dir2', 'parent/subdir')] + +.. code-block:: pycon + + >>> pattern = '{dir1}/{dir2}' + >>> data = 'root/parent/subdir' + >>> sorted(parse(pattern, data).named.items()) + [('dir1', 'root'), ('dir2', 'parent/subdir')] So, even though `{'dir1': 'root/parent', 'dir2': 'subdir'}` would also fit the pattern, the actual match represents the shortest successful match for -`dir1`. +``dir1``. ---- -**Version history (in brief)**: - +- 1.18.0 Correct bug in int parsing introduced in 1.16.0 (thanks @maxxk) +- 1.17.0 Make left- and center-aligned search consume up to next space +- 1.16.0 Make compiled parse objects pickleable (thanks @martinResearch) - 1.15.0 Several fixes for parsing non-base 10 numbers (thanks @vladikcomper) - 1.14.0 More broad acceptance of Fortran number format (thanks @purpleskyfall) - 1.13.1 Project metadata correction. 
@@ -419,12 +453,13 @@ and removed the restriction on mixing fixed-position and named fields - 1.0.0 initial release -This code is copyright 2012-2019 Richard Jones +This code is copyright 2012-2020 Richard Jones See the end of the source file for the license of use. ''' from __future__ import absolute_import -__version__ = '1.15.0' + +__version__ = '1.18.0' # yes, I now have two problems import re @@ -461,29 +496,35 @@ def with_pattern(pattern, regex_group_count=None): :param regex_group_count: Indicates how many regex-groups are in pattern. :return: wrapped function """ + def decorator(func): func.pattern = pattern func.regex_group_count = regex_group_count return func + return decorator -def int_convert(base=None): - '''Convert a string to an integer. +class int_convert: + """Convert a string to an integer. The string may start with a sign. It may be of a base other than 2, 8, 10 or 16. If base isn't specified, it will be detected automatically based - on a string format. When string starts with a base indicator, 0#nnnn, + on a string format. When string starts with a base indicator, 0#nnnn, it overrides the default base of 10. It may also have other non-numeric characters that we can ignore. - ''' + """ + CHARS = '0123456789abcdefghijklmnopqrstuvwxyz' - def f(string, match, base=base): + def __init__(self, base=None): + self.base = base + + def __call__(self, string, match): if string[0] == '-': sign = -1 number_start = 1 @@ -494,34 +535,46 @@ def f(string, match, base=base): sign = 1 number_start = 0 + base = self.base # If base wasn't specified, detect it automatically if base is None: - # Assume decimal number, unless different base is detected - base = 10 + # Assume decimal number, unless different base is detected + base = 10 - # For number formats starting with 0b, 0o, 0x, use corresponding base ... 
- if string[number_start] == '0' and len(string) - number_start > 2: - if string[number_start+1] in 'bB': - base = 2 - elif string[number_start+1] in 'oO': - base = 8 - elif string[number_start+1] in 'xX': - base = 16 + # For number formats starting with 0b, 0o, 0x, use corresponding base ... + if string[number_start] == '0' and len(string) - number_start > 2: + if string[number_start + 1] in 'bB': + base = 2 + elif string[number_start + 1] in 'oO': + base = 8 + elif string[number_start + 1] in 'xX': + base = 16 - chars = CHARS[:base] + chars = int_convert.CHARS[: base] string = re.sub('[^%s]' % chars, '', string.lower()) return sign * int(string, base) - return f + + +class convert_first: + """Convert the first element of a pair. + This equivalent to lambda s,m: converter(s). But unlike a lambda function, it can be pickled + """ + + def __init__(self, converter): + self.converter = converter + + def __call__(self, string, match): + return self.converter(string) def percentage(string, match): - return float(string[:-1]) / 100. + return float(string[:-1]) / 100.0 class FixedTzOffset(tzinfo): - """Fixed offset in minutes east from UTC. 
- """ + """Fixed offset in minutes east from UTC.""" + ZERO = timedelta(0) def __init__(self, offset, name): @@ -529,8 +582,7 @@ def __init__(self, offset, name): self._name = name def __repr__(self): - return '<%s %s %s>' % (self.__class__.__name__, self._name, - self._offset) + return '<%s %s %s>' % (self.__class__.__name__, self._name, self._offset) def utcoffset(self, dt): return self._offset @@ -548,18 +600,29 @@ def __eq__(self, other): MONTHS_MAP = dict( - Jan=1, January=1, - Feb=2, February=2, - Mar=3, March=3, - Apr=4, April=4, + Jan=1, + January=1, + Feb=2, + February=2, + Mar=3, + March=3, + Apr=4, + April=4, May=5, - Jun=6, June=6, - Jul=7, July=7, - Aug=8, August=8, - Sep=9, September=9, - Oct=10, October=10, - Nov=11, November=11, - Dec=12, December=12 + Jun=6, + June=6, + Jul=7, + July=7, + Aug=8, + August=8, + Sep=9, + September=9, + Oct=10, + October=10, + Nov=11, + November=11, + Dec=12, + December=12, ) DAYS_PAT = r'(Mon|Tue|Wed|Thu|Fri|Sat|Sun)' MONTHS_PAT = r'(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)' @@ -569,17 +632,28 @@ def __eq__(self, other): TZ_PAT = r'(\s+[-+]\d\d?:?\d\d)' -def date_convert(string, match, ymd=None, mdy=None, dmy=None, - d_m_y=None, hms=None, am=None, tz=None, mm=None, dd=None): - '''Convert the incoming string containing some date / time info into a +def date_convert( + string, + match, + ymd=None, + mdy=None, + dmy=None, + d_m_y=None, + hms=None, + am=None, + tz=None, + mm=None, + dd=None, +): + """Convert the incoming string containing some date / time info into a datetime instance. 
- ''' + """ groups = match.groups() time_only = False if mm and dd: - y=datetime.today().year - m=groups[mm] - d=groups[dd] + y = datetime.today().year + m = groups[mm] + d = groups[dd] elif ymd is not None: y, m, d = re.split(r'[-/\s]', groups[ymd]) elif mdy is not None: @@ -670,13 +744,11 @@ class RepeatedNameError(ValueError): REGEX_SAFETY = re.compile(r'([?\\\\.[\]()*+\^$!\|])') # allowed field types -ALLOWED_TYPES = set(list('nbox%fFegwWdDsSl') + - ['t' + c for c in 'ieahgcts']) +ALLOWED_TYPES = set(list('nbox%fFegwWdDsSl') + ['t' + c for c in 'ieahgcts']) def extract_format(format, extra_types): - '''Pull apart the format [[fill]align][0][width][.precision][type] - ''' + """Pull apart the format [[fill]align][0][width][.precision][type]""" fill = align = None if format[0] in '<>=^': align = format[0] @@ -721,8 +793,8 @@ def extract_format(format, extra_types): class Parser(object): - '''Encapsulate a format string that may be used to parse other strings. - ''' + """Encapsulate a format string that may be used to parse other strings.""" + def __init__(self, format, extra_types=None, case_sensitive=False): # a mapping of a name as in {hello.world} to a regex-group compatible # name, like hello__world Its used to prevent the transformation of @@ -756,8 +828,7 @@ def __init__(self, format, extra_types=None, case_sensitive=False): def __repr__(self): if len(self._format) > 20: - return '<%s %r>' % (self.__class__.__name__, - self._format[:17] + '...') + return '<%s %r>' % (self.__class__.__name__, self._format[:17] + '...') return '<%s %r>' % (self.__class__.__name__, self._format) @property @@ -769,8 +840,9 @@ def _search_re(self): # access error through sys to keep py3k and backward compat e = str(sys.exc_info()[1]) if e.endswith('this version only supports 100 named groups'): - raise TooManyFields('sorry, you are attempting to parse ' - 'too many complex fields') + raise TooManyFields( + 'sorry, you are attempting to parse ' 'too many complex fields' + ) return 
self.__search_re @property @@ -783,19 +855,29 @@ def _match_re(self): # access error through sys to keep py3k and backward compat e = str(sys.exc_info()[1]) if e.endswith('this version only supports 100 named groups'): - raise TooManyFields('sorry, you are attempting to parse ' - 'too many complex fields') + raise TooManyFields( + 'sorry, you are attempting to parse ' 'too many complex fields' + ) except re.error: - raise NotImplementedError("Group names (e.g. (?P) can " - "cause failure, as they are not escaped properly: '%s'" % - expression) + raise NotImplementedError( + "Group names (e.g. (?P) can " + "cause failure, as they are not escaped properly: '%s'" % expression + ) return self.__match_re + @property + def named_fields(self): + return self._named_fields.copy() + + @property + def fixed_fields(self): + return self._fixed_fields.copy() + def parse(self, string, evaluate_result=True): - '''Match my format to the string exactly. + """Match my format to the string exactly. Return a Result or Match instance or None if there's no match. - ''' + """ m = self._match_re.match(string) if m is None: return None @@ -806,7 +888,7 @@ def parse(self, string, evaluate_result=True): return Match(self, m) def search(self, string, pos=0, endpos=None, evaluate_result=True): - '''Search the string for my format. + """Search the string for my format. Optionally start the search at "pos" character index and limit the search to a maximum index of endpos - equivalent to @@ -816,7 +898,7 @@ def search(self, string, pos=0, endpos=None, evaluate_result=True): Match instance is returned instead of the actual Result instance. Return either a Result instance or None if there's no match. 
- ''' + """ if endpos is None: endpos = len(string) m = self._search_re.search(string, pos, endpos) @@ -828,8 +910,10 @@ def search(self, string, pos=0, endpos=None, evaluate_result=True): else: return Match(self, m) - def findall(self, string, pos=0, endpos=None, extra_types=None, evaluate_result=True): - '''Search "string" for all occurrences of "format". + def findall( + self, string, pos=0, endpos=None, extra_types=None, evaluate_result=True + ): + """Search "string" for all occurrences of "format". Optionally start the search at "pos" character index and limit the search to a maximum index of endpos - equivalent to @@ -837,10 +921,12 @@ def findall(self, string, pos=0, endpos=None, extra_types=None, evaluate_result= Returns an iterator that holds Result or Match instances for each format match found. - ''' + """ if endpos is None: endpos = len(string) - return ResultIterator(self, string, pos, endpos, evaluate_result=evaluate_result) + return ResultIterator( + self, string, pos, endpos, evaluate_result=evaluate_result + ) def _expand_named_fields(self, named_fields): result = {} @@ -854,7 +940,7 @@ def _expand_named_fields(self, named_fields): if subkeys: for subkey in re.findall(r'\[[^\]]+\]', subkeys): - d = d.setdefault(k,{}) + d = d.setdefault(k, {}) k = subkey[1:-1] # assign the value to the last key @@ -887,8 +973,7 @@ def evaluate_result(self, m): # now figure the match spans spans = dict((n, m.span(name_map[n])) for n in named_fields) - spans.update((i, m.span(n + 1)) - for i, n in enumerate(self._fixed_fields)) + spans.update((i, m.span(n + 1)) for i, n in enumerate(self._fixed_fields)) # and that's our result return Result(fixed_fields, self._expand_named_fields(named_fields), spans) @@ -949,9 +1034,11 @@ def _handle_field(self, field): name = field if name in self._name_to_group_map: if self._name_types[name] != format: - raise RepeatedNameError('field type %r for field "%s" ' - 'does not match previous seen type %r' % (format, - name, 
self._name_types[name])) + raise RepeatedNameError( + 'field type %r for field "%s" ' + 'does not match previous seen type %r' + % (format, name, self._name_types[name]) + ) group = self._name_to_group_map[name] # match previously-seen value return r'(?P=%s)' % group @@ -986,10 +1073,7 @@ def _handle_field(self, field): if regex_group_count is None: regex_group_count = 0 self._group_index += regex_group_count - - def f(string, m): - return type_converter(string) - self._type_conversions[group] = f + self._type_conversions[group] = convert_first(type_converter) elif type == 'n': s = r'\d{1,3}([,.]\d{3})*' self._group_index += 1 @@ -1012,79 +1096,104 @@ def f(string, m): self._type_conversions[group] = percentage elif type == 'f': s = r'\d*\.\d+' - self._type_conversions[group] = lambda s, m: float(s) + self._type_conversions[group] = convert_first(float) elif type == 'F': s = r'\d*\.\d+' - self._type_conversions[group] = lambda s, m: Decimal(s) + self._type_conversions[group] = convert_first(Decimal) elif type == 'e': s = r'\d*\.\d+[eE][-+]?\d+|nan|NAN|[-+]?inf|[-+]?INF' - self._type_conversions[group] = lambda s, m: float(s) + self._type_conversions[group] = convert_first(float) elif type == 'g': s = r'\d+(\.\d+)?([eE][-+]?\d+)?|nan|NAN|[-+]?inf|[-+]?INF' self._group_index += 2 - self._type_conversions[group] = lambda s, m: float(s) + self._type_conversions[group] = convert_first(float) elif type == 'd': if format.get('width'): width = r'{1,%s}' % int(format['width']) else: width = '+' - s = r'\d{w}|[-+ ]?0[xX][0-9a-fA-F]{w}|[-+ ]?0[bB][01]{w}|[-+ ]?0[oO][0-7]{w}'.format(w=width) - self._type_conversions[group] = int_convert() # do not specify numeber base, determine it automatically + s = r'\d{w}|[-+ ]?0[xX][0-9a-fA-F]{w}|[-+ ]?0[bB][01]{w}|[-+ ]?0[oO][0-7]{w}'.format( + w=width + ) + self._type_conversions[ + group + ] = int_convert() # do not specify number base, determine it automatically elif type == 'ti': - s = 
r'(\d{4}-\d\d-\d\d)((\s+|T)%s)?(Z|\s*[-+]\d\d:?\d\d)?' % \ - TIME_PAT + s = r'(\d{4}-\d\d-\d\d)((\s+|T)%s)?(Z|\s*[-+]\d\d:?\d\d)?' % TIME_PAT n = self._group_index - self._type_conversions[group] = partial(date_convert, ymd=n + 1, - hms=n + 4, tz=n + 7) + self._type_conversions[group] = partial( + date_convert, ymd=n + 1, hms=n + 4, tz=n + 7 + ) self._group_index += 7 elif type == 'tg': s = r'(\d{1,2}[-/](\d{1,2}|%s)[-/]\d{4})(\s+%s)?%s?%s?' % ( - ALL_MONTHS_PAT, TIME_PAT, AM_PAT, TZ_PAT) + ALL_MONTHS_PAT, + TIME_PAT, + AM_PAT, + TZ_PAT, + ) n = self._group_index - self._type_conversions[group] = partial(date_convert, dmy=n + 1, - hms=n + 5, am=n + 8, tz=n + 9) + self._type_conversions[group] = partial( + date_convert, dmy=n + 1, hms=n + 5, am=n + 8, tz=n + 9 + ) self._group_index += 9 elif type == 'ta': s = r'((\d{1,2}|%s)[-/]\d{1,2}[-/]\d{4})(\s+%s)?%s?%s?' % ( - ALL_MONTHS_PAT, TIME_PAT, AM_PAT, TZ_PAT) + ALL_MONTHS_PAT, + TIME_PAT, + AM_PAT, + TZ_PAT, + ) n = self._group_index - self._type_conversions[group] = partial(date_convert, mdy=n + 1, - hms=n + 5, am=n + 8, tz=n + 9) + self._type_conversions[group] = partial( + date_convert, mdy=n + 1, hms=n + 5, am=n + 8, tz=n + 9 + ) self._group_index += 9 elif type == 'te': # this will allow microseconds through if they're present, but meh - s = r'(%s,\s+)?(\d{1,2}\s+%s\s+\d{4})\s+%s%s' % (DAYS_PAT, - MONTHS_PAT, TIME_PAT, TZ_PAT) + s = r'(%s,\s+)?(\d{1,2}\s+%s\s+\d{4})\s+%s%s' % ( + DAYS_PAT, + MONTHS_PAT, + TIME_PAT, + TZ_PAT, + ) n = self._group_index - self._type_conversions[group] = partial(date_convert, dmy=n + 3, - hms=n + 5, tz=n + 8) + self._type_conversions[group] = partial( + date_convert, dmy=n + 3, hms=n + 5, tz=n + 8 + ) self._group_index += 8 elif type == 'th': # slight flexibility here from the stock Apache format - s = r'(\d{1,2}[-/]%s[-/]\d{4}):%s%s' % (MONTHS_PAT, TIME_PAT, - TZ_PAT) + s = r'(\d{1,2}[-/]%s[-/]\d{4}):%s%s' % (MONTHS_PAT, TIME_PAT, TZ_PAT) n = self._group_index - 
self._type_conversions[group] = partial(date_convert, dmy=n + 1, - hms=n + 3, tz=n + 6) + self._type_conversions[group] = partial( + date_convert, dmy=n + 1, hms=n + 3, tz=n + 6 + ) self._group_index += 6 elif type == 'tc': s = r'(%s)\s+%s\s+(\d{1,2})\s+%s\s+(\d{4})' % ( - DAYS_PAT, MONTHS_PAT, TIME_PAT) + DAYS_PAT, + MONTHS_PAT, + TIME_PAT, + ) n = self._group_index - self._type_conversions[group] = partial(date_convert, - d_m_y=(n + 4, n + 3, n + 8), hms=n + 5) + self._type_conversions[group] = partial( + date_convert, d_m_y=(n + 4, n + 3, n + 8), hms=n + 5 + ) self._group_index += 8 elif type == 'tt': s = r'%s?%s?%s?' % (TIME_PAT, AM_PAT, TZ_PAT) n = self._group_index - self._type_conversions[group] = partial(date_convert, hms=n + 1, - am=n + 4, tz=n + 5) + self._type_conversions[group] = partial( + date_convert, hms=n + 1, am=n + 4, tz=n + 5 + ) self._group_index += 5 elif type == 'ts': s = r'%s(\s+)(\d+)(\s+)(\d{1,2}:\d{1,2}:\d{1,2})?' % MONTHS_PAT n = self._group_index - self._type_conversions[group] = partial(date_convert, mm=n+1, dd=n+3, - hms=n + 5) + self._type_conversions[group] = partial( + date_convert, mm=n + 1, dd=n + 3, hms=n + 5 + ) self._group_index += 5 elif type == 'l': s = r'[A-Za-z]+' @@ -1138,24 +1247,25 @@ def f(string, m): # align "=" has been handled if align == '<': - s = '%s%s*' % (s, fill) + s = '%s%s+' % (s, fill) elif align == '>': s = '%s*%s' % (fill, s) elif align == '^': - s = '%s*%s%s*' % (fill, s, fill) + s = '%s*%s%s+' % (fill, s, fill) return s class Result(object): - '''The result of a parse() or search(). + """The result of a parse() or search(). Fixed results may be looked up using `result[index]`. Named results may be looked up using `result['name']`. Named results may be tested for existence using `'name' in result`. 
- ''' + """ + def __init__(self, fixed, named, spans): self.fixed = fixed self.named = named @@ -1167,19 +1277,19 @@ def __getitem__(self, item): return self.named[item] def __repr__(self): - return '<%s %r %r>' % (self.__class__.__name__, self.fixed, - self.named) + return '<%s %r %r>' % (self.__class__.__name__, self.fixed, self.named) def __contains__(self, name): return name in self.named class Match(object): - '''The result of a parse() or search() if no results are generated. + """The result of a parse() or search() if no results are generated. This class is only used to expose internal used regex match objects to the user and use them for external Parser.evaluate_result calls. - ''' + """ + def __init__(self, parser, match): self.parser = parser self.match = match @@ -1190,10 +1300,11 @@ def evaluate_result(self): class ResultIterator(object): - '''The result of a findall() operation. + """The result of a findall() operation. Each element is a Result instance. - ''' + """ + def __init__(self, parser, string, pos, endpos, evaluate_result=True): self.parser = parser self.string = string @@ -1220,7 +1331,7 @@ def __next__(self): def parse(format, string, extra_types=None, evaluate_result=True, case_sensitive=False): - '''Using "format" attempt to pull values from "string". + """Using "format" attempt to pull values from "string". The format must match the string contents exactly. If the value you're looking for is instead just a part of the string use @@ -1244,14 +1355,21 @@ def parse(format, string, extra_types=None, evaluate_result=True, case_sensitive See the module documentation for the use of "extra_types". In the case there is no match parse() will return None. 
- ''' + """ p = Parser(format, extra_types=extra_types, case_sensitive=case_sensitive) return p.parse(string, evaluate_result=evaluate_result) -def search(format, string, pos=0, endpos=None, extra_types=None, evaluate_result=True, - case_sensitive=False): - '''Search "string" for the first occurrence of "format". +def search( + format, + string, + pos=0, + endpos=None, + extra_types=None, + evaluate_result=True, + case_sensitive=False, +): + """Search "string" for the first occurrence of "format". The format may occur anywhere within the string. If instead you wish for the format to exactly match the string @@ -1278,14 +1396,21 @@ def search(format, string, pos=0, endpos=None, extra_types=None, evaluate_result See the module documentation for the use of "extra_types". In the case there is no match parse() will return None. - ''' + """ p = Parser(format, extra_types=extra_types, case_sensitive=case_sensitive) return p.search(string, pos, endpos, evaluate_result=evaluate_result) -def findall(format, string, pos=0, endpos=None, extra_types=None, evaluate_result=True, - case_sensitive=False): - '''Search "string" for all occurrences of "format". +def findall( + format, + string, + pos=0, + endpos=None, + extra_types=None, + evaluate_result=True, + case_sensitive=False, +): + """Search "string" for all occurrences of "format". You will be returned an iterator that holds Result instances for each format match found. @@ -1309,13 +1434,13 @@ def findall(format, string, pos=0, endpos=None, extra_types=None, evaluate_resul If the format is invalid a ValueError will be raised. See the module documentation for the use of "extra_types". 
- ''' + """ p = Parser(format, extra_types=extra_types, case_sensitive=case_sensitive) - return Parser(format, extra_types=extra_types).findall(string, pos, endpos, evaluate_result=evaluate_result) + return p.findall(string, pos, endpos, evaluate_result=evaluate_result) def compile(format, extra_types=None, case_sensitive=False): - '''Create a Parser instance to parse "format". + """Create a Parser instance to parse "format". The resultant Parser has a method .parse(string) which behaves in the same manner as parse(format, string). @@ -1329,7 +1454,7 @@ def compile(format, extra_types=None, case_sensitive=False): See the module documentation for the use of "extra_types". Returns a Parser instance. - ''' + """ return Parser(format, extra_types=extra_types, case_sensitive=case_sensitive) diff --git a/pipenv/vendor/pep517/__init__.py b/pipenv/vendor/pep517/__init__.py index 7355b68a24..10687486e2 100644 --- a/pipenv/vendor/pep517/__init__.py +++ b/pipenv/vendor/pep517/__init__.py @@ -1,4 +1,6 @@ """Wrappers to build Python packages using PEP 517 hooks """ -__version__ = '0.8.2' +__version__ = '0.9.1' + +from .wrappers import * # noqa: F401, F403 diff --git a/pipenv/vendor/pep517/wrappers.py b/pipenv/vendor/pep517/wrappers.py index 00a3d1a789..d6338ea520 100644 --- a/pipenv/vendor/pep517/wrappers.py +++ b/pipenv/vendor/pep517/wrappers.py @@ -9,6 +9,15 @@ from . import compat +__all__ = [ + 'BackendUnavailable', + 'BackendInvalid', + 'HookMissing', + 'UnsupportedOperation', + 'default_subprocess_runner', + 'quiet_subprocess_runner', + 'Pep517HookCaller', +] try: import importlib.resources as resources @@ -102,19 +111,22 @@ def norm_and_check(source_tree, requested): class Pep517HookCaller(object): """A wrapper around a source directory to be built with a PEP 517 backend. - source_dir : The path to the source directory, containing pyproject.toml. 
- build_backend : The build backend spec, as per PEP 517, from + :param source_dir: The path to the source directory, containing pyproject.toml. - backend_path : The backend path, as per PEP 517, from pyproject.toml. - runner : A callable that invokes the wrapper subprocess. + :param build_backend: The build backend spec, as per PEP 517, from + pyproject.toml. + :param backend_path: The backend path, as per PEP 517, from pyproject.toml. + :param runner: A callable that invokes the wrapper subprocess. + :param python_executable: The Python executable used to invoke the backend The 'runner', if provided, must expect the following: - cmd : a list of strings representing the command and arguments to - execute, as would be passed to e.g. 'subprocess.check_call'. - cwd : a string representing the working directory that must be - used for the subprocess. Corresponds to the provided source_dir. - extra_environ : a dict mapping environment variable names to values - which must be set for the subprocess execution. + + - cmd: a list of strings representing the command and arguments to + execute, as would be passed to e.g. 'subprocess.check_call'. + - cwd: a string representing the working directory that must be + used for the subprocess. Corresponds to the provided source_dir. + - extra_environ: a dict mapping environment variable names to values + which must be set for the subprocess execution. 
""" def __init__( self, @@ -122,6 +134,7 @@ def __init__( build_backend, backend_path=None, runner=None, + python_executable=None, ): if runner is None: runner = default_subprocess_runner @@ -134,6 +147,9 @@ def __init__( ] self.backend_path = backend_path self._subprocess_runner = runner + if not python_executable: + python_executable = sys.executable + self.python_executable = python_executable @contextmanager def subprocess_runner(self, runner): @@ -150,7 +166,8 @@ def subprocess_runner(self, runner): def get_requires_for_build_wheel(self, config_settings=None): """Identify packages required for building a wheel - Returns a list of dependency specifications, e.g.: + Returns a list of dependency specifications, e.g.:: + ["wheel >= 0.25", "setuptools"] This does not include requirements specified in pyproject.toml. @@ -164,7 +181,7 @@ def get_requires_for_build_wheel(self, config_settings=None): def prepare_metadata_for_build_wheel( self, metadata_directory, config_settings=None, _allow_fallback=True): - """Prepare a *.dist-info folder with metadata for this project. + """Prepare a ``*.dist-info`` folder with metadata for this project. Returns the name of the newly created folder. @@ -202,7 +219,8 @@ def build_wheel( def get_requires_for_build_sdist(self, config_settings=None): """Identify packages required for building a wheel - Returns a list of dependency specifications, e.g.: + Returns a list of dependency specifications, e.g.:: + ["setuptools >= 26"] This does not include requirements specified in pyproject.toml. 
@@ -252,8 +270,9 @@ def _call_hook(self, hook_name, kwargs): # Run the hook in a subprocess with _in_proc_script_path() as script: + python = self.python_executable self._subprocess_runner( - [sys.executable, str(script), hook_name, td], + [python, abspath(str(script)), hook_name, td], cwd=self.source_dir, extra_environ=extra_environ ) diff --git a/pipenv/vendor/pipdeptree.py b/pipenv/vendor/pipdeptree.py index 899118ccaa..cd67538c86 100644 --- a/pipenv/vendor/pipdeptree.py +++ b/pipenv/vendor/pipdeptree.py @@ -22,7 +22,7 @@ # from graphviz import backend, Digraph -__version__ = '0.13.2' +__version__ = '1.0.0' flatten = chain.from_iterable diff --git a/pipenv/vendor/requests/LICENSE b/pipenv/vendor/requests/LICENSE index 13d91ddc7a..67db858821 100644 --- a/pipenv/vendor/requests/LICENSE +++ b/pipenv/vendor/requests/LICENSE @@ -1,13 +1,175 @@ -Copyright 2019 Kenneth Reitz - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ - https://www.apache.org/licenses/LICENSE-2.0 + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
diff --git a/pipenv/vendor/requests/__init__.py b/pipenv/vendor/requests/__init__.py index 626247cbba..c00f556bbc 100644 --- a/pipenv/vendor/requests/__init__.py +++ b/pipenv/vendor/requests/__init__.py @@ -57,10 +57,10 @@ def check_compatibility(urllib3_version, chardet_version): # Check urllib3 for compatibility. major, minor, patch = urllib3_version # noqa: F811 major, minor, patch = int(major), int(minor), int(patch) - # urllib3 >= 1.21.1, <= 1.25 + # urllib3 >= 1.21.1, <= 1.26 assert major == 1 assert minor >= 21 - assert minor <= 25 + assert minor <= 26 # Check chardet for compatibility. major, minor, patch = chardet_version.split('.')[:3] @@ -90,14 +90,22 @@ def _check_cryptography(cryptography_version): "version!".format(urllib3.__version__, chardet.__version__), RequestsDependencyWarning) -# Attempt to enable urllib3's SNI support, if possible +# Attempt to enable urllib3's fallback for SNI support +# if the standard library doesn't support SNI or the +# 'ssl' library isn't available. try: - from urllib3.contrib import pyopenssl - pyopenssl.inject_into_urllib3() + try: + import ssl + except ImportError: + ssl = None + + if not getattr(ssl, "HAS_SNI", False): + from urllib3.contrib import pyopenssl + pyopenssl.inject_into_urllib3() - # Check cryptography version - from cryptography import __version__ as cryptography_version - _check_cryptography(cryptography_version) + # Check cryptography version + from cryptography import __version__ as cryptography_version + _check_cryptography(cryptography_version) except ImportError: pass diff --git a/pipenv/vendor/requests/__version__.py b/pipenv/vendor/requests/__version__.py index b9e7df4881..7108520775 100644 --- a/pipenv/vendor/requests/__version__.py +++ b/pipenv/vendor/requests/__version__.py @@ -5,8 +5,8 @@ __title__ = 'requests' __description__ = 'Python HTTP for Humans.' 
__url__ = 'https://requests.readthedocs.io' -__version__ = '2.23.0' -__build__ = 0x022300 +__version__ = '2.25.0' +__build__ = 0x022500 __author__ = 'Kenneth Reitz' __author_email__ = 'me@kennethreitz.org' __license__ = 'Apache 2.0' diff --git a/pipenv/vendor/requests/exceptions.py b/pipenv/vendor/requests/exceptions.py index a80cad80f1..0e9c820c83 100644 --- a/pipenv/vendor/requests/exceptions.py +++ b/pipenv/vendor/requests/exceptions.py @@ -94,11 +94,11 @@ class ChunkedEncodingError(RequestException): class ContentDecodingError(RequestException, BaseHTTPError): - """Failed to decode response content""" + """Failed to decode response content.""" class StreamConsumedError(RequestException, TypeError): - """The content for this response was already consumed""" + """The content for this response was already consumed.""" class RetryError(RequestException): @@ -106,21 +106,18 @@ class RetryError(RequestException): class UnrewindableBodyError(RequestException): - """Requests encountered an error when trying to rewind a body""" + """Requests encountered an error when trying to rewind a body.""" # Warnings class RequestsWarning(Warning): """Base warning for Requests.""" - pass class FileModeWarning(RequestsWarning, DeprecationWarning): """A file was opened in text mode, but Requests determined its binary length.""" - pass class RequestsDependencyWarning(RequestsWarning): """An imported dependency doesn't match the expected version range.""" - pass diff --git a/pipenv/vendor/requests/models.py b/pipenv/vendor/requests/models.py index 357988327e..ec2edc20b5 100644 --- a/pipenv/vendor/requests/models.py +++ b/pipenv/vendor/requests/models.py @@ -273,7 +273,9 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): """The fully mutable :class:`PreparedRequest ` object, containing the exact bytes that will be sent to the server. - Generated from either a :class:`Request ` object or manually. 
+ Instances are generated from a :class:`Request ` object, and + should not be instantiated manually; doing so may produce undesirable + effects. Usage:: @@ -473,12 +475,12 @@ def prepare_body(self, data, files, json=None): not isinstance(data, (basestring, list, tuple, Mapping)) ]) - try: - length = super_len(data) - except (TypeError, AttributeError, UnsupportedOperation): - length = None - if is_stream: + try: + length = super_len(data) + except (TypeError, AttributeError, UnsupportedOperation): + length = None + body = data if getattr(body, 'tell', None) is not None: @@ -916,7 +918,7 @@ def links(self): return l def raise_for_status(self): - """Raises stored :class:`HTTPError`, if one occurred.""" + """Raises :class:`HTTPError`, if one occurred.""" http_error_msg = '' if isinstance(self.reason, bytes): diff --git a/pipenv/vendor/requests/sessions.py b/pipenv/vendor/requests/sessions.py index 2845880bf4..fdf7e9fe35 100644 --- a/pipenv/vendor/requests/sessions.py +++ b/pipenv/vendor/requests/sessions.py @@ -387,6 +387,13 @@ def __init__(self): self.stream = False #: SSL Verification default. + #: Defaults to `True`, requiring requests to verify the TLS certificate at the + #: remote end. + #: If verify is set to `False`, requests will accept any TLS certificate + #: presented by the server, and will ignore hostname mismatches and/or + #: expired certificates, which will make your application vulnerable to + #: man-in-the-middle (MitM) attacks. + #: Only set this to `False` for testing. self.verify = True #: SSL client certificate default, if String, path to ssl client @@ -495,7 +502,12 @@ def request(self, method, url, content. Defaults to ``False``. :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use. Defaults to ``True``. + to a CA bundle to use. Defaults to ``True``. 
When set to + ``False``, requests will accept any TLS certificate presented by + the server, and will ignore hostname mismatches and/or expired + certificates, which will make your application vulnerable to + man-in-the-middle (MitM) attacks. Setting verify to ``False`` + may be useful during local development or testing. :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. :rtype: requests.Response @@ -658,11 +670,13 @@ def send(self, request, **kwargs): extract_cookies_to_jar(self.cookies, request, r.raw) - # Redirect resolving generator. - gen = self.resolve_redirects(r, request, **kwargs) - # Resolve redirects if allowed. - history = [resp for resp in gen] if allow_redirects else [] + if allow_redirects: + # Redirect resolving generator. + gen = self.resolve_redirects(r, request, **kwargs) + history = [resp for resp in gen] + else: + history = [] # Shuffle things around if there's history. if history: diff --git a/pipenv/vendor/requests/utils.py b/pipenv/vendor/requests/utils.py index c1700d7fe8..16d5776201 100644 --- a/pipenv/vendor/requests/utils.py +++ b/pipenv/vendor/requests/utils.py @@ -169,14 +169,20 @@ def super_len(o): def get_netrc_auth(url, raise_errors=False): """Returns the Requests tuple auth for a given url from netrc.""" + netrc_file = os.environ.get('NETRC') + if netrc_file is not None: + netrc_locations = (netrc_file,) + else: + netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES) + try: from netrc import netrc, NetrcParseError netrc_path = None - for f in NETRC_FILES: + for f in netrc_locations: try: - loc = os.path.expanduser('~/{}'.format(f)) + loc = os.path.expanduser(f) except KeyError: # os.path.expanduser can fail when $HOME is undefined and # getpwuid fails. See https://bugs.python.org/issue20164 & @@ -212,7 +218,7 @@ def get_netrc_auth(url, raise_errors=False): if raise_errors: raise - # AppEngine hackiness. + # App Engine hackiness. 
except (ImportError, AttributeError): pass diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index f8561e233b..a3ce1d75ea 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -10,7 +10,7 @@ from .models.pipfile import Pipfile from .models.requirements import Requirement -__version__ = "1.5.14" +__version__ = "1.5.16" logger = logging.getLogger(__name__) diff --git a/pipenv/vendor/requirementslib/models/dependencies.py b/pipenv/vendor/requirementslib/models/dependencies.py index 1a610ce792..2608479a6e 100644 --- a/pipenv/vendor/requirementslib/models/dependencies.py +++ b/pipenv/vendor/requirementslib/models/dependencies.py @@ -6,7 +6,7 @@ import functools import os -from pipenv.vendor import attr +import attr import packaging.markers import packaging.version import pip_shims.shims diff --git a/pipenv/vendor/requirementslib/models/lockfile.py b/pipenv/vendor/requirementslib/models/lockfile.py index 841fc74c22..3eabc5043f 100644 --- a/pipenv/vendor/requirementslib/models/lockfile.py +++ b/pipenv/vendor/requirementslib/models/lockfile.py @@ -5,7 +5,7 @@ import itertools import os -from pipenv.vendor import attr +import attr import plette.lockfiles import six from vistir.compat import FileNotFoundError, JSONDecodeError, Path diff --git a/pipenv/vendor/requirementslib/models/markers.py b/pipenv/vendor/requirementslib/models/markers.py index b07e444cfc..8593326be0 100644 --- a/pipenv/vendor/requirementslib/models/markers.py +++ b/pipenv/vendor/requirementslib/models/markers.py @@ -3,7 +3,7 @@ import operator import re -from pipenv.vendor import attr +import attr import distlib.markers import packaging.version import six @@ -673,7 +673,7 @@ def parse_marker_dict(marker_dict): def _contains_micro_version(version_string): - return re.search("\d+\.\d+\.\d+", version_string) is not None + return re.search(r"\d+\.\d+\.\d+", version_string) is not None def 
format_pyversion(parts): diff --git a/pipenv/vendor/requirementslib/models/metadata.py b/pipenv/vendor/requirementslib/models/metadata.py index 671a311b1b..b45b1f02a8 100644 --- a/pipenv/vendor/requirementslib/models/metadata.py +++ b/pipenv/vendor/requirementslib/models/metadata.py @@ -9,7 +9,7 @@ import zipfile from collections import defaultdict -from pipenv.vendor import attr +import attr import dateutil.parser import distlib.metadata import distlib.wheel diff --git a/pipenv/vendor/requirementslib/models/pipfile.py b/pipenv/vendor/requirementslib/models/pipfile.py index 9bda73d49a..9c0aea4ea3 100644 --- a/pipenv/vendor/requirementslib/models/pipfile.py +++ b/pipenv/vendor/requirementslib/models/pipfile.py @@ -7,7 +7,7 @@ import os import sys -from pipenv.vendor import attr +import attr import plette.models.base import plette.pipfiles import tomlkit diff --git a/pipenv/vendor/requirementslib/models/project.py b/pipenv/vendor/requirementslib/models/project.py index 4c73823c23..7c1b0e8100 100644 --- a/pipenv/vendor/requirementslib/models/project.py +++ b/pipenv/vendor/requirementslib/models/project.py @@ -6,7 +6,7 @@ import io import os -from pipenv.vendor import attr +import attr import packaging.markers import packaging.utils import plette diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index 8578f73ec4..6831a7c830 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -8,12 +8,10 @@ import sys from contextlib import contextmanager from distutils.sysconfig import get_python_lib -from functools import partial -from pipenv.vendor import attr +import attr import pip_shims import six -import vistir from cached_property import cached_property from packaging.markers import Marker from packaging.requirements import Requirement as PackagingRequirement @@ -50,10 +48,6 @@ strip_ssh_from_git_uri, ) from .markers import ( - 
cleanup_pyspecs, - contains_pyversion, - format_pyversion, - get_contained_pyversions, normalize_marker_str, ) from .setup_info import ( @@ -67,10 +61,10 @@ from .utils import ( DIRECT_URL_RE, HASH_STRING, - URL_RE, build_vcs_uri, convert_direct_url_to_url, create_link, + expand_env_variables, extras_to_string, filter_none, format_requirement, @@ -82,7 +76,6 @@ make_install_requirement, normalize_name, parse_extras, - read_source, specs_to_string, split_markers_from_line, split_ref_from_uri, @@ -926,7 +919,7 @@ def get_ireq(self): if self.is_named: ireq = pip_shims.shims.install_req_from_line(self.line) if self.is_file or self.is_remote_url: - ireq.link = self.link + ireq.link = pip_shims.shims.Link(expand_env_variables(self.link.url)) if self.extras and not ireq.extras: ireq.extras = set(self.extras) if self.parsed_marker is not None and not ireq.markers: @@ -1413,43 +1406,51 @@ def pipfile_part(self): ) -@attr.s(slots=True, cmp=True, hash=True) +@attr.s(slots=True, eq=True, order=True, hash=True) class FileRequirement(object): """File requirements for tar.gz installable files or wheels or setup.py containing directories.""" #: Path to the relevant `setup.py` location - setup_path = attr.ib(default=None, cmp=True) # type: Optional[STRING_TYPE] + setup_path = attr.ib(default=None, eq=True, order=True) # type: Optional[STRING_TYPE] #: path to hit - without any of the VCS prefixes (like git+ / http+ / etc) - path = attr.ib(default=None, cmp=True) # type: Optional[STRING_TYPE] + path = attr.ib(default=None, eq=True, order=True) # type: Optional[STRING_TYPE] #: Whether the package is editable - editable = attr.ib(default=False, cmp=True) # type: bool + editable = attr.ib(default=False, eq=True, order=True) # type: bool #: Extras if applicable extras = attr.ib( - default=attr.Factory(tuple), cmp=True + default=attr.Factory(tuple), eq=True, order=True ) # type: Tuple[STRING_TYPE, ...] 
- _uri_scheme = attr.ib(default=None, cmp=True) # type: Optional[STRING_TYPE] + _uri_scheme = attr.ib( + default=None, eq=True, order=True + ) # type: Optional[STRING_TYPE] #: URI of the package - uri = attr.ib(cmp=True) # type: Optional[STRING_TYPE] + uri = attr.ib(eq=True, order=True) # type: Optional[STRING_TYPE] #: Link object representing the package to clone - link = attr.ib(cmp=True) # type: Optional[Link] + link = attr.ib(eq=True, order=True) # type: Optional[Link] #: PyProject Requirements pyproject_requires = attr.ib( - factory=tuple, cmp=True + factory=tuple, eq=True, order=True ) # type: Optional[Tuple[STRING_TYPE, ...]] #: PyProject Build System - pyproject_backend = attr.ib(default=None, cmp=True) # type: Optional[STRING_TYPE] + pyproject_backend = attr.ib( + default=None, eq=True, order=True + ) # type: Optional[STRING_TYPE] #: PyProject Path - pyproject_path = attr.ib(default=None, cmp=True) # type: Optional[STRING_TYPE] + pyproject_path = attr.ib( + default=None, eq=True, order=True + ) # type: Optional[STRING_TYPE] subdirectory = attr.ib(default=None) # type: Optional[STRING_TYPE] #: Setup metadata e.g. 
dependencies - _setup_info = attr.ib(default=None, cmp=True) # type: Optional[SetupInfo] - _has_hashed_name = attr.ib(default=False, cmp=True) # type: bool - _parsed_line = attr.ib(default=None, cmp=False, hash=True) # type: Optional[Line] + _setup_info = attr.ib(default=None, eq=True, order=True) # type: Optional[SetupInfo] + _has_hashed_name = attr.ib(default=False, eq=True, order=True) # type: bool + _parsed_line = attr.ib( + default=None, eq=False, order=False, hash=True + ) # type: Optional[Line] #: Package name - name = attr.ib(cmp=True) # type: Optional[STRING_TYPE] + name = attr.ib(eq=True, order=True) # type: Optional[STRING_TYPE] #: A :class:`~pkg_resources.Requirement` instance - req = attr.ib(cmp=True) # type: Optional[PackagingRequirement] + req = attr.ib(eq=True, order=True) # type: Optional[PackagingRequirement] @classmethod def get_link_from_line(cls, line): @@ -2140,7 +2141,7 @@ def get_vcs_repo(self, src_dir=None, checkout_dir=None): if checkout_dir is None: checkout_dir = self.get_checkout_dir(src_dir=src_dir) vcsrepo = VCSRepository( - url=self.url, + url=expand_env_variables(self.url), name=self.name, ref=self.ref if self.ref else None, checkout_directory=checkout_dir, @@ -2373,29 +2374,34 @@ def pipfile_part(self): return {name: pipfile_dict} # type: ignore -@attr.s(cmp=True, hash=True) +@attr.s(eq=True, order=True, hash=True) class Requirement(object): - _name = attr.ib(cmp=True) # type: STRING_TYPE + _name = attr.ib(eq=True, order=True) # type: STRING_TYPE vcs = attr.ib( - default=None, validator=attr.validators.optional(validate_vcs), cmp=True + default=None, + validator=attr.validators.optional(validate_vcs), + eq=True, + order=True, ) # type: Optional[STRING_TYPE] req = attr.ib( - default=None, cmp=True + default=None, eq=True, order=True ) # type: Optional[Union[VCSRequirement, FileRequirement, NamedRequirement]] - markers = attr.ib(default=None, cmp=True) # type: Optional[STRING_TYPE] + markers = attr.ib(default=None, eq=True, 
order=True) # type: Optional[STRING_TYPE] _specifiers = attr.ib( - validator=attr.validators.optional(validate_specifiers), cmp=True + validator=attr.validators.optional(validate_specifiers), eq=True, order=True ) # type: Optional[STRING_TYPE] - index = attr.ib(default=None, cmp=True) # type: Optional[STRING_TYPE] - editable = attr.ib(default=None, cmp=True) # type: Optional[bool] + index = attr.ib(default=None, eq=True, order=True) # type: Optional[STRING_TYPE] + editable = attr.ib(default=None, eq=True, order=True) # type: Optional[bool] hashes = attr.ib( - factory=frozenset, converter=frozenset, cmp=True + factory=frozenset, converter=frozenset, eq=True, order=True ) # type: FrozenSet[STRING_TYPE] - extras = attr.ib(factory=tuple, cmp=True) # type: Tuple[STRING_TYPE, ...] - abstract_dep = attr.ib(default=None, cmp=False) # type: Optional[AbstractDependency] - _line_instance = attr.ib(default=None, cmp=False) # type: Optional[Line] + extras = attr.ib(factory=tuple, eq=True, order=True) # type: Tuple[STRING_TYPE, ...] 
+ abstract_dep = attr.ib( + default=None, eq=False, order=False + ) # type: Optional[AbstractDependency] + _line_instance = attr.ib(default=None, eq=False, order=False) # type: Optional[Line] _ireq = attr.ib( - default=None, cmp=False + default=None, eq=False, order=False ) # type: Optional[pip_shims.InstallRequirement] def __hash__(self): diff --git a/pipenv/vendor/requirementslib/models/resolvers.py b/pipenv/vendor/requirementslib/models/resolvers.py index 4554b29907..43590523d1 100644 --- a/pipenv/vendor/requirementslib/models/resolvers.py +++ b/pipenv/vendor/requirementslib/models/resolvers.py @@ -1,7 +1,7 @@ # -*- coding=utf-8 -*- from contextlib import contextmanager -from pipenv.vendor import attr +import attr import six from pip_shims.shims import Wheel diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 7e604cc878..58ea997921 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -12,7 +12,7 @@ import sys from functools import partial -from pipenv.vendor import attr +import attr import chardet import packaging.specifiers import packaging.utils @@ -1027,7 +1027,9 @@ def ast_unparse(item, initial_mapping=False, analyzer=None, recurse=True): # no constant = ast.Ellipsis unparsed = item if isinstance(item, ast.Dict): - unparsed = dict(zip(map(_ensure_hashable, unparse(item.keys)), unparse(item.values))) + unparsed = dict( + zip(map(_ensure_hashable, unparse(item.keys)), unparse(item.values)) + ) elif isinstance(item, ast.List): unparsed = [unparse(el) for el in item.elts] elif isinstance(item, ast.Tuple): @@ -1175,7 +1177,7 @@ def ast_unparse(item, initial_mapping=False, analyzer=None, recurse=True): # no # XXX: Original reference try: targets = item.targets # for ast.Assign - except AttributeError: # for ast.AnnAssign + except AttributeError: # for ast.AnnAssign targets = (item.target,) if not initial_mapping: target = 
unparse(next(iter(targets)), recurse=False) @@ -1326,9 +1328,9 @@ def run_setup(script_path, egg_base=None): @attr.s(slots=True, frozen=True) class BaseRequirement(object): - name = attr.ib(default="", cmp=True) # type: STRING_TYPE + name = attr.ib(default="", eq=True, order=True) # type: STRING_TYPE requirement = attr.ib( - default=None, cmp=True + default=None, eq=True, order=True ) # type: Optional[PkgResourcesRequirement] def __str__(self): @@ -1368,8 +1370,8 @@ def from_req(cls, req): @attr.s(slots=True, frozen=True) class Extra(object): - name = attr.ib(default=None, cmp=True) # type: STRING_TYPE - requirements = attr.ib(factory=frozenset, cmp=True, type=frozenset) + name = attr.ib(default=None, eq=True, order=True) # type: STRING_TYPE + requirements = attr.ib(factory=frozenset, eq=True, order=True, type=frozenset) def __str__(self): # type: () -> S @@ -1933,7 +1935,8 @@ def from_ireq(cls, ireq, subdir=None, finder=None, session=None): if not ireq.source_dir: build_kwargs = { "build_dir": kwargs["build_dir"], - "autodelete": False, "parallel_builds": True + "autodelete": False, + "parallel_builds": True, } call_function_with_correct_args(build_location_func, **build_kwargs) ireq.ensure_has_source_dir(kwargs["src_dir"]) diff --git a/pipenv/vendor/requirementslib/models/url.py b/pipenv/vendor/requirementslib/models/url.py index befacdb612..8f5d84884f 100644 --- a/pipenv/vendor/requirementslib/models/url.py +++ b/pipenv/vendor/requirementslib/models/url.py @@ -1,7 +1,7 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import, print_function -from pipenv.vendor import attr +import attr import pip_shims.shims from orderedmultidict import omdict from six.moves.urllib.parse import quote, unquote_plus, unquote as url_unquote diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index 6c3b7de8a5..59900c68a2 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ 
b/pipenv/vendor/requirementslib/models/utils.py @@ -1028,6 +1028,22 @@ def read_source(path, encoding="utf-8"): return fp.read() +def expand_env_variables(line): + # type: (AnyStr) -> AnyStr + """Expand the env vars in a line following pip's standard. + https://pip.pypa.io/en/stable/reference/pip_install/#id10 + + Matches environment variable-style values in '${MY_VARIABLE_1}' with the + variable name consisting of only uppercase letters, digits or the '_' + """ + + def replace_with_env(match): + value = os.getenv(match.group(1)) + return value if value else match.group() + + return re.sub(r"\$\{([A-Z0-9_]+)\}", replace_with_env, line) + + SETUPTOOLS_SHIM = ( "import setuptools, tokenize;__file__=%r;" "f=getattr(tokenize, 'open', open)(__file__);" diff --git a/pipenv/vendor/requirementslib/models/vcs.py b/pipenv/vendor/requirementslib/models/vcs.py index 434c01bb1e..d4a2f160c7 100644 --- a/pipenv/vendor/requirementslib/models/vcs.py +++ b/pipenv/vendor/requirementslib/models/vcs.py @@ -5,7 +5,7 @@ import os import sys -from pipenv.vendor import attr +import attr import pip_shims import six diff --git a/pipenv/vendor/resolvelib/__init__.py b/pipenv/vendor/resolvelib/__init__.py index aaba5b3a12..78ede4fd1a 100644 --- a/pipenv/vendor/resolvelib/__init__.py +++ b/pipenv/vendor/resolvelib/__init__.py @@ -11,7 +11,7 @@ "ResolutionTooDeep", ] -__version__ = "0.3.0" +__version__ = "0.5.2" from .providers import AbstractProvider, AbstractResolver diff --git a/pipenv/vendor/resolvelib/compat/__init__.py b/pipenv/vendor/resolvelib/compat/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pipenv/vendor/resolvelib/compat/collections_abc.py b/pipenv/vendor/resolvelib/compat/collections_abc.py new file mode 100644 index 0000000000..366cc5e2e1 --- /dev/null +++ b/pipenv/vendor/resolvelib/compat/collections_abc.py @@ -0,0 +1,6 @@ +__all__ = ["Sequence"] + +try: + from collections.abc import Sequence +except ImportError: + from collections import Sequence 
diff --git a/pipenv/vendor/resolvelib/providers.py b/pipenv/vendor/resolvelib/providers.py index db1682195e..965cf9c138 100644 --- a/pipenv/vendor/resolvelib/providers.py +++ b/pipenv/vendor/resolvelib/providers.py @@ -1,33 +1,37 @@ class AbstractProvider(object): - """Delegate class to provide requirement interface for the resolver. - """ + """Delegate class to provide requirement interface for the resolver.""" - def identify(self, dependency): - """Given a dependency, return an identifier for it. + def identify(self, requirement_or_candidate): + """Given a requirement or candidate, return an identifier for it. - This is used in many places to identify the dependency, e.g. whether - two requirements should have their specifier parts merged, whether - two specifications would conflict with each other (because they the - same name but different versions). + This is used in many places to identify a requirement or candidate, + e.g. whether two requirements should have their specifier parts merged, + whether two candidates would conflict with each other (because they + have same name but different versions). """ raise NotImplementedError def get_preference(self, resolution, candidates, information): - """Produce a sort key for given specification based on preference. + """Produce a sort key for given requirement based on preference. The preference is defined as "I think this requirement should be resolved first". The lower the return value is, the more preferred this group of arguments is. :param resolution: Currently pinned candidate, or `None`. - :param candidates: A list of possible candidates. + :param candidates: An iterable of possible candidates. :param information: A list of requirement information. - Each information instance is a named tuple with two entries: + The `candidates` iterable's exact type depends on the return type of + `find_matches()`. A sequence is passed-in as-is if possible. 
If it + returns a callble, the iterator returned by that callable is passed + in here. + + Each element in `information` is a named tuple with two entries: * `requirement` specifies a requirement contributing to the current - candidate list - * `parent` specifies the candidate that provids (dependend on) the + candidate list. + * `parent` specifies the candidate that provides (dependend on) the requirement, or `None` to indicate a root requirement. The preference could depend on a various of issues, including (not @@ -43,28 +47,40 @@ def get_preference(self, resolution, candidates, information): A sortable value should be returned (this will be used as the `key` parameter of the built-in sorting function). The smaller the value is, - the more preferred this specification is (i.e. the sorting function + the more preferred this requirement is (i.e. the sorting function is called with `reverse=False`). """ raise NotImplementedError - def find_matches(self, requirement): - """Find all possible candidates that satisfy a requirement. + def find_matches(self, requirements): + """Find all possible candidates that satisfy the given requirements. - This should try to get candidates based on the requirement's type. + This should try to get candidates based on the requirements' types. For VCS, local, and archive requirements, the one-and-only match is returned, and for a "named" requirement, the index(es) should be consulted to find concrete candidates for this requirement. - The returned candidates should be sorted by reversed preference, e.g. - the most preferred should be LAST. This is done so list-popping can be - as efficient as possible. + The return value should produce candidates ordered by preference; the + most preferred candidate should come first. The return type may be one + of the following: + + * A callable that returns an iterator that yields candidates. + * An collection of candidates. + * An iterable of candidates. 
This will be consumed immediately into a + list of candidates. + + :param requirements: A collection of requirements which all of the + returned candidates must match. All requirements are guaranteed to + have the same identifier. The collection is never empty. """ raise NotImplementedError def is_satisfied_by(self, requirement, candidate): """Whether the given requirement can be satisfied by a candidate. + The candidate is guarenteed to have been generated from the + requirement. + A boolean should be returned to indicate whether `candidate` is a viable solution to the requirement. """ @@ -80,8 +96,7 @@ def get_dependencies(self, candidate): class AbstractResolver(object): - """The thing that performs the actual resolution work. - """ + """The thing that performs the actual resolution work.""" base_exception = Exception @@ -92,30 +107,13 @@ def __init__(self, provider, reporter): def resolve(self, requirements, **kwargs): """Take a collection of constraints, spit out the resolution result. - Parameters - ---------- - requirements : Collection - A collection of constraints - kwargs : optional - Additional keyword arguments that subclasses may accept. - - Raises - ------ - self.base_exception - Any raised exception is guaranteed to be a subclass of - self.base_exception. The string representation of an exception - should be human readable and provide context for why it occurred. - - Returns - ------- - retval : object - A representation of the final resolution state. It can be any object - with a `mapping` attribute that is a Mapping. Other attributes can - be used to provide resolver-specific information. - - The `mapping` attribute MUST be key-value pair is an identifier of a - requirement (as returned by the provider's `identify` method) mapped - to the resolved candidate (chosen from the return value of the - provider's `find_matches` method). 
+ This returns a representation of the final resolution state, with one + guarenteed attribute ``mapping`` that contains resolved candidates as + values. The keys are their respective identifiers. + + :param requirements: A collection of constraints. + :param kwargs: Additional keyword arguments that subclasses may accept. + + :raises: ``self.base_exception`` or its subclass. """ raise NotImplementedError diff --git a/pipenv/vendor/resolvelib/reporters.py b/pipenv/vendor/resolvelib/reporters.py index c7e9e88b83..563489e133 100644 --- a/pipenv/vendor/resolvelib/reporters.py +++ b/pipenv/vendor/resolvelib/reporters.py @@ -1,10 +1,8 @@ class BaseReporter(object): - """Delegate class to provider progress reporting for the resolver. - """ + """Delegate class to provider progress reporting for the resolver.""" def starting(self): - """Called before the resolution actually starts. - """ + """Called before the resolution actually starts.""" def starting_round(self, index): """Called before each round of resolution starts. @@ -20,17 +18,20 @@ def ending_round(self, index, state): """ def ending(self, state): - """Called before the resolution ends successfully. - """ + """Called before the resolution ends successfully.""" + + def adding_requirement(self, requirement, parent): + """Called when adding a new requirement into the resolve criteria. - def adding_requirement(self, requirement): - """Called when the resolver adds a new requirement into the resolve criteria. + :param requirement: The additional requirement to be applied to filter + the available candidaites. + :param parent: The candidate that requires ``requirement`` as a + dependency, or None if ``requirement`` is one of the root + requirements passed in from ``Resolver.resolve()``. """ def backtracking(self, candidate): - """Called when the resolver rejects a candidate during backtracking. 
- """ + """Called when rejecting a candidate during backtracking.""" def pinning(self, candidate): - """Called when adding a candidate to the potential solution. - """ + """Called when adding a candidate to the potential solution.""" diff --git a/pipenv/vendor/resolvelib/resolvers.py b/pipenv/vendor/resolvelib/resolvers.py index b51d337d23..976608b177 100644 --- a/pipenv/vendor/resolvelib/resolvers.py +++ b/pipenv/vendor/resolvelib/resolvers.py @@ -1,7 +1,7 @@ import collections from .providers import AbstractResolver -from .structs import DirectedGraph +from .structs import DirectedGraph, build_iter_view RequirementInformation = collections.namedtuple( @@ -68,22 +68,18 @@ def __init__(self, candidates, information, incompatibilities): def __repr__(self): requirements = ", ".join( - "{!r} from {!r}".format(req, parent) + "({!r}, via={!r})".format(req, parent) for req, parent in self.information ) - return "".format(requirements) + return "Criterion({})".format(requirements) @classmethod def from_requirement(cls, provider, requirement, parent): - """Build an instance from a requirement. - """ - candidates = provider.find_matches(requirement) - criterion = cls( - candidates=candidates, - information=[RequirementInformation(requirement, parent)], - incompatibilities=[], - ) - if not candidates: + """Build an instance from a requirement.""" + cands = build_iter_view(provider.find_matches([requirement])) + infos = [RequirementInformation(requirement, parent)] + criterion = cls(cands, infos, incompatibilities=[]) + if not cands: raise RequirementsConflicted(criterion) return criterion @@ -94,17 +90,12 @@ def iter_parent(self): return (i.parent for i in self.information) def merged_with(self, provider, requirement, parent): - """Build a new instance from this and a new requirement. 
- """ + """Build a new instance from this and a new requirement.""" infos = list(self.information) infos.append(RequirementInformation(requirement, parent)) - candidates = [ - c - for c in self.candidates - if provider.is_satisfied_by(requirement, c) - ] - criterion = type(self)(candidates, infos, list(self.incompatibilities)) - if not candidates: + cands = build_iter_view(provider.find_matches([r for r, _ in infos])) + criterion = type(self)(cands, infos, list(self.incompatibilities)) + if not cands: raise RequirementsConflicted(criterion) return criterion @@ -113,13 +104,12 @@ def excluded_of(self, candidate): Returns the new instance, or None if we still have no valid candidates. """ + cands = self.candidates.excluding(candidate) + if not cands: + return None incompats = list(self.incompatibilities) incompats.append(candidate) - candidates = [c for c in self.candidates if c != candidate] - if not candidates: - return None - criterion = type(self)(candidates, list(self.information), incompats) - return criterion + return type(self)(cands, list(self.information), incompats) class ResolutionError(ResolverException): @@ -174,12 +164,13 @@ def _push_new_state(self): state = State(mapping=collections.OrderedDict(), criteria={}) else: state = State( - mapping=base.mapping.copy(), criteria=base.criteria.copy(), + mapping=base.mapping.copy(), + criteria=base.criteria.copy(), ) self._states.append(state) def _merge_into_criterion(self, requirement, parent): - self._r.adding_requirement(requirement) + self._r.adding_requirement(requirement, parent) name = self._p.identify(requirement) try: crit = self.state.criteria[name] @@ -191,12 +182,10 @@ def _merge_into_criterion(self, requirement, parent): def _get_criterion_item_preference(self, item): name, criterion = item - try: - pinned = self.state.mapping[name] - except KeyError: - pinned = None return self._p.get_preference( - pinned, criterion.candidates, criterion.information, + self.state.mapping.get(name), + 
criterion.candidates.for_preference(), + criterion.information, ) def _is_current_pin_satisfying(self, name, criterion): @@ -218,13 +207,24 @@ def _get_criteria_to_update(self, candidate): def _attempt_to_pin_criterion(self, name, criterion): causes = [] - for candidate in reversed(criterion.candidates): + for candidate in criterion.candidates: try: criteria = self._get_criteria_to_update(candidate) except RequirementsConflicted as e: causes.append(e.criterion) continue + # Check the newly-pinned candidate actually works. This should + # always pass under normal circumstances, but in the case of a + # faulty provider, we will raise an error to notify the implementer + # to fix find_matches() and/or is_satisfied_by(). + satisfied = all( + self._p.is_satisfied_by(r, candidate) + for r in criterion.iter_requirement() + ) + if not satisfied: + raise InconsistentCandidate(candidate, criterion) + # Put newly-pinned candidate at the end. This is essential because # backtracking looks at this mapping to get the last pin. self._r.pinning(candidate) @@ -232,13 +232,6 @@ def _attempt_to_pin_criterion(self, name, criterion): self.state.mapping[name] = candidate self.state.criteria.update(criteria) - # Check the newly-pinned candidate actually works. This should - # always pass under normal circumstances, but in the case of a - # faulty provider, we will raise an error to notify the implementer - # to fix find_matches() and/or is_satisfied_by(). - if not self._is_current_pin_satisfying(name, criterion): - raise InconsistentCandidate(candidate, criterion) - return [] # All candidates tried, nothing works. This criterion is a dead @@ -246,23 +239,32 @@ def _attempt_to_pin_criterion(self, name, criterion): return causes def _backtrack(self): - # We need at least 3 states here: - # (a) One known not working, to drop. - # (b) One to backtrack to. - # (c) One to restore state (b) to its state prior to candidate-pinning, + # Drop the current state, it's known not to work. 
+ del self._states[-1] + + # We need at least 2 states here: + # (a) One to backtrack to. + # (b) One to restore state (a) to its state prior to candidate-pinning, # so we can pin another one instead. - while len(self._states) >= 3: - del self._states[-1] - # Retract the last candidate pin, and create a new (b). - name, candidate = self._states.pop().mapping.popitem() + while len(self._states) >= 2: + # Retract the last candidate pin. + prev_state = self._states.pop() + try: + name, candidate = prev_state.mapping.popitem() + except KeyError: + continue self._r.backtracking(candidate) + + # Create a new state to work on, with the newly known not-working + # candidate excluded. self._push_new_state() # Mark the retracted candidate as incompatible. criterion = self.state.criteria[name].excluded_of(candidate) if criterion is None: # This state still does not work. Try the still previous state. + del self._states[-1] continue self.state.criteria[name] = criterion @@ -376,8 +378,7 @@ def _build_result(state): class Resolver(AbstractResolver): - """The thing that performs the actual resolution work. - """ + """The thing that performs the actual resolution work.""" base_exception = ResolverException diff --git a/pipenv/vendor/resolvelib/structs.py b/pipenv/vendor/resolvelib/structs.py index 1eee08b383..479aad5dc1 100644 --- a/pipenv/vendor/resolvelib/structs.py +++ b/pipenv/vendor/resolvelib/structs.py @@ -1,6 +1,8 @@ +from .compat import collections_abc + + class DirectedGraph(object): - """A graph structure with directed edges. - """ + """A graph structure with directed edges.""" def __init__(self): self._vertices = set() @@ -17,8 +19,7 @@ def __contains__(self, key): return key in self._vertices def copy(self): - """Return a shallow copy of this graph. 
- """ + """Return a shallow copy of this graph.""" other = DirectedGraph() other._vertices = set(self._vertices) other._forwards = {k: set(v) for k, v in self._forwards.items()} @@ -26,8 +27,7 @@ def copy(self): return other def add(self, key): - """Add a new vertex to the graph. - """ + """Add a new vertex to the graph.""" if key in self._vertices: raise ValueError("vertex exists") self._vertices.add(key) @@ -35,8 +35,7 @@ def add(self, key): self._backwards[key] = set() def remove(self, key): - """Remove a vertex from the graph, disconnecting all edges from/to it. - """ + """Remove a vertex from the graph, disconnecting all edges from/to it.""" self._vertices.remove(key) for f in self._forwards.pop(key): self._backwards[f].remove(key) @@ -66,3 +65,79 @@ def iter_children(self, key): def iter_parents(self, key): return iter(self._backwards[key]) + + +class _FactoryIterableView(object): + """Wrap an iterator factory returned by `find_matches()`. + + Calling `iter()` on this class would invoke the underlying iterator + factory, making it a "collection with ordering" that can be iterated + through multiple times, but lacks random access methods presented in + built-in Python sequence types. + """ + + def __init__(self, factory): + self._factory = factory + + def __bool__(self): + try: + next(self._factory()) + except StopIteration: + return False + return True + + __nonzero__ = __bool__ # XXX: Python 2. + + def __iter__(self): + return self._factory() + + def for_preference(self): + """Provide an candidate iterable for `get_preference()`""" + return self._factory() + + def excluding(self, candidate): + """Create a new `Candidates` instance excluding `candidate`.""" + + def factory(): + return (c for c in self._factory() if c != candidate) + + return type(self)(factory) + + +class _SequenceIterableView(object): + """Wrap an iterable returned by find_matches(). 
+ + This is essentially just a proxy to the underlying sequence that provides + the same interface as `_FactoryIterableView`. + """ + + def __init__(self, sequence): + self._sequence = sequence + + def __bool__(self): + return bool(self._sequence) + + __nonzero__ = __bool__ # XXX: Python 2. + + def __iter__(self): + return iter(self._sequence) + + def __len__(self): + return len(self._sequence) + + def for_preference(self): + """Provide an candidate iterable for `get_preference()`""" + return self._sequence + + def excluding(self, candidate): + """Create a new instance excluding `candidate`.""" + return type(self)([c for c in self._sequence if c != candidate]) + + +def build_iter_view(matches): + """Build an iterable view from the value returned by `find_matches()`.""" + if callable(matches): + return _FactoryIterableView(matches) + if not isinstance(matches, collections_abc.Sequence): + matches = list(matches) + return _SequenceIterableView(matches) diff --git a/pipenv/vendor/semver.py b/pipenv/vendor/semver.py index 6bc9fcab41..ce8816afb3 100644 --- a/pipenv/vendor/semver.py +++ b/pipenv/vendor/semver.py @@ -1,53 +1,173 @@ -""" -Python helper for Semantic Versioning (http://semver.org/) -""" +"""Python helper for Semantic Versioning (http://semver.org/)""" from __future__ import print_function import argparse import collections -from functools import wraps +from functools import wraps, partial +import inspect import re import sys +import warnings -__version__ = '2.9.0' -__author__ = 'Kostiantyn Rybnikov' -__author_email__ = 'k-bx@k-bx.com' -__maintainer__ = 'Sebastien Celles' +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + + +__version__ = "2.13.0" +__author__ = "Kostiantyn Rybnikov" +__author_email__ = "k-bx@k-bx.com" +__maintainer__ = ["Sebastien Celles", "Tom Schraitle"] __maintainer_email__ = "s.celles@gmail.com" -_REGEX = re.compile( - r""" - ^ - (?P(?:0|[1-9][0-9]*)) - \. - (?P(?:0|[1-9][0-9]*)) - \. 
- (?P(?:0|[1-9][0-9]*)) - (\-(?P - (?:0|[1-9A-Za-z-][0-9A-Za-z-]*) - (\.(?:0|[1-9A-Za-z-][0-9A-Za-z-]*))* - ))? - (\+(?P - [0-9A-Za-z-]+ - (\.[0-9A-Za-z-]+)* - ))? - $ - """, re.VERBOSE) - -_LAST_NUMBER = re.compile(r'(?:[^\d]*(\d+)[^\d]*)+') +#: Our public interface +__all__ = ( + # + # Module level function: + "bump_build", + "bump_major", + "bump_minor", + "bump_patch", + "bump_prerelease", + "compare", + "deprecated", + "finalize_version", + "format_version", + "match", + "max_ver", + "min_ver", + "parse", + "parse_version_info", + "replace", + # + # CLI interface + "cmd_bump", + "cmd_check", + "cmd_compare", + "createparser", + "main", + "process", + # + # Constants and classes + "SEMVER_SPEC_VERSION", + "VersionInfo", +) #: Contains the implemented semver.org version of the spec SEMVER_SPEC_VERSION = "2.0.0" -if not hasattr(__builtins__, 'cmp'): +if not hasattr(__builtins__, "cmp"): + def cmp(a, b): + """Return negative if ab.""" return (a > b) - (a < b) +if PY3: # pragma: no cover + string_types = str, bytes + text_type = str + binary_type = bytes + + def b(s): + return s.encode("latin-1") + + def u(s): + return s + + +else: # pragma: no cover + string_types = unicode, str + text_type = unicode + binary_type = str + + def b(s): + return s + + # Workaround for standalone backslash + def u(s): + return unicode(s.replace(r"\\", r"\\\\"), "unicode_escape") + + +def ensure_str(s, encoding="utf-8", errors="strict"): + # Taken from six project + """ + Coerce *s* to `str`. 
+ + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + if PY2 and isinstance(s, text_type): + s = s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + s = s.decode(encoding, errors) + return s + + +def deprecated(func=None, replace=None, version=None, category=DeprecationWarning): + """ + Decorates a function to output a deprecation warning. + + :param func: the function to decorate (or None) + :param str replace: the function to replace (use the full qualified + name like ``semver.VersionInfo.bump_major``. + :param str version: the first version when this function was deprecated. + :param category: allow you to specify the deprecation warning class + of your choice. By default, it's :class:`DeprecationWarning`, but + you can choose :class:`PendingDeprecationWarning` or a custom class. + """ + + if func is None: + return partial(deprecated, replace=replace, version=version, category=category) + + @wraps(func) + def wrapper(*args, **kwargs): + msg = ["Function '{m}.{f}' is deprecated."] + + if version: + msg.append("Deprecated since version {v}. 
") + msg.append("This function will be removed in semver 3.") + if replace: + msg.append("Use {r!r} instead.") + else: + msg.append("Use the respective 'semver.VersionInfo.{r}' instead.") + + # hasattr is needed for Python2 compatibility: + f = func.__qualname__ if hasattr(func, "__qualname__") else func.__name__ + r = replace or f + + frame = inspect.currentframe().f_back + + msg = " ".join(msg) + warnings.warn_explicit( + msg.format(m=func.__module__, f=f, r=r, v=version), + category=category, + filename=inspect.getfile(frame.f_code), + lineno=frame.f_lineno, + ) + # As recommended in the Python documentation + # https://docs.python.org/3/library/inspect.html#the-interpreter-stack + # better remove the interpreter stack: + del frame + return func(*args, **kwargs) + + return wrapper + + +@deprecated(version="2.10.0") def parse(version): - """Parse version to major, minor, patch, pre-release, build parts. + """ + Parse version to major, minor, patch, pre-release, build parts. + + .. deprecated:: 2.10.0 + Use :func:`semver.VersionInfo.parse` instead. 
:param version: version string :return: dictionary with the keys 'build', 'major', 'minor', 'patch', @@ -67,33 +187,28 @@ def parse(version): >>> ver['build'] 'build.4' """ - match = _REGEX.match(version) - if match is None: - raise ValueError('%s is not valid SemVer string' % version) - - version_parts = match.groupdict() - - version_parts['major'] = int(version_parts['major']) - version_parts['minor'] = int(version_parts['minor']) - version_parts['patch'] = int(version_parts['patch']) - - return version_parts + return VersionInfo.parse(version).to_dict() def comparator(operator): - """ Wrap a VersionInfo binary op method in a type-check """ + """Wrap a VersionInfo binary op method in a type-check.""" + @wraps(operator) def wrapper(self, other): - comparable_types = (VersionInfo, dict, tuple) + comparable_types = (VersionInfo, dict, tuple, list, text_type, binary_type) if not isinstance(other, comparable_types): - raise TypeError("other type %r must be in %r" - % (type(other), comparable_types)) + raise TypeError( + "other type %r must be in %r" % (type(other), comparable_types) + ) return operator(self, other) + return wrapper class VersionInfo(object): """ + A semver compatible version class. + :param int major: version when you make incompatible API changes. :param int minor: version when you add functionality in a backwards-compatible manner. @@ -101,18 +216,57 @@ class VersionInfo(object): :param str prerelease: an optional prerelease string :param str build: an optional build string """ - __slots__ = ('_major', '_minor', '_patch', '_prerelease', '_build') + + __slots__ = ("_major", "_minor", "_patch", "_prerelease", "_build") + #: Regex for number in a prerelease + _LAST_NUMBER = re.compile(r"(?:[^\d]*(\d+)[^\d]*)+") + #: Regex for a semver version + _REGEX = re.compile( + r""" + ^ + (?P0|[1-9]\d*) + \. + (?P0|[1-9]\d*) + \. + (?P0|[1-9]\d*) + (?:-(?P + (?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*) + (?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))* + ))? 
+ (?:\+(?P + [0-9a-zA-Z-]+ + (?:\.[0-9a-zA-Z-]+)* + ))? + $ + """, + re.VERBOSE, + ) def __init__(self, major, minor=0, patch=0, prerelease=None, build=None): - self._major = int(major) - self._minor = int(minor) - self._patch = int(patch) + # Build a dictionary of the arguments except prerelease and build + version_parts = { + "major": major, + "minor": minor, + "patch": patch, + } + + for name, value in version_parts.items(): + value = int(value) + version_parts[name] = value + if value < 0: + raise ValueError( + "{!r} is negative. A version can only be positive.".format(name) + ) + + self._major = version_parts["major"] + self._minor = version_parts["minor"] + self._patch = version_parts["patch"] self._prerelease = None if prerelease is None else str(prerelease) self._build = None if build is None else str(build) @property def major(self): - """The major part of a version""" + """The major part of a version (read-only).""" return self._major @major.setter @@ -121,7 +275,7 @@ def major(self, value): @property def minor(self): - """The minor part of a version""" + """The minor part of a version (read-only).""" return self._minor @minor.setter @@ -130,7 +284,7 @@ def minor(self, value): @property def patch(self): - """The patch part of a version""" + """The patch part of a version (read-only).""" return self._patch @patch.setter @@ -139,7 +293,7 @@ def patch(self, value): @property def prerelease(self): - """The prerelease part of a version""" + """The prerelease part of a version (read-only).""" return self._prerelease @prerelease.setter @@ -148,189 +302,498 @@ def prerelease(self, value): @property def build(self): - """The build part of a version""" + """The build part of a version (read-only).""" return self._build @build.setter def build(self, value): raise AttributeError("attribute 'build' is readonly") + def to_tuple(self): + """ + Convert the VersionInfo object to a tuple. + + .. 
versionadded:: 2.10.0 + Renamed ``VersionInfo._astuple`` to ``VersionInfo.to_tuple`` to + make this function available in the public API. + + :return: a tuple with all the parts + :rtype: tuple + + >>> semver.VersionInfo(5, 3, 1).to_tuple() + (5, 3, 1, None, None) + """ + return (self.major, self.minor, self.patch, self.prerelease, self.build) + + def to_dict(self): + """ + Convert the VersionInfo object to an OrderedDict. + + .. versionadded:: 2.10.0 + Renamed ``VersionInfo._asdict`` to ``VersionInfo.to_dict`` to + make this function available in the public API. + + :return: an OrderedDict with the keys in the order ``major``, ``minor``, + ``patch``, ``prerelease``, and ``build``. + :rtype: :class:`collections.OrderedDict` + + >>> semver.VersionInfo(3, 2, 1).to_dict() + OrderedDict([('major', 3), ('minor', 2), ('patch', 1), \ +('prerelease', None), ('build', None)]) + """ + return collections.OrderedDict( + ( + ("major", self.major), + ("minor", self.minor), + ("patch", self.patch), + ("prerelease", self.prerelease), + ("build", self.build), + ) + ) + + # For compatibility reasons: + @deprecated(replace="semver.VersionInfo.to_tuple", version="2.10.0") def _astuple(self): - return (self.major, self.minor, self.patch, - self.prerelease, self.build) + return self.to_tuple() # pragma: no cover + _astuple.__doc__ = to_tuple.__doc__ + + @deprecated(replace="semver.VersionInfo.to_dict", version="2.10.0") def _asdict(self): - return collections.OrderedDict(( - ("major", self.major), - ("minor", self.minor), - ("patch", self.patch), - ("prerelease", self.prerelease), - ("build", self.build) - )) + return self.to_dict() # pragma: no cover + + _asdict.__doc__ = to_dict.__doc__ def __iter__(self): """Implement iter(self).""" # As long as we support Py2.7, we can't use the "yield from" syntax - for v in self._astuple(): + for v in self.to_tuple(): yield v + @staticmethod + def _increment_string(string): + """ + Look for the last sequence of number(s) in a string and increment. 
+ + :param str string: the string to search for. + :return: the incremented string + + Source: + http://code.activestate.com/recipes/442460-increment-numbers-in-a-string/#c1 + """ + match = VersionInfo._LAST_NUMBER.search(string) + if match: + next_ = str(int(match.group(1)) + 1) + start, end = match.span(1) + string = string[: max(end - len(next_), start)] + next_ + string[end:] + return string + def bump_major(self): - """Raise the major part of the version, return a new object - but leave self untouched + """ + Raise the major part of the version, return a new object but leave self + untouched. :return: new object with the raised major part - :rtype: VersionInfo + :rtype: :class:`VersionInfo` - >>> ver = semver.parse_version_info("3.4.5") + >>> ver = semver.VersionInfo.parse("3.4.5") >>> ver.bump_major() VersionInfo(major=4, minor=0, patch=0, prerelease=None, build=None) """ - return parse_version_info(bump_major(str(self))) + cls = type(self) + return cls(self._major + 1) def bump_minor(self): - """Raise the minor part of the version, return a new object - but leave self untouched + """ + Raise the minor part of the version, return a new object but leave self + untouched. :return: new object with the raised minor part - :rtype: VersionInfo + :rtype: :class:`VersionInfo` - >>> ver = semver.parse_version_info("3.4.5") + >>> ver = semver.VersionInfo.parse("3.4.5") >>> ver.bump_minor() VersionInfo(major=3, minor=5, patch=0, prerelease=None, build=None) """ - return parse_version_info(bump_minor(str(self))) + cls = type(self) + return cls(self._major, self._minor + 1) def bump_patch(self): - """Raise the patch part of the version, return a new object - but leave self untouched + """ + Raise the patch part of the version, return a new object but leave self + untouched. 
:return: new object with the raised patch part - :rtype: VersionInfo + :rtype: :class:`VersionInfo` - >>> ver = semver.parse_version_info("3.4.5") + >>> ver = semver.VersionInfo.parse("3.4.5") >>> ver.bump_patch() VersionInfo(major=3, minor=4, patch=6, prerelease=None, build=None) """ - return parse_version_info(bump_patch(str(self))) + cls = type(self) + return cls(self._major, self._minor, self._patch + 1) - def bump_prerelease(self, token='rc'): - """Raise the prerelease part of the version, return a new object - but leave self untouched + def bump_prerelease(self, token="rc"): + """ + Raise the prerelease part of the version, return a new object but leave + self untouched. :param token: defaults to 'rc' :return: new object with the raised prerelease part - :rtype: str + :rtype: :class:`VersionInfo` - >>> ver = semver.parse_version_info("3.4.5-rc.1") + >>> ver = semver.VersionInfo.parse("3.4.5-rc.1") >>> ver.bump_prerelease() VersionInfo(major=3, minor=4, patch=5, prerelease='rc.2', \ build=None) """ - return parse_version_info(bump_prerelease(str(self), token)) + cls = type(self) + prerelease = cls._increment_string(self._prerelease or (token or "rc") + ".0") + return cls(self._major, self._minor, self._patch, prerelease) - def bump_build(self, token='build'): - """Raise the build part of the version, return a new object - but leave self untouched + def bump_build(self, token="build"): + """ + Raise the build part of the version, return a new object but leave self + untouched. 
:param token: defaults to 'build' :return: new object with the raised build part - :rtype: str + :rtype: :class:`VersionInfo` - >>> ver = semver.parse_version_info("3.4.5-rc.1+build.9") + >>> ver = semver.VersionInfo.parse("3.4.5-rc.1+build.9") >>> ver.bump_build() VersionInfo(major=3, minor=4, patch=5, prerelease='rc.1', \ build='build.10') """ - return parse_version_info(bump_build(str(self), token)) + cls = type(self) + build = cls._increment_string(self._build or (token or "build") + ".0") + return cls(self._major, self._minor, self._patch, self._prerelease, build) + + def compare(self, other): + """ + Compare self with other. + + :param other: the second version (can be string, a dict, tuple/list, or + a VersionInfo instance) + :return: The return value is negative if ver1 < ver2, + zero if ver1 == ver2 and strictly positive if ver1 > ver2 + :rtype: int + + >>> semver.VersionInfo.parse("1.0.0").compare("2.0.0") + -1 + >>> semver.VersionInfo.parse("2.0.0").compare("1.0.0") + 1 + >>> semver.VersionInfo.parse("2.0.0").compare("2.0.0") + 0 + >>> semver.VersionInfo.parse("2.0.0").compare(dict(major=2, minor=0, patch=0)) + 0 + """ + cls = type(self) + if isinstance(other, string_types): + other = cls.parse(other) + elif isinstance(other, dict): + other = cls(**other) + elif isinstance(other, (tuple, list)): + other = cls(*other) + elif not isinstance(other, cls): + raise TypeError( + "Expected str or {} instance, but got {}".format( + cls.__name__, type(other) + ) + ) + + v1 = self.to_tuple()[:3] + v2 = other.to_tuple()[:3] + x = cmp(v1, v2) + if x: + return x + + rc1, rc2 = self.prerelease, other.prerelease + rccmp = _nat_cmp(rc1, rc2) + + if not rccmp: + return 0 + if not rc1: + return 1 + elif not rc2: + return -1 + + return rccmp + + def next_version(self, part, prerelease_token="rc"): + """ + Determines next version, preserving natural order. + + .. versionadded:: 2.10.0 + + This function is taking prereleases into account. 
+ The "major", "minor", and "patch" raises the respective parts like + the ``bump_*`` functions. The real difference is using the + "preprelease" part. It gives you the next patch version of the prerelease, + for example: + + >>> str(semver.VersionInfo.parse("0.1.4").next_version("prerelease")) + '0.1.5-rc.1' + + :param part: One of "major", "minor", "patch", or "prerelease" + :param prerelease_token: prefix string of prerelease, defaults to 'rc' + :return: new object with the appropriate part raised + :rtype: :class:`VersionInfo` + """ + validparts = { + "major", + "minor", + "patch", + "prerelease", + # "build", # currently not used + } + if part not in validparts: + raise ValueError( + "Invalid part. Expected one of {validparts}, but got {part!r}".format( + validparts=validparts, part=part + ) + ) + version = self + if (version.prerelease or version.build) and ( + part == "patch" + or (part == "minor" and version.patch == 0) + or (part == "major" and version.minor == version.patch == 0) + ): + return version.replace(prerelease=None, build=None) + + if part in ("major", "minor", "patch"): + return getattr(version, "bump_" + part)() + + if not version.prerelease: + version = version.bump_patch() + return version.bump_prerelease(prerelease_token) @comparator def __eq__(self, other): - return _compare_by_keys(self._asdict(), _to_dict(other)) == 0 + return self.compare(other) == 0 @comparator def __ne__(self, other): - return _compare_by_keys(self._asdict(), _to_dict(other)) != 0 + return self.compare(other) != 0 @comparator def __lt__(self, other): - return _compare_by_keys(self._asdict(), _to_dict(other)) < 0 + return self.compare(other) < 0 @comparator def __le__(self, other): - return _compare_by_keys(self._asdict(), _to_dict(other)) <= 0 + return self.compare(other) <= 0 @comparator def __gt__(self, other): - return _compare_by_keys(self._asdict(), _to_dict(other)) > 0 + return self.compare(other) > 0 @comparator def __ge__(self, other): - return 
_compare_by_keys(self._asdict(), _to_dict(other)) >= 0 + return self.compare(other) >= 0 + + def __getitem__(self, index): + """ + self.__getitem__(index) <==> self[index] + + Implement getitem. If the part requested is undefined, or a part of the + range requested is undefined, it will throw an index error. + Negative indices are not supported + + :param Union[int, slice] index: a positive integer indicating the + offset or a :func:`slice` object + :raises: IndexError, if index is beyond the range or a part is None + :return: the requested part of the version at position index + + >>> ver = semver.VersionInfo.parse("3.4.5") + >>> ver[0], ver[1], ver[2] + (3, 4, 5) + """ + if isinstance(index, int): + index = slice(index, index + 1) + + if ( + isinstance(index, slice) + and (index.start is not None and index.start < 0) + or (index.stop is not None and index.stop < 0) + ): + raise IndexError("Version index cannot be negative") + + part = tuple(filter(lambda p: p is not None, self.to_tuple()[index])) + + if len(part) == 1: + part = part[0] + elif not part: + raise IndexError("Version part undefined") + return part def __repr__(self): - s = ", ".join("%s=%r" % (key, val) - for key, val in self._asdict().items()) + s = ", ".join("%s=%r" % (key, val) for key, val in self.to_dict().items()) return "%s(%s)" % (type(self).__name__, s) def __str__(self): - return format_version(*(self._astuple())) + """str(self)""" + version = "%d.%d.%d" % (self.major, self.minor, self.patch) + if self.prerelease: + version += "-%s" % self.prerelease + if self.build: + version += "+%s" % self.build + return version def __hash__(self): - return hash(self._astuple()) + return hash(self.to_tuple()[:4]) - @staticmethod - def parse(version): - """Parse version string to a VersionInfo instance. + def finalize_version(self): + """ + Remove any prerelease and build metadata from the version. 
+ + :return: a new instance with the finalized version string + :rtype: :class:`VersionInfo` + + >>> str(semver.VersionInfo.parse('1.2.3-rc.5').finalize_version()) + '1.2.3' + """ + cls = type(self) + return cls(self.major, self.minor, self.patch) + + def match(self, match_expr): + """ + Compare self to match a match expression. + + :param str match_expr: operator and version; valid operators are + < smaller than + > greater than + >= greator or equal than + <= smaller or equal than + == equal + != not equal + :return: True if the expression matches the version, otherwise False + :rtype: bool + + >>> semver.VersionInfo.parse("2.0.0").match(">=1.0.0") + True + >>> semver.VersionInfo.parse("1.0.0").match(">1.0.0") + False + """ + prefix = match_expr[:2] + if prefix in (">=", "<=", "==", "!="): + match_version = match_expr[2:] + elif prefix and prefix[0] in (">", "<"): + prefix = prefix[0] + match_version = match_expr[1:] + else: + raise ValueError( + "match_expr parameter should be in format , " + "where is one of " + "['<', '>', '==', '<=', '>=', '!=']. " + "You provided: %r" % match_expr + ) + + possibilities_dict = { + ">": (1,), + "<": (-1,), + "==": (0,), + "!=": (-1, 1), + ">=": (0, 1), + "<=": (-1, 0), + } + + possibilities = possibilities_dict[prefix] + cmp_res = self.compare(match_version) + + return cmp_res in possibilities + + @classmethod + def parse(cls, version): + """ + Parse version string to a VersionInfo instance. :param version: version string - :return: a :class:`semver.VersionInfo` instance - :rtype: :class:`semver.VersionInfo` + :return: a :class:`VersionInfo` instance + :raises: :class:`ValueError` + :rtype: :class:`VersionInfo` + + .. versionchanged:: 2.11.0 + Changed method from static to classmethod to + allow subclasses. 
>>> semver.VersionInfo.parse('3.4.5-pre.2+build.4') VersionInfo(major=3, minor=4, patch=5, \ prerelease='pre.2', build='build.4') """ - return parse_version_info(version) + match = cls._REGEX.match(ensure_str(version)) + if match is None: + raise ValueError("%s is not valid SemVer string" % version) + + version_parts = match.groupdict() + + version_parts["major"] = int(version_parts["major"]) + version_parts["minor"] = int(version_parts["minor"]) + version_parts["patch"] = int(version_parts["patch"]) + + return cls(**version_parts) def replace(self, **parts): - """Replace one or more parts of a version and return a new - :class:`semver.VersionInfo` object, but leave self untouched + """ + Replace one or more parts of a version and return a new + :class:`VersionInfo` object, but leave self untouched + + .. versionadded:: 2.9.0 + Added :func:`VersionInfo.replace` :param dict parts: the parts to be updated. Valid keys are: ``major``, ``minor``, ``patch``, ``prerelease``, or ``build`` - :return: the new :class:`semver.VersionInfo` object with the changed + :return: the new :class:`VersionInfo` object with the changed parts - :raises: TypeError, if ``parts`` contains invalid keys + :raises: :class:`TypeError`, if ``parts`` contains invalid keys """ - version = self._asdict() + version = self.to_dict() version.update(parts) try: return VersionInfo(**version) except TypeError: - unknownkeys = set(parts) - set(self._asdict()) - error = ("replace() got %d unexpected keyword " - "argument(s): %s" % (len(unknownkeys), - ", ".join(unknownkeys)) - ) + unknownkeys = set(parts) - set(self.to_dict()) + error = "replace() got %d unexpected keyword " "argument(s): %s" % ( + len(unknownkeys), + ", ".join(unknownkeys), + ) raise TypeError(error) + @classmethod + def isvalid(cls, version): + """ + Check if the string is a valid semver version. + + .. 
versionadded:: 2.9.1 -def _to_dict(obj): - if isinstance(obj, VersionInfo): - return obj._asdict() - elif isinstance(obj, tuple): - return VersionInfo(*obj)._asdict() - return obj + :param str version: the version string to check + :return: True if the version string is a valid semver version, False + otherwise. + :rtype: bool + """ + try: + cls.parse(version) + return True + except ValueError: + return False +@deprecated(replace="semver.VersionInfo.parse", version="2.10.0") def parse_version_info(version): - """Parse version string to a VersionInfo instance. + """ + Parse version string to a VersionInfo instance. + + .. deprecated:: 2.10.0 + Use :func:`semver.VersionInfo.parse` instead. + + .. versionadded:: 2.7.2 + Added :func:`semver.parse_version_info` :param version: version string :return: a :class:`VersionInfo` instance :rtype: :class:`VersionInfo` - >>> version_info = semver.parse_version_info("3.4.5-pre.2+build.4") + >>> version_info = semver.VersionInfo.parse("3.4.5-pre.2+build.4") >>> version_info.major 3 >>> version_info.minor @@ -342,20 +805,15 @@ def parse_version_info(version): >>> version_info.build 'build.4' """ - parts = parse(version) - version_info = VersionInfo( - parts['major'], parts['minor'], parts['patch'], - parts['prerelease'], parts['build']) - - return version_info + return VersionInfo.parse(version) def _nat_cmp(a, b): def convert(text): - return int(text) if re.match('^[0-9]+$', text) else text + return int(text) if re.match("^[0-9]+$", text) else text def split_key(key): - return [convert(c) for c in key.split('.')] + return [convert(c) for c in key.split(".")] def cmp_prerelease_tag(a, b): if isinstance(a, int) and isinstance(b, int): @@ -367,7 +825,7 @@ def cmp_prerelease_tag(a, b): else: return cmp(a, b) - a, b = a or '', b or '' + a, b = a or "", b or "" a_parts, b_parts = split_key(a), split_key(b) for sub_a, sub_b in zip(a_parts, b_parts): cmp_result = cmp_prerelease_tag(sub_a, sub_b) @@ -377,27 +835,10 @@ def 
cmp_prerelease_tag(a, b): return cmp(len(a), len(b)) -def _compare_by_keys(d1, d2): - for key in ['major', 'minor', 'patch']: - v = cmp(d1.get(key), d2.get(key)) - if v: - return v - - rc1, rc2 = d1.get('prerelease'), d2.get('prerelease') - rccmp = _nat_cmp(rc1, rc2) - - if not rccmp: - return 0 - if not rc1: - return 1 - elif not rc2: - return -1 - - return rccmp - - +@deprecated(version="2.10.0") def compare(ver1, ver2): - """Compare two versions + """ + Compare two versions strings. :param ver1: version string 1 :param ver2: version string 2 @@ -412,14 +853,14 @@ def compare(ver1, ver2): >>> semver.compare("2.0.0", "2.0.0") 0 """ - - v1, v2 = parse(ver1), parse(ver2) - - return _compare_by_keys(v1, v2) + v1 = VersionInfo.parse(ver1) + return v1.compare(ver2) +@deprecated(version="2.10.0") def match(version, match_expr): - """Compare two versions through a comparison + """ + Compare two versions strings through a comparison. :param str version: a version string :param str match_expr: operator and version; valid operators are @@ -437,35 +878,14 @@ def match(version, match_expr): >>> semver.match("1.0.0", ">1.0.0") False """ - prefix = match_expr[:2] - if prefix in ('>=', '<=', '==', '!='): - match_version = match_expr[2:] - elif prefix and prefix[0] in ('>', '<'): - prefix = prefix[0] - match_version = match_expr[1:] - else: - raise ValueError("match_expr parameter should be in format , " - "where is one of " - "['<', '>', '==', '<=', '>=', '!=']. 
" - "You provided: %r" % match_expr) - - possibilities_dict = { - '>': (1,), - '<': (-1,), - '==': (0,), - '!=': (-1, 1), - '>=': (0, 1), - '<=': (-1, 0) - } - - possibilities = possibilities_dict[prefix] - cmp_res = compare(version, match_version) - - return cmp_res in possibilities + ver = VersionInfo.parse(version) + return ver.match(match_expr) +@deprecated(replace="max", version="2.10.2") def max_ver(ver1, ver2): - """Returns the greater version of two versions + """ + Returns the greater version of two versions strings. :param ver1: version string 1 :param ver2: version string 2 @@ -475,15 +895,21 @@ def max_ver(ver1, ver2): >>> semver.max_ver("1.0.0", "2.0.0") '2.0.0' """ - cmp_res = compare(ver1, ver2) - if cmp_res == 0 or cmp_res == 1: - return ver1 + if isinstance(ver1, string_types): + ver1 = VersionInfo.parse(ver1) + elif not isinstance(ver1, VersionInfo): + raise TypeError() + cmp_res = ver1.compare(ver2) + if cmp_res >= 0: + return str(ver1) else: return ver2 +@deprecated(replace="min", version="2.10.2") def min_ver(ver1, ver2): - """Returns the smaller version of two versions + """ + Returns the smaller version of two versions strings. :param ver1: version string 1 :param ver2: version string 2 @@ -493,15 +919,21 @@ def min_ver(ver1, ver2): >>> semver.min_ver("1.0.0", "2.0.0") '1.0.0' """ - cmp_res = compare(ver1, ver2) - if cmp_res == 0 or cmp_res == -1: - return ver1 + ver1 = VersionInfo.parse(ver1) + cmp_res = ver1.compare(ver2) + if cmp_res <= 0: + return str(ver1) else: return ver2 +@deprecated(replace="str(versionobject)", version="2.10.0") def format_version(major, minor, patch, prerelease=None, build=None): - """Format a version according to the Semantic Versioning specification + """ + Format a version string according to the Semantic Versioning specification. + + .. deprecated:: 2.10.0 + Use ``str(VersionInfo(VERSION)`` instead. 
:param int major: the required major part of a version :param int minor: the required minor part of a version @@ -514,31 +946,16 @@ def format_version(major, minor, patch, prerelease=None, build=None): >>> semver.format_version(3, 4, 5, 'pre.2', 'build.4') '3.4.5-pre.2+build.4' """ - version = "%d.%d.%d" % (major, minor, patch) - if prerelease is not None: - version = version + "-%s" % prerelease - - if build is not None: - version = version + "+%s" % build + return str(VersionInfo(major, minor, patch, prerelease, build)) - return version - -def _increment_string(string): - """ - Look for the last sequence of number(s) in a string and increment, from: - http://code.activestate.com/recipes/442460-increment-numbers-in-a-string/#c1 +@deprecated(version="2.10.0") +def bump_major(version): """ - match = _LAST_NUMBER.search(string) - if match: - next_ = str(int(match.group(1)) + 1) - start, end = match.span(1) - string = string[:max(end - len(next_), start)] + next_ + string[end:] - return string - + Raise the major part of the version string. -def bump_major(version): - """Raise the major part of the version + .. deprecated:: 2.10.0 + Use :func:`semver.VersionInfo.bump_major` instead. :param: version string :return: the raised version string @@ -547,12 +964,16 @@ def bump_major(version): >>> semver.bump_major("3.4.5") '4.0.0' """ - verinfo = parse(version) - return format_version(verinfo['major'] + 1, 0, 0) + return str(VersionInfo.parse(version).bump_major()) +@deprecated(version="2.10.0") def bump_minor(version): - """Raise the minor part of the version + """ + Raise the minor part of the version string. + + .. deprecated:: 2.10.0 + Use :func:`semver.VersionInfo.bump_minor` instead. 
:param: version string :return: the raised version string @@ -561,12 +982,16 @@ def bump_minor(version): >>> semver.bump_minor("3.4.5") '3.5.0' """ - verinfo = parse(version) - return format_version(verinfo['major'], verinfo['minor'] + 1, 0) + return str(VersionInfo.parse(version).bump_minor()) +@deprecated(version="2.10.0") def bump_patch(version): - """Raise the patch part of the version + """ + Raise the patch part of the version string. + + .. deprecated:: 2.10.0 + Use :func:`semver.VersionInfo.bump_patch` instead. :param: version string :return: the raised version string @@ -575,13 +1000,16 @@ def bump_patch(version): >>> semver.bump_patch("3.4.5") '3.4.6' """ - verinfo = parse(version) - return format_version(verinfo['major'], verinfo['minor'], - verinfo['patch'] + 1) + return str(VersionInfo.parse(version).bump_patch()) -def bump_prerelease(version, token='rc'): - """Raise the prerelease part of the version +@deprecated(version="2.10.0") +def bump_prerelease(version, token="rc"): + """ + Raise the prerelease part of the version string. + + .. deprecated:: 2.10.0 + Use :func:`semver.VersionInfo.bump_prerelease` instead. :param version: version string :param token: defaults to 'rc' @@ -591,16 +1019,16 @@ def bump_prerelease(version, token='rc'): >>> semver.bump_prerelease('3.4.5', 'dev') '3.4.5-dev.1' """ - verinfo = parse(version) - verinfo['prerelease'] = _increment_string( - verinfo['prerelease'] or (token or 'rc') + '.0' - ) - return format_version(verinfo['major'], verinfo['minor'], verinfo['patch'], - verinfo['prerelease']) + return str(VersionInfo.parse(version).bump_prerelease(token)) -def bump_build(version, token='build'): - """Raise the build part of the version +@deprecated(version="2.10.0") +def bump_build(version, token="build"): + """ + Raise the build part of the version string. + + .. deprecated:: 2.10.0 + Use :func:`semver.VersionInfo.bump_build` instead. 
:param version: version string :param token: defaults to 'build' @@ -610,16 +1038,19 @@ def bump_build(version, token='build'): >>> semver.bump_build('3.4.5-rc.1+build.9') '3.4.5-rc.1+build.10' """ - verinfo = parse(version) - verinfo['build'] = _increment_string( - verinfo['build'] or (token or 'build') + '.0' - ) - return format_version(verinfo['major'], verinfo['minor'], verinfo['patch'], - verinfo['prerelease'], verinfo['build']) + return str(VersionInfo.parse(version).bump_build(token)) +@deprecated(version="2.10.0") def finalize_version(version): - """Remove any prerelease and build metadata from the version + """ + Remove any prerelease and build metadata from the version string. + + .. deprecated:: 2.10.0 + Use :func:`semver.VersionInfo.finalize_version` instead. + + .. versionadded:: 2.7.9 + Added :func:`finalize_version` :param version: version string :return: the finalized version string @@ -628,60 +1059,161 @@ def finalize_version(version): >>> semver.finalize_version('1.2.3-rc.5') '1.2.3' """ - verinfo = parse(version) - return format_version(verinfo['major'], verinfo['minor'], verinfo['patch']) + verinfo = VersionInfo.parse(version) + return str(verinfo.finalize_version()) + + +@deprecated(version="2.10.0") +def replace(version, **parts): + """ + Replace one or more parts of a version and return the new string. + + .. deprecated:: 2.10.0 + Use :func:`semver.VersionInfo.replace` instead. + + .. versionadded:: 2.9.0 + Added :func:`replace` + + :param str version: the version string to replace + :param dict parts: the parts to be updated. Valid keys are: + ``major``, ``minor``, ``patch``, ``prerelease``, or ``build`` + :return: the replaced version string + :raises: TypeError, if ``parts`` contains invalid keys + :rtype: str + + >>> import semver + >>> semver.replace("1.2.3", major=2, patch=10) + '2.2.10' + """ + return str(VersionInfo.parse(version).replace(**parts)) + + +# ---- CLI +def cmd_bump(args): + """ + Subcommand: Bumps a version. 
+ + Synopsis: bump + can be major, minor, patch, prerelease, or build + + :param args: The parsed arguments + :type args: :class:`argparse.Namespace` + :return: the new, bumped version + """ + maptable = { + "major": "bump_major", + "minor": "bump_minor", + "patch": "bump_patch", + "prerelease": "bump_prerelease", + "build": "bump_build", + } + if args.bump is None: + # When bump is called without arguments, + # print the help and exit + args.parser.parse_args(["bump", "-h"]) + + ver = VersionInfo.parse(args.version) + # get the respective method and call it + func = getattr(ver, maptable[args.bump]) + return str(func()) + + +def cmd_check(args): + """ + Subcommand: Checks if a string is a valid semver version. + + Synopsis: check + + :param args: The parsed arguments + :type args: :class:`argparse.Namespace` + """ + if VersionInfo.isvalid(args.version): + return None + raise ValueError("Invalid version %r" % args.version) + + +def cmd_compare(args): + """ + Subcommand: Compare two versions + + Synopsis: compare + + :param args: The parsed arguments + :type args: :class:`argparse.Namespace` + """ + return str(compare(args.version1, args.version2)) + + +def cmd_nextver(args): + """ + Subcommand: Determines the next version, taking prereleases into account. + + Synopsis: nextver + + :param args: The parsed arguments + :type args: :class:`argparse.Namespace` + """ + version = VersionInfo.parse(args.version) + return str(version.next_version(args.part)) def createparser(): - """Create an :class:`argparse.ArgumentParser` instance + """ + Create an :class:`argparse.ArgumentParser` instance. 
:return: parser instance :rtype: :class:`argparse.ArgumentParser` """ - parser = argparse.ArgumentParser(prog=__package__, - description=__doc__) - s = parser.add_subparsers() + parser = argparse.ArgumentParser(prog=__package__, description=__doc__) + parser.add_argument( + "--version", action="version", version="%(prog)s " + __version__ + ) + + s = parser.add_subparsers() # create compare subcommand - parser_compare = s.add_parser("compare", - help="Compare two versions" - ) - parser_compare.set_defaults(which="compare") - parser_compare.add_argument("version1", - help="First version" - ) - parser_compare.add_argument("version2", - help="Second version" - ) + parser_compare = s.add_parser("compare", help="Compare two versions") + parser_compare.set_defaults(func=cmd_compare) + parser_compare.add_argument("version1", help="First version") + parser_compare.add_argument("version2", help="Second version") # create bump subcommand - parser_bump = s.add_parser("bump", - help="Bumps a version" - ) - parser_bump.set_defaults(which="bump") - sb = parser_bump.add_subparsers(title="Bump commands", - dest="bump") + parser_bump = s.add_parser("bump", help="Bumps a version") + parser_bump.set_defaults(func=cmd_bump) + sb = parser_bump.add_subparsers(title="Bump commands", dest="bump") # Create subparsers for the bump subparser: - for p in (sb.add_parser("major", - help="Bump the major part of the version"), - sb.add_parser("minor", - help="Bump the minor part of the version"), - sb.add_parser("patch", - help="Bump the patch part of the version"), - sb.add_parser("prerelease", - help="Bump the prerelease part of the version"), - sb.add_parser("build", - help="Bump the build part of the version")): - p.add_argument("version", - help="Version to raise" - ) + for p in ( + sb.add_parser("major", help="Bump the major part of the version"), + sb.add_parser("minor", help="Bump the minor part of the version"), + sb.add_parser("patch", help="Bump the patch part of the version"), + 
sb.add_parser("prerelease", help="Bump the prerelease part of the version"), + sb.add_parser("build", help="Bump the build part of the version"), + ): + p.add_argument("version", help="Version to raise") + + # Create the check subcommand + parser_check = s.add_parser( + "check", help="Checks if a string is a valid semver version" + ) + parser_check.set_defaults(func=cmd_check) + parser_check.add_argument("version", help="Version to check") + # Create the nextver subcommand + parser_nextver = s.add_parser( + "nextver", help="Determines the next version, taking prereleases into account." + ) + parser_nextver.set_defaults(func=cmd_nextver) + parser_nextver.add_argument("version", help="Version to raise") + parser_nextver.add_argument( + "part", help="One of 'major', 'minor', 'patch', or 'prerelease'" + ) return parser def process(args): - """Process the input from the CLI + """ + Process the input from the CLI. :param args: The parsed arguments :type args: :class:`argparse.Namespace` @@ -690,24 +1222,17 @@ def process(args): :return: result of the selected action :rtype: str """ - if args.which == "bump": - maptable = {'major': 'bump_major', - 'minor': 'bump_minor', - 'patch': 'bump_patch', - 'prerelease': 'bump_prerelease', - 'build': 'bump_build', - } - ver = parse_version_info(args.version) - # get the respective method and call it - func = getattr(ver, maptable[args.bump]) - return str(func()) + if not hasattr(args, "func"): + args.parser.print_help() + raise SystemExit() - elif args.which == "compare": - return str(compare(args.version1, args.version2)) + # Call the respective function object: + return args.func(args) def main(cliargs=None): - """Entry point for the application script + """ + Entry point for the application script. 
:param list cliargs: Arguments to parse or None (=use :class:`sys.argv`) :return: error code @@ -716,9 +1241,11 @@ def main(cliargs=None): try: parser = createparser() args = parser.parse_args(args=cliargs) - # args.parser = parser + # Save parser instance: + args.parser = parser result = process(args) - print(result) + if result is not None: + print(result) return 0 except (ValueError, TypeError) as err: @@ -726,24 +1253,7 @@ def main(cliargs=None): return 2 -def replace(version, **parts): - """Replace one or more parts of a version and return the new string - - :param str version: the version string to replace - :param dict parts: the parts to be updated. Valid keys are: - ``major``, ``minor``, ``patch``, ``prerelease``, or ``build`` - :return: the replaced version string - :raises: TypeError, if ``parts`` contains invalid keys - :rtype: str - - >>> import semver - >>> semver.replace("1.2.3", major=2, patch=10) - '2.2.10' - """ - version = parse_version_info(version) - return str(version.replace(**parts)) - - if __name__ == "__main__": import doctest + doctest.testmod() diff --git a/pipenv/vendor/six.py b/pipenv/vendor/six.py index 5fe9f8e141..83f69783d1 100644 --- a/pipenv/vendor/six.py +++ b/pipenv/vendor/six.py @@ -29,7 +29,7 @@ import types __author__ = "Benjamin Peterson " -__version__ = "1.14.0" +__version__ = "1.15.0" # Useful for very coarse version differentiation. 
@@ -890,12 +890,11 @@ def ensure_binary(s, encoding='utf-8', errors='strict'): - `str` -> encoded to `bytes` - `bytes` -> `bytes` """ + if isinstance(s, binary_type): + return s if isinstance(s, text_type): return s.encode(encoding, errors) - elif isinstance(s, binary_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) + raise TypeError("not expecting type '%s'" % type(s)) def ensure_str(s, encoding='utf-8', errors='strict'): @@ -909,12 +908,15 @@ def ensure_str(s, encoding='utf-8', errors='strict'): - `str` -> `str` - `bytes` -> decoded to `str` """ - if not isinstance(s, (text_type, binary_type)): - raise TypeError("not expecting type '%s'" % type(s)) + # Optimization: Fast return for the common case. + if type(s) is str: + return s if PY2 and isinstance(s, text_type): - s = s.encode(encoding, errors) + return s.encode(encoding, errors) elif PY3 and isinstance(s, binary_type): - s = s.decode(encoding, errors) + return s.decode(encoding, errors) + elif not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) return s diff --git a/pipenv/vendor/toml/__init__.py b/pipenv/vendor/toml/__init__.py index 7e13a0c36f..7719ac23a7 100644 --- a/pipenv/vendor/toml/__init__.py +++ b/pipenv/vendor/toml/__init__.py @@ -6,7 +6,7 @@ from toml import encoder from toml import decoder -__version__ = "0.10.1" +__version__ = "0.10.2" _spec_ = "0.5.0" load = decoder.load diff --git a/pipenv/vendor/toml/common.py b/pipenv/vendor/toml/common.py deleted file mode 100644 index a5d673dac5..0000000000 --- a/pipenv/vendor/toml/common.py +++ /dev/null @@ -1,6 +0,0 @@ -# content after the \ -escapes = ['0', 'b', 'f', 'n', 'r', 't', '"'] -# What it should be replaced by -escapedchars = ['\0', '\b', '\f', '\n', '\r', '\t', '\"'] -# Used for substitution -escape_to_escapedchars = dict(zip(_escapes, _escapedchars)) diff --git a/pipenv/vendor/toml/decoder.py b/pipenv/vendor/toml/decoder.py index 3ec5b43afe..bf400e9761 100644 --- 
a/pipenv/vendor/toml/decoder.py +++ b/pipenv/vendor/toml/decoder.py @@ -440,7 +440,8 @@ def loads(s, _dict=dict, decoder=None): groups[i][0] == "'"): groupstr = groups[i] j = i + 1 - while not groupstr[0] == groupstr[-1]: + while ((not groupstr[0] == groupstr[-1]) or + len(groupstr) == 1): j += 1 if j > len(groups) + 2: raise TomlDecodeError("Invalid group name '" + @@ -811,8 +812,12 @@ def load_value(self, v, strictly_valid=True): raise ValueError("Empty value is invalid") if v == 'true': return (True, "bool") + elif v.lower() == 'true': + raise ValueError("Only all lowercase booleans allowed") elif v == 'false': return (False, "bool") + elif v.lower() == 'false': + raise ValueError("Only all lowercase booleans allowed") elif v[0] == '"' or v[0] == "'": quotechar = v[0] testv = v[1:].split(quotechar) diff --git a/pipenv/vendor/toml/encoder.py b/pipenv/vendor/toml/encoder.py index d9e557ed95..bf17a72b62 100644 --- a/pipenv/vendor/toml/encoder.py +++ b/pipenv/vendor/toml/encoder.py @@ -61,7 +61,7 @@ def dumps(o, encoder=None): retval += addtoretval outer_objs = [id(o)] while sections: - section_ids = [id(section) for section in sections] + section_ids = [id(section) for section in sections.values()] for outer_obj in outer_objs: if outer_obj in section_ids: raise ValueError("Circular reference detected") diff --git a/pipenv/vendor/toml/tz.py b/pipenv/vendor/toml/tz.py index 93c3c8ad26..bf20593a26 100644 --- a/pipenv/vendor/toml/tz.py +++ b/pipenv/vendor/toml/tz.py @@ -11,6 +11,9 @@ def __init__(self, toml_offset): self._hours = int(self._raw_offset[1:3]) self._minutes = int(self._raw_offset[4:6]) + def __deepcopy__(self, memo): + return self.__class__(self._raw_offset) + def tzname(self, dt): return "UTC" + self._raw_offset diff --git a/pipenv/vendor/urllib3/LICENSE.txt b/pipenv/vendor/urllib3/LICENSE.txt index c89cf27b85..429a1767e4 100644 --- a/pipenv/vendor/urllib3/LICENSE.txt +++ b/pipenv/vendor/urllib3/LICENSE.txt @@ -1,6 +1,6 @@ MIT License -Copyright (c) 
2008-2019 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/pipenv/vendor/urllib3/__init__.py b/pipenv/vendor/urllib3/__init__.py index 667e9bce9e..fe86b59d78 100644 --- a/pipenv/vendor/urllib3/__init__.py +++ b/pipenv/vendor/urllib3/__init__.py @@ -1,28 +1,27 @@ """ -urllib3 - Thread-safe connection pooling and re-using. +Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more """ from __future__ import absolute_import -import warnings -from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url +# Set default logging handler to avoid "No handler found" warnings. +import logging +import warnings +from logging import NullHandler from . import exceptions +from ._version import __version__ +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url from .filepost import encode_multipart_formdata from .poolmanager import PoolManager, ProxyManager, proxy_from_url from .response import HTTPResponse from .util.request import make_headers -from .util.url import get_host -from .util.timeout import Timeout from .util.retry import Retry - - -# Set default logging handler to avoid "No handler found" warnings. 
-import logging -from logging import NullHandler +from .util.timeout import Timeout +from .util.url import get_host __author__ = "Andrey Petrov (andrey.petrov@shazow.net)" __license__ = "MIT" -__version__ = "1.25.9" +__version__ = __version__ __all__ = ( "HTTPConnectionPool", diff --git a/pipenv/vendor/urllib3/_collections.py b/pipenv/vendor/urllib3/_collections.py index 019d1511d5..da9857e986 100644 --- a/pipenv/vendor/urllib3/_collections.py +++ b/pipenv/vendor/urllib3/_collections.py @@ -17,9 +17,10 @@ def __exit__(self, exc_type, exc_value, traceback): from collections import OrderedDict -from .exceptions import InvalidHeader -from .packages.six import iterkeys, itervalues, PY3 +from .exceptions import InvalidHeader +from .packages import six +from .packages.six import iterkeys, itervalues __all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"] @@ -174,7 +175,7 @@ def __eq__(self, other): def __ne__(self, other): return not self.__eq__(other) - if not PY3: # Python 2 + if six.PY2: # Python 2 iterkeys = MutableMapping.iterkeys itervalues = MutableMapping.itervalues @@ -190,7 +191,7 @@ def __iter__(self): def pop(self, key, default=__marker): """D.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. + If key is not found, d is returned if given, otherwise KeyError is raised. """ # Using the MutableMapping function directly fails due to the private marker. # Using ordinary dict.pop would expose the internal structures. 
diff --git a/pipenv/vendor/urllib3/_version.py b/pipenv/vendor/urllib3/_version.py new file mode 100644 index 0000000000..cd4e7b06a4 --- /dev/null +++ b/pipenv/vendor/urllib3/_version.py @@ -0,0 +1,2 @@ +# This file is protected via CODEOWNERS +__version__ = "1.26.1" diff --git a/pipenv/vendor/urllib3/connection.py b/pipenv/vendor/urllib3/connection.py index 6da1cf4b6d..660d679c36 100644 --- a/pipenv/vendor/urllib3/connection.py +++ b/pipenv/vendor/urllib3/connection.py @@ -1,14 +1,18 @@ from __future__ import absolute_import -import re + import datetime import logging import os +import re import socket -from socket import error as SocketError, timeout as SocketTimeout import warnings +from socket import error as SocketError +from socket import timeout as SocketTimeout + from .packages import six from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection from .packages.six.moves.http_client import HTTPException # noqa: F401 +from .util.proxy import create_proxy_ssl_context try: # Compiled with SSL? import ssl @@ -30,27 +34,33 @@ class ConnectionError(Exception): pass +try: # Python 3: + # Not a no-op, we're adding this to the namespace so it can be imported. 
+ BrokenPipeError = BrokenPipeError +except NameError: # Python 2: + + class BrokenPipeError(Exception): + pass + + +from ._collections import HTTPHeaderDict # noqa (historical, removed in v2) +from ._version import __version__ from .exceptions import ( - NewConnectionError, ConnectTimeoutError, + NewConnectionError, SubjectAltNameWarning, SystemTimeWarning, ) -from .packages.ssl_match_hostname import match_hostname, CertificateError - +from .packages.ssl_match_hostname import CertificateError, match_hostname +from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection from .util.ssl_ import ( - resolve_cert_reqs, - resolve_ssl_version, assert_fingerprint, create_urllib3_context, + resolve_cert_reqs, + resolve_ssl_version, ssl_wrap_socket, ) - -from .util import connection - -from ._collections import HTTPHeaderDict - log = logging.getLogger(__name__) port_by_scheme = {"http": 80, "https": 443} @@ -62,34 +72,30 @@ class ConnectionError(Exception): _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") -class DummyConnection(object): - """Used to detect a failed ConnectionCls import.""" - - pass - - class HTTPConnection(_HTTPConnection, object): """ - Based on httplib.HTTPConnection but provides an extra constructor + Based on :class:`http.client.HTTPConnection` but provides an extra constructor backwards-compatibility layer between older and newer Pythons. Additional keyword parameters are used to configure attributes of the connection. Accepted parameters include: - - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` - - ``source_address``: Set the source address for the current connection. - - ``socket_options``: Set specific options on the underlying socket. If not specified, then - defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling - Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. 
+ - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` + - ``source_address``: Set the source address for the current connection. + - ``socket_options``: Set specific options on the underlying socket. If not specified, then + defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling + Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. - For example, if you wish to enable TCP Keep Alive in addition to the defaults, - you might pass:: + For example, if you wish to enable TCP Keep Alive in addition to the defaults, + you might pass: - HTTPConnection.default_socket_options + [ - (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), - ] + .. code-block:: python - Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). + HTTPConnection.default_socket_options + [ + (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), + ] + + Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). """ default_port = port_by_scheme["http"] @@ -112,6 +118,10 @@ def __init__(self, *args, **kw): #: provided, we use the default options. self.socket_options = kw.pop("socket_options", self.default_socket_options) + # Proxy options provided by the user. + self.proxy = kw.pop("proxy", None) + self.proxy_config = kw.pop("proxy_config", None) + _HTTPConnection.__init__(self, *args, **kw) @property @@ -144,7 +154,7 @@ def host(self, value): self._dns_host = value def _new_conn(self): - """ Establish a socket connection and set nodelay settings on it. + """Establish a socket connection and set nodelay settings on it. :return: New socket connection. 
""" @@ -174,10 +184,13 @@ def _new_conn(self): return conn + def _is_using_tunnel(self): + # Google App Engine's httplib does not define _tunnel_host + return getattr(self, "_tunnel_host", None) + def _prepare_conn(self, conn): self.sock = conn - # Google App Engine's httplib does not define _tunnel_host - if getattr(self, "_tunnel_host", None): + if self._is_using_tunnel(): # TODO: Fix tunnel so it doesn't depend on self.sock state. self._tunnel() # Mark this connection as not reusable @@ -188,7 +201,9 @@ def connect(self): self._prepare_conn(conn) def putrequest(self, method, url, *args, **kwargs): - """Send a request to the server""" + """""" + # Empty docstring because the indentation of CPython's implementation + # is broken but we don't want this method in our documentation. match = _CONTAINS_CONTROL_CHAR_RE.search(method) if match: raise ValueError( @@ -198,17 +213,40 @@ def putrequest(self, method, url, *args, **kwargs): return _HTTPConnection.putrequest(self, method, url, *args, **kwargs) + def putheader(self, header, *values): + """""" + if SKIP_HEADER not in values: + _HTTPConnection.putheader(self, header, *values) + elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS: + raise ValueError( + "urllib3.util.SKIP_HEADER only supports '%s'" + % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),) + ) + + def request(self, method, url, body=None, headers=None): + if headers is None: + headers = {} + else: + # Avoid modifying the headers passed into .request() + headers = headers.copy() + if "user-agent" not in (six.ensure_str(k.lower()) for k in headers): + headers["User-Agent"] = _get_default_user_agent() + super(HTTPConnection, self).request(method, url, body=body, headers=headers) + def request_chunked(self, method, url, body=None, headers=None): """ Alternative to the common request method, which sends the body with chunked encoding and not as one block """ - headers = HTTPHeaderDict(headers if headers is not None else {}) - 
skip_accept_encoding = "accept-encoding" in headers - skip_host = "host" in headers + headers = headers or {} + header_keys = set([six.ensure_str(k.lower()) for k in headers]) + skip_accept_encoding = "accept-encoding" in header_keys + skip_host = "host" in header_keys self.putrequest( method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host ) + if "user-agent" not in header_keys: + self.putheader("User-Agent", _get_default_user_agent()) for header, value in headers.items(): self.putheader(header, value) if "transfer-encoding" not in headers: @@ -225,16 +263,22 @@ def request_chunked(self, method, url, body=None, headers=None): if not isinstance(chunk, bytes): chunk = chunk.encode("utf8") len_str = hex(len(chunk))[2:] - self.send(len_str.encode("utf-8")) - self.send(b"\r\n") - self.send(chunk) - self.send(b"\r\n") + to_send = bytearray(len_str.encode()) + to_send += b"\r\n" + to_send += chunk + to_send += b"\r\n" + self.send(to_send) # After the if clause, to always have a closed body self.send(b"0\r\n\r\n") class HTTPSConnection(HTTPConnection): + """ + Many of the parameters to this constructor are passed to the underlying SSL + socket by means of :py:func:`urllib3.util.ssl_wrap_socket`. + """ + default_port = port_by_scheme["https"] cert_reqs = None @@ -243,6 +287,7 @@ class HTTPSConnection(HTTPConnection): ca_cert_data = None ssl_version = None assert_fingerprint = None + tls_in_tls_required = False def __init__( self, @@ -307,10 +352,15 @@ def connect(self): # Add certificate verification conn = self._new_conn() hostname = self.host + tls_in_tls = False + + if self._is_using_tunnel(): + if self.tls_in_tls_required: + conn = self._connect_tls_proxy(hostname, conn) + tls_in_tls = True - # Google App Engine's httplib does not define _tunnel_host - if getattr(self, "_tunnel_host", None): self.sock = conn + # Calls self._set_hostport(), so self.host is # self._tunnel_host below. 
self._tunnel() @@ -368,8 +418,26 @@ def connect(self): ca_cert_data=self.ca_cert_data, server_hostname=server_hostname, ssl_context=context, + tls_in_tls=tls_in_tls, ) + # If we're using all defaults and the connection + # is TLSv1 or TLSv1.1 we throw a DeprecationWarning + # for the host. + if ( + default_ssl_context + and self.ssl_version is None + and hasattr(self.sock, "version") + and self.sock.version() in {"TLSv1", "TLSv1.1"} + ): + warnings.warn( + "Negotiating TLSv1/TLSv1.1 by default is deprecated " + "and will be disabled in urllib3 v2.0.0. Connecting to " + "'%s' with '%s' can be enabled by explicitly opting-in " + "with 'ssl_version'" % (self.host, self.sock.version()), + DeprecationWarning, + ) + if self.assert_fingerprint: assert_fingerprint( self.sock.getpeercert(binary_form=True), self.assert_fingerprint @@ -400,6 +468,40 @@ def connect(self): or self.assert_fingerprint is not None ) + def _connect_tls_proxy(self, hostname, conn): + """ + Establish a TLS connection to the proxy using the provided SSL context. 
+ """ + proxy_config = self.proxy_config + ssl_context = proxy_config.ssl_context + if ssl_context: + # If the user provided a proxy context, we assume CA and client + # certificates have already been set + return ssl_wrap_socket( + sock=conn, + server_hostname=hostname, + ssl_context=ssl_context, + ) + + ssl_context = create_proxy_ssl_context( + self.ssl_version, + self.cert_reqs, + self.ca_certs, + self.ca_cert_dir, + self.ca_cert_data, + ) + + # If no cert was provided, use only the default options for server + # certificate validation + return ssl_wrap_socket( + sock=conn, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, + server_hostname=hostname, + ssl_context=ssl_context, + ) + def _match_hostname(cert, asserted_hostname): try: @@ -416,6 +518,16 @@ def _match_hostname(cert, asserted_hostname): raise +def _get_default_user_agent(): + return "python-urllib3/%s" % __version__ + + +class DummyConnection(object): + """Used to detect a failed ConnectionCls import.""" + + pass + + if not ssl: HTTPSConnection = DummyConnection # noqa: F811 diff --git a/pipenv/vendor/urllib3/connectionpool.py b/pipenv/vendor/urllib3/connectionpool.py index 5f044dbd90..4708c5bfc7 100644 --- a/pipenv/vendor/urllib3/connectionpool.py +++ b/pipenv/vendor/urllib3/connectionpool.py @@ -1,57 +1,53 @@ from __future__ import absolute_import + import errno import logging +import socket import sys import warnings +from socket import error as SocketError +from socket import timeout as SocketTimeout -from socket import error as SocketError, timeout as SocketTimeout -import socket - - +from .connection import ( + BaseSSLError, + BrokenPipeError, + DummyConnection, + HTTPConnection, + HTTPException, + HTTPSConnection, + VerifiedHTTPSConnection, + port_by_scheme, +) from .exceptions import ( ClosedPoolError, - ProtocolError, EmptyPoolError, HeaderParsingError, HostChangedError, + InsecureRequestWarning, LocationValueError, MaxRetryError, + 
NewConnectionError, + ProtocolError, ProxyError, ReadTimeoutError, SSLError, TimeoutError, - InsecureRequestWarning, - NewConnectionError, ) -from .packages.ssl_match_hostname import CertificateError from .packages import six from .packages.six.moves import queue -from .connection import ( - port_by_scheme, - DummyConnection, - HTTPConnection, - HTTPSConnection, - VerifiedHTTPSConnection, - HTTPException, - BaseSSLError, -) +from .packages.ssl_match_hostname import CertificateError from .request import RequestMethods from .response import HTTPResponse - from .util.connection import is_connection_dropped +from .util.proxy import connection_requires_http_tunnel +from .util.queue import LifoQueue from .util.request import set_file_position from .util.response import assert_header_parsing from .util.retry import Retry from .util.timeout import Timeout -from .util.url import ( - get_host, - parse_url, - Url, - _normalize_host as normalize_host, - _encode_target, -) -from .util.queue import LifoQueue - +from .util.url import Url, _encode_target +from .util.url import _normalize_host as normalize_host +from .util.url import get_host, parse_url xrange = six.moves.xrange @@ -111,16 +107,16 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): :param host: Host used for this HTTP Connection (e.g. "localhost"), passed into - :class:`httplib.HTTPConnection`. + :class:`http.client.HTTPConnection`. :param port: Port used for this HTTP Connection (None is equivalent to 80), passed - into :class:`httplib.HTTPConnection`. + into :class:`http.client.HTTPConnection`. :param strict: Causes BadStatusLine to be raised if the status line can't be parsed as a valid HTTP/1.0 or 1.1 status line, passed into - :class:`httplib.HTTPConnection`. + :class:`http.client.HTTPConnection`. .. note:: Only works in Python 2. This parameter is ignored in Python 3. 
@@ -154,11 +150,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): :param _proxy: Parsed proxy URL, should not be used directly, instead, see - :class:`urllib3.connectionpool.ProxyManager`" + :class:`urllib3.ProxyManager` :param _proxy_headers: A dictionary with proxy headers, should not be used directly, - instead, see :class:`urllib3.connectionpool.ProxyManager`" + instead, see :class:`urllib3.ProxyManager` :param \\**conn_kw: Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, @@ -181,6 +177,7 @@ def __init__( retries=None, _proxy=None, _proxy_headers=None, + _proxy_config=None, **conn_kw ): ConnectionPool.__init__(self, host, port) @@ -202,6 +199,7 @@ def __init__( self.proxy = _proxy self.proxy_headers = _proxy_headers or {} + self.proxy_config = _proxy_config # Fill the queue up so that doing get() on it will block properly for _ in xrange(maxsize): @@ -218,6 +216,9 @@ def __init__( # list. self.conn_kw.setdefault("socket_options", []) + self.conn_kw["proxy"] = self.proxy + self.conn_kw["proxy_config"] = self.proxy_config + def _new_conn(self): """ Return a fresh :class:`HTTPConnection`. @@ -272,7 +273,7 @@ def _get_conn(self, timeout=None): conn.close() if getattr(conn, "auto_open", 1) == 0: # This is a proxied connection that has been mutated by - # httplib._tunnel() and cannot be reused (since it would + # http.client._tunnel() and cannot be reused (since it would # attempt to bypass the proxy) conn = None @@ -384,12 +385,30 @@ def _make_request( self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) raise - # conn.request() calls httplib.*.request, not the method in + # conn.request() calls http.client.*.request, not the method in # urllib3.request. It also calls makefile (recv) on the socket. 
- if chunked: - conn.request_chunked(method, url, **httplib_request_kw) - else: - conn.request(method, url, **httplib_request_kw) + try: + if chunked: + conn.request_chunked(method, url, **httplib_request_kw) + else: + conn.request(method, url, **httplib_request_kw) + + # We are swallowing BrokenPipeError (errno.EPIPE) since the server is + # legitimately able to close the connection after sending a valid response. + # With this behaviour, the received response is still readable. + except BrokenPipeError: + # Python 3 + pass + except IOError as e: + # Python 2 and macOS/Linux + # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS + # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/ + if e.errno not in { + errno.EPIPE, + errno.ESHUTDOWN, + errno.EPROTOTYPE, + }: + raise # Reset the timeout for the recv() on the socket read_timeout = timeout_obj.read_timeout @@ -532,10 +551,12 @@ def urlopen( :param method: HTTP request method (such as GET, POST, PUT, etc.) + :param url: + The URL to perform the request on. + :param body: - Data to send in the request body (useful for creating - POST requests, see HTTPConnectionPool.post_url for - more convenience). + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. :param headers: Dictionary of custom headers to send, such as User-Agent, @@ -565,7 +586,7 @@ def urlopen( :param assert_same_host: If ``True``, will make sure that the host of the pool requests is - consistent else will raise HostChangedError. When False, you can + consistent else will raise HostChangedError. When ``False``, you can use the pool on an HTTP proxy and request foreign hosts. 
:param timeout: @@ -602,6 +623,10 @@ def urlopen( Additional parameters are passed to :meth:`urllib3.response.HTTPResponse.from_httplib` """ + + parsed_url = parse_url(url) + destination_scheme = parsed_url.scheme + if headers is None: headers = self.headers @@ -619,7 +644,7 @@ def urlopen( if url.startswith("/"): url = six.ensure_str(_encode_target(url)) else: - url = six.ensure_str(parse_url(url).url) + url = six.ensure_str(parsed_url.url) conn = None @@ -634,10 +659,14 @@ def urlopen( # [1] release_this_conn = release_conn - # Merge the proxy headers. Only do this in HTTP. We have to copy the - # headers dict so we can safely change it without those changes being - # reflected in anyone else's copy. - if self.scheme == "http": + http_tunnel_required = connection_requires_http_tunnel( + self.proxy, self.proxy_config, destination_scheme + ) + + # Merge the proxy headers. Only done when not using HTTP CONNECT. We + # have to copy the headers dict so we can safely change it without those + # changes being reflected in anyone else's copy. + if not http_tunnel_required: headers = headers.copy() headers.update(self.proxy_headers) @@ -663,7 +692,7 @@ def urlopen( is_new_proxy_conn = self.proxy is not None and not getattr( conn, "sock", None ) - if is_new_proxy_conn: + if is_new_proxy_conn and http_tunnel_required: self._prepare_proxy(conn) # Make the request on the httplib connection object. @@ -698,9 +727,11 @@ def urlopen( # Everything went great! clean_exit = True - except queue.Empty: - # Timed out by queue. - raise EmptyPoolError(self, "No pool connections are available.") + except EmptyPoolError: + # Didn't get a connection from the pool, no need to clean up + clean_exit = True + release_this_conn = False + raise except ( TimeoutError, @@ -835,11 +866,7 @@ class HTTPSConnectionPool(HTTPConnectionPool): """ Same as :class:`.HTTPConnectionPool`, but HTTPS. 
- When Python is compiled with the :mod:`ssl` module, then - :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates, - instead of :class:`.HTTPSConnection`. - - :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``, + :class:`.HTTPSConnection` uses one of ``assert_fingerprint``, ``assert_hostname`` and ``host`` in this order to verify connections. If ``assert_hostname`` is False, no verification is done. @@ -923,15 +950,22 @@ def _prepare_conn(self, conn): def _prepare_proxy(self, conn): """ - Establish tunnel connection early, because otherwise httplib - would improperly set Host: header to proxy's IP:port. + Establishes a tunnel connection through HTTP CONNECT. + + Tunnel connection is established early because otherwise httplib would + improperly set Host: header to proxy's IP:port. """ + conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers) + + if self.proxy.scheme == "https": + conn.tls_in_tls_required = True + conn.connect() def _new_conn(self): """ - Return a fresh :class:`httplib.HTTPSConnection`. + Return a fresh :class:`http.client.HTTPSConnection`. 
""" self.num_connections += 1 log.debug( diff --git a/pipenv/vendor/urllib3/contrib/_securetransport/bindings.py b/pipenv/vendor/urllib3/contrib/_securetransport/bindings.py index d9b6733318..11524d400b 100644 --- a/pipenv/vendor/urllib3/contrib/_securetransport/bindings.py +++ b/pipenv/vendor/urllib3/contrib/_securetransport/bindings.py @@ -32,30 +32,26 @@ from __future__ import absolute_import import platform -from ctypes.util import find_library from ctypes import ( - c_void_p, - c_int32, + CDLL, + CFUNCTYPE, + POINTER, + c_bool, + c_byte, c_char_p, + c_int32, + c_long, c_size_t, - c_byte, c_uint32, c_ulong, - c_long, - c_bool, + c_void_p, ) -from ctypes import CDLL, POINTER, CFUNCTYPE - - -security_path = find_library("Security") -if not security_path: - raise ImportError("The library Security could not be found") - +from ctypes.util import find_library -core_foundation_path = find_library("CoreFoundation") -if not core_foundation_path: - raise ImportError("The library CoreFoundation could not be found") +from urllib3.packages.six import raise_from +if platform.system() != "Darwin": + raise ImportError("Only macOS is supported") version = platform.mac_ver()[0] version_info = tuple(map(int, version.split("."))) @@ -65,8 +61,31 @@ % (version_info[0], version_info[1]) ) -Security = CDLL(security_path, use_errno=True) -CoreFoundation = CDLL(core_foundation_path, use_errno=True) + +def load_cdll(name, macos10_16_path): + """Loads a CDLL by name, falling back to known path on 10.16+""" + try: + # Big Sur is technically 11 but we use 10.16 due to the Big Sur + # beta being labeled as 10.16. 
+ if version_info >= (10, 16): + path = macos10_16_path + else: + path = find_library(name) + if not path: + raise OSError # Caught and reraised as 'ImportError' + return CDLL(path, use_errno=True) + except OSError: + raise_from(ImportError("The library %s failed to load" % name), None) + + +Security = load_cdll( + "Security", "/System/Library/Frameworks/Security.framework/Security" +) +CoreFoundation = load_cdll( + "CoreFoundation", + "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation", +) + Boolean = c_bool CFIndex = c_long @@ -276,6 +295,13 @@ Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol] Security.SSLSetProtocolVersionMax.restype = OSStatus + try: + Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef] + Security.SSLSetALPNProtocols.restype = OSStatus + except AttributeError: + # Supported only in 10.12+ + pass + Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p] Security.SecCopyErrorMessageString.restype = CFStringRef diff --git a/pipenv/vendor/urllib3/contrib/_securetransport/low_level.py b/pipenv/vendor/urllib3/contrib/_securetransport/low_level.py index e60168cac1..ed8120190c 100644 --- a/pipenv/vendor/urllib3/contrib/_securetransport/low_level.py +++ b/pipenv/vendor/urllib3/contrib/_securetransport/low_level.py @@ -10,13 +10,13 @@ import base64 import ctypes import itertools -import re import os +import re import ssl +import struct import tempfile -from .bindings import Security, CoreFoundation, CFConst - +from .bindings import CFConst, CoreFoundation, Security # This regular expression is used to grab PEM data out of a PEM bundle. _PEM_CERTS_RE = re.compile( @@ -56,6 +56,51 @@ def _cf_dictionary_from_tuples(tuples): ) +def _cfstr(py_bstr): + """ + Given a Python binary data, create a CFString. + The string must be CFReleased by the caller. 
+ """ + c_str = ctypes.c_char_p(py_bstr) + cf_str = CoreFoundation.CFStringCreateWithCString( + CoreFoundation.kCFAllocatorDefault, + c_str, + CFConst.kCFStringEncodingUTF8, + ) + return cf_str + + +def _create_cfstring_array(lst): + """ + Given a list of Python binary data, create an associated CFMutableArray. + The array must be CFReleased by the caller. + + Raises an ssl.SSLError on failure. + """ + cf_arr = None + try: + cf_arr = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + if not cf_arr: + raise MemoryError("Unable to allocate memory!") + for item in lst: + cf_str = _cfstr(item) + if not cf_str: + raise MemoryError("Unable to allocate memory!") + try: + CoreFoundation.CFArrayAppendValue(cf_arr, cf_str) + finally: + CoreFoundation.CFRelease(cf_str) + except BaseException as e: + if cf_arr: + CoreFoundation.CFRelease(cf_arr) + raise ssl.SSLError("Unable to allocate array: %s" % (e,)) + return cf_arr + + def _cf_string_to_unicode(value): """ Creates a Unicode string from a CFString object. Used entirely for error @@ -326,3 +371,26 @@ def _load_client_cert_chain(keychain, *paths): finally: for obj in itertools.chain(identities, certificates): CoreFoundation.CFRelease(obj) + + +TLS_PROTOCOL_VERSIONS = { + "SSLv2": (0, 2), + "SSLv3": (3, 0), + "TLSv1": (3, 1), + "TLSv1.1": (3, 2), + "TLSv1.2": (3, 3), +} + + +def _build_tls_unknown_ca_alert(version): + """ + Builds a TLS alert record for an unknown CA. 
+ """ + ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version] + severity_fatal = 0x02 + description_unknown_ca = 0x30 + msg = struct.pack(">BB", severity_fatal, description_unknown_ca) + msg_len = len(msg) + record_type_alert = 0x15 + record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg + return record diff --git a/pipenv/vendor/urllib3/contrib/appengine.py b/pipenv/vendor/urllib3/contrib/appengine.py index 9b7044ffb0..aa64a0914c 100644 --- a/pipenv/vendor/urllib3/contrib/appengine.py +++ b/pipenv/vendor/urllib3/contrib/appengine.py @@ -39,24 +39,24 @@ """ from __future__ import absolute_import + import io import logging import warnings -from ..packages.six.moves.urllib.parse import urljoin from ..exceptions import ( HTTPError, HTTPWarning, MaxRetryError, ProtocolError, - TimeoutError, SSLError, + TimeoutError, ) - +from ..packages.six.moves.urllib.parse import urljoin from ..request import RequestMethods from ..response import HTTPResponse -from ..util.timeout import Timeout from ..util.retry import Retry +from ..util.timeout import Timeout from . import _appengine_environ try: @@ -90,7 +90,7 @@ class AppEngineManager(RequestMethods): * If you attempt to use this on App Engine Flexible, as full socket support is available. * If a request size is more than 10 megabytes. - * If a response size is more than 32 megabtyes. + * If a response size is more than 32 megabytes. * If you use an unsupported request method such as OPTIONS. Beyond those cases, it will raise normal urllib3 errors. diff --git a/pipenv/vendor/urllib3/contrib/ntlmpool.py b/pipenv/vendor/urllib3/contrib/ntlmpool.py index 1fd242a6e0..b2df45dcf6 100644 --- a/pipenv/vendor/urllib3/contrib/ntlmpool.py +++ b/pipenv/vendor/urllib3/contrib/ntlmpool.py @@ -6,12 +6,12 @@ from __future__ import absolute_import from logging import getLogger + from ntlm import ntlm from .. 
import HTTPSConnectionPool from ..packages.six.moves.http_client import HTTPSConnection - log = getLogger(__name__) diff --git a/pipenv/vendor/urllib3/contrib/pyopenssl.py b/pipenv/vendor/urllib3/contrib/pyopenssl.py index 81a80651d4..0cabab1aed 100644 --- a/pipenv/vendor/urllib3/contrib/pyopenssl.py +++ b/pipenv/vendor/urllib3/contrib/pyopenssl.py @@ -1,27 +1,31 @@ """ -SSL with SNI_-support for Python 2. Follow these instructions if you would -like to verify SSL certificates in Python 2. Note, the default libraries do +TLS with SNI_-support for Python 2. Follow these instructions if you would +like to verify TLS certificates in Python 2. Note, the default libraries do *not* do certificate checking; you need to do additional work to validate certificates yourself. This needs the following packages installed: -* pyOpenSSL (tested with 16.0.0) -* cryptography (minimum 1.3.4, from pyopenssl) -* idna (minimum 2.0, from cryptography) +* `pyOpenSSL`_ (tested with 16.0.0) +* `cryptography`_ (minimum 1.3.4, from pyopenssl) +* `idna`_ (minimum 2.0, from cryptography) However, pyopenssl depends on cryptography, which depends on idna, so while we use all three directly here we end up having relatively few packages required. You can install them with the following command: - pip install pyopenssl cryptography idna +.. code-block:: bash + + $ python -m pip install pyopenssl cryptography idna To activate certificate checking, call :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code before you begin making HTTP requests. This can be done in a ``sitecustomize`` module, or at any other time before your application begins using ``urllib3``, -like this:: +like this: + +.. code-block:: python try: import urllib3.contrib.pyopenssl @@ -35,11 +39,11 @@ Activating this module also has the positive side effect of disabling SSL/TLS compression in Python 2 (see `CRIME attack`_). 
-If you want to configure the default list of supported cipher suites, you can -set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable. - .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) +.. _pyopenssl: https://www.pyopenssl.org +.. _cryptography: https://cryptography.io +.. _idna: https://github.com/kjd/idna """ from __future__ import absolute_import @@ -56,8 +60,9 @@ class UnsupportedExtension(Exception): pass -from socket import timeout, error as SocketError from io import BytesIO +from socket import error as SocketError +from socket import timeout try: # Platform-specific: Python 2 from socket import _fileobject @@ -67,11 +72,10 @@ class UnsupportedExtension(Exception): import logging import ssl -from ..packages import six import sys from .. import util - +from ..packages import six __all__ = ["inject_into_urllib3", "extract_from_urllib3"] @@ -465,6 +469,10 @@ def load_cert_chain(self, certfile, keyfile=None, password=None): self._ctx.set_passwd_cb(lambda *_: password) self._ctx.use_privatekey_file(keyfile or certfile) + def set_alpn_protocols(self, protocols): + protocols = [six.ensure_binary(p) for p in protocols] + return self._ctx.set_alpn_protos(protocols) + def wrap_socket( self, sock, diff --git a/pipenv/vendor/urllib3/contrib/securetransport.py b/pipenv/vendor/urllib3/contrib/securetransport.py index a6b7e94ade..ab092de67a 100644 --- a/pipenv/vendor/urllib3/contrib/securetransport.py +++ b/pipenv/vendor/urllib3/contrib/securetransport.py @@ -29,6 +29,8 @@ that reason, this code should be considered to be covered both by urllib3's license and by oscrypto's: +.. code-block:: + Copyright (c) 2015-2016 Will Bond Permission is hereby granted, free of charge, to any person obtaining a @@ -58,16 +60,21 @@ import shutil import socket import ssl +import struct import threading import weakref +import six + from .. 
import util -from ._securetransport.bindings import Security, SecurityConst, CoreFoundation +from ._securetransport.bindings import CoreFoundation, Security, SecurityConst from ._securetransport.low_level import ( _assert_no_error, + _build_tls_unknown_ca_alert, _cert_array_from_pem, - _temporary_keychain, + _create_cfstring_array, _load_client_cert_chain, + _temporary_keychain, ) try: # Platform-specific: Python 2 @@ -374,16 +381,55 @@ def _set_ciphers(self): ) _assert_no_error(result) + def _set_alpn_protocols(self, protocols): + """ + Sets up the ALPN protocols on the context. + """ + if not protocols: + return + protocols_arr = _create_cfstring_array(protocols) + try: + result = Security.SSLSetALPNProtocols(self.context, protocols_arr) + _assert_no_error(result) + finally: + CoreFoundation.CFRelease(protocols_arr) + def _custom_validate(self, verify, trust_bundle): """ Called when we have set custom validation. We do this in two cases: first, when cert validation is entirely disabled; and second, when using a custom trust DB. + Raises an SSLError if the connection is not trusted. """ # If we disabled cert validation, just say: cool. if not verify: return + successes = ( + SecurityConst.kSecTrustResultUnspecified, + SecurityConst.kSecTrustResultProceed, + ) + try: + trust_result = self._evaluate_trust(trust_bundle) + if trust_result in successes: + return + reason = "error code: %d" % (trust_result,) + except Exception as e: + # Do not trust on error + reason = "exception: %r" % (e,) + + # SecureTransport does not send an alert nor shuts down the connection. 
+ rec = _build_tls_unknown_ca_alert(self.version()) + self.socket.sendall(rec) + # close the connection immediately + # l_onoff = 1, activate linger + # l_linger = 0, linger for 0 seconds + opts = struct.pack("ii", 1, 0) + self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts) + self.close() + raise ssl.SSLError("certificate verify failed, %s" % reason) + + def _evaluate_trust(self, trust_bundle): # We want data in memory, so load it up. if os.path.isfile(trust_bundle): with open(trust_bundle, "rb") as f: @@ -421,15 +467,7 @@ def _custom_validate(self, verify, trust_bundle): if cert_array is not None: CoreFoundation.CFRelease(cert_array) - # Ok, now we can look at what the result was. - successes = ( - SecurityConst.kSecTrustResultUnspecified, - SecurityConst.kSecTrustResultProceed, - ) - if trust_result.value not in successes: - raise ssl.SSLError( - "certificate verify failed, error code: %d" % trust_result.value - ) + return trust_result.value def handshake( self, @@ -441,6 +479,7 @@ def handshake( client_cert, client_key, client_key_passphrase, + alpn_protocols, ): """ Actually performs the TLS handshake. This is run automatically by @@ -481,6 +520,9 @@ def handshake( # Setup the ciphers. self._set_ciphers() + # Setup the ALPN protocols. + self._set_alpn_protocols(alpn_protocols) + # Set the minimum and maximum TLS versions. result = Security.SSLSetProtocolVersionMin(self.context, min_version) _assert_no_error(result) @@ -754,6 +796,7 @@ def __init__(self, protocol): self._client_cert = None self._client_key = None self._client_key_passphrase = None + self._alpn_protocols = None @property def check_hostname(self): @@ -831,6 +874,18 @@ def load_cert_chain(self, certfile, keyfile=None, password=None): self._client_key = keyfile self._client_cert_passphrase = password + def set_alpn_protocols(self, protocols): + """ + Sets the ALPN protocols that will later be set on the context. + + Raises a NotImplementedError if ALPN is not supported.
+ """ + if not hasattr(Security, "SSLSetALPNProtocols"): + raise NotImplementedError( + "SecureTransport supports ALPN only in macOS 10.12+" + ) + self._alpn_protocols = [six.ensure_binary(p) for p in protocols] + def wrap_socket( self, sock, @@ -860,5 +915,6 @@ def wrap_socket( self._client_cert, self._client_key, self._client_key_passphrase, + self._alpn_protocols, ) return wrapped_socket diff --git a/pipenv/vendor/urllib3/contrib/socks.py b/pipenv/vendor/urllib3/contrib/socks.py index 9e97f7aa98..93df8325d5 100644 --- a/pipenv/vendor/urllib3/contrib/socks.py +++ b/pipenv/vendor/urllib3/contrib/socks.py @@ -14,22 +14,26 @@ - SOCKS5 with local DNS (``proxy_url='socks5://...``) - Usernames and passwords for the SOCKS proxy - .. note:: - It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in - your ``proxy_url`` to ensure that DNS resolution is done from the remote - server instead of client-side when connecting to a domain name. +.. note:: + It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in + your ``proxy_url`` to ensure that DNS resolution is done from the remote + server instead of client-side when connecting to a domain name. SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5 supports IPv4, IPv6, and domain names. When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url`` -will be sent as the ``userid`` section of the SOCKS request:: +will be sent as the ``userid`` section of the SOCKS request: + +.. code-block:: python proxy_url="socks4a://@proxy-host" When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion of the ``proxy_url`` will be sent as the username/password to authenticate -with the proxy:: +with the proxy: + +.. 
code-block:: python proxy_url="socks5h://:@proxy-host" @@ -40,6 +44,7 @@ import socks except ImportError: import warnings + from ..exceptions import DependencyWarning warnings.warn( @@ -52,7 +57,8 @@ ) raise -from socket import error as SocketError, timeout as SocketTimeout +from socket import error as SocketError +from socket import timeout as SocketTimeout from ..connection import HTTPConnection, HTTPSConnection from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool diff --git a/pipenv/vendor/urllib3/exceptions.py b/pipenv/vendor/urllib3/exceptions.py index 5cc4d8a4f1..d69958d5df 100644 --- a/pipenv/vendor/urllib3/exceptions.py +++ b/pipenv/vendor/urllib3/exceptions.py @@ -1,21 +1,24 @@ from __future__ import absolute_import + from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead # Base Exceptions class HTTPError(Exception): - "Base exception used by this module." + """Base exception used by this module.""" + pass class HTTPWarning(Warning): - "Base warning used by this module." + """Base warning used by this module.""" + pass class PoolError(HTTPError): - "Base exception for errors caused within a pool." + """Base exception for errors caused within a pool.""" def __init__(self, pool, message): self.pool = pool @@ -27,7 +30,7 @@ def __reduce__(self): class RequestError(PoolError): - "Base exception for PoolErrors that have associated URLs." + """Base exception for PoolErrors that have associated URLs.""" def __init__(self, pool, url, message): self.url = url @@ -39,12 +42,13 @@ def __reduce__(self): class SSLError(HTTPError): - "Raised when SSL certificate fails in an HTTPS connection." + """Raised when SSL certificate fails in an HTTPS connection.""" + pass class ProxyError(HTTPError): - "Raised when the connection to a proxy fails." 
+ """Raised when the connection to a proxy fails.""" def __init__(self, message, error, *args): super(ProxyError, self).__init__(message, error, *args) @@ -52,12 +56,14 @@ def __init__(self, message, error, *args): class DecodeError(HTTPError): - "Raised when automatic decoding based on Content-Type fails." + """Raised when automatic decoding based on Content-Type fails.""" + pass class ProtocolError(HTTPError): - "Raised when something unexpected happens mid-request/response." + """Raised when something unexpected happens mid-request/response.""" + pass @@ -87,7 +93,7 @@ def __init__(self, pool, url, reason=None): class HostChangedError(RequestError): - "Raised when an existing pool gets a request for a foreign host." + """Raised when an existing pool gets a request for a foreign host.""" def __init__(self, pool, url, retries=3): message = "Tried to open a foreign host with url: %s" % url @@ -96,13 +102,13 @@ def __init__(self, pool, url, retries=3): class TimeoutStateError(HTTPError): - """ Raised when passing an invalid state to a timeout """ + """Raised when passing an invalid state to a timeout""" pass class TimeoutError(HTTPError): - """ Raised when a socket timeout error occurs. + """Raised when a socket timeout error occurs. Catching this error will catch both :exc:`ReadTimeoutErrors ` and :exc:`ConnectTimeoutErrors `. 
@@ -112,39 +118,45 @@ class TimeoutError(HTTPError): class ReadTimeoutError(TimeoutError, RequestError): - "Raised when a socket timeout occurs while receiving data from a server" + """Raised when a socket timeout occurs while receiving data from a server""" + pass # This timeout error does not have a URL attached and needs to inherit from the # base HTTPError class ConnectTimeoutError(TimeoutError): - "Raised when a socket timeout occurs while connecting to a server" + """Raised when a socket timeout occurs while connecting to a server""" + pass class NewConnectionError(ConnectTimeoutError, PoolError): - "Raised when we fail to establish a new connection. Usually ECONNREFUSED." + """Raised when we fail to establish a new connection. Usually ECONNREFUSED.""" + pass class EmptyPoolError(PoolError): - "Raised when a pool runs out of connections and no more are allowed." + """Raised when a pool runs out of connections and no more are allowed.""" + pass class ClosedPoolError(PoolError): - "Raised when a request enters a pool after the pool has been closed." + """Raised when a request enters a pool after the pool has been closed.""" + pass class LocationValueError(ValueError, HTTPError): - "Raised when there is something wrong with a given URL input." + """Raised when there is something wrong with a given URL input.""" + pass class LocationParseError(LocationValueError): - "Raised when get_host or similar fails to parse the URL input." 
+ """Raised when get_host or similar fails to parse the URL input.""" def __init__(self, location): message = "Failed to parse: %s" % location @@ -153,39 +165,56 @@ def __init__(self, location): self.location = location +class URLSchemeUnknown(LocationValueError): + """Raised when a URL input has an unsupported scheme.""" + + def __init__(self, scheme): + message = "Not supported URL scheme %s" % scheme + super(URLSchemeUnknown, self).__init__(message) + + self.scheme = scheme + + class ResponseError(HTTPError): - "Used as a container for an error reason supplied in a MaxRetryError." + """Used as a container for an error reason supplied in a MaxRetryError.""" + GENERIC_ERROR = "too many error responses" SPECIFIC_ERROR = "too many {status_code} error responses" class SecurityWarning(HTTPWarning): - "Warned when performing security reducing actions" + """Warned when performing security reducing actions""" + pass class SubjectAltNameWarning(SecurityWarning): - "Warned when connecting to a host with a certificate missing a SAN." + """Warned when connecting to a host with a certificate missing a SAN.""" + pass class InsecureRequestWarning(SecurityWarning): - "Warned when making an unverified HTTPS request." + """Warned when making an unverified HTTPS request.""" + pass class SystemTimeWarning(SecurityWarning): - "Warned when system time is suspected to be wrong" + """Warned when system time is suspected to be wrong""" + pass class InsecurePlatformWarning(SecurityWarning): - "Warned when certain SSL configuration is not available on a platform." + """Warned when certain TLS/SSL configuration is not available on a platform.""" + pass class SNIMissingWarning(HTTPWarning): - "Warned when making a HTTPS request without SNI available." 
+ """Warned when making a HTTPS request without SNI available.""" + pass @@ -198,29 +227,16 @@ class DependencyWarning(HTTPWarning): pass -class InvalidProxyConfigurationWarning(HTTPWarning): - """ - Warned when using an HTTPS proxy and an HTTPS URL. Currently - urllib3 doesn't support HTTPS proxies and the proxy will be - contacted via HTTP instead. This warning can be fixed by - changing your HTTPS proxy URL into an HTTP proxy URL. - - If you encounter this warning read this: - https://github.com/urllib3/urllib3/issues/1850 - """ - - pass - - class ResponseNotChunked(ProtocolError, ValueError): - "Response needs to be chunked in order to read it as chunks." + """Response needs to be chunked in order to read it as chunks.""" + pass class BodyNotHttplibCompatible(HTTPError): """ - Body should be httplib.HTTPResponse like (have an fp attribute which - returns raw chunks) for read_chunked(). + Body should be :class:`http.client.HTTPResponse` like + (have an fp attribute which returns raw chunks) for read_chunked(). """ pass @@ -230,9 +246,8 @@ class IncompleteRead(HTTPError, httplib_IncompleteRead): """ Response length doesn't match expected Content-Length - Subclass of http_client.IncompleteRead to allow int value - for `partial` to avoid creating large objects on streamed - reads. + Subclass of :class:`http.client.IncompleteRead` to allow int value + for ``partial`` to avoid creating large objects on streamed reads. 
""" def __init__(self, partial, expected): @@ -245,13 +260,32 @@ def __repr__(self): ) +class InvalidChunkLength(HTTPError, httplib_IncompleteRead): + """Invalid chunk length in a chunked response.""" + + def __init__(self, response, length): + super(InvalidChunkLength, self).__init__( + response.tell(), response.length_remaining + ) + self.response = response + self.length = length + + def __repr__(self): + return "InvalidChunkLength(got length %r, %i bytes read)" % ( + self.length, + self.partial, + ) + + class InvalidHeader(HTTPError): - "The header provided was somehow invalid." + """The header provided was somehow invalid.""" + pass -class ProxySchemeUnknown(AssertionError, ValueError): - "ProxyManager does not support the supplied scheme" +class ProxySchemeUnknown(AssertionError, URLSchemeUnknown): + """ProxyManager does not support the supplied scheme""" + # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. def __init__(self, scheme): @@ -259,8 +293,14 @@ def __init__(self, scheme): super(ProxySchemeUnknown, self).__init__(message) +class ProxySchemeUnsupported(ValueError): + """Fetching HTTPS resources through HTTPS proxies is unsupported""" + + pass + + class HeaderParsingError(HTTPError): - "Raised by assert_header_parsing, but we convert it to a log.warning statement." 
+ """Raised by assert_header_parsing, but we convert it to a log.warning statement.""" def __init__(self, defects, unparsed_data): message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data) @@ -268,5 +308,6 @@ def __init__(self, defects, unparsed_data): class UnrewindableBodyError(HTTPError): - "urllib3 encountered an error when trying to rewind a body" + """urllib3 encountered an error when trying to rewind a body""" + pass diff --git a/pipenv/vendor/urllib3/fields.py b/pipenv/vendor/urllib3/fields.py index 8715b2202b..9d630f491d 100644 --- a/pipenv/vendor/urllib3/fields.py +++ b/pipenv/vendor/urllib3/fields.py @@ -1,4 +1,5 @@ from __future__ import absolute_import + import email.utils import mimetypes import re @@ -26,7 +27,8 @@ def format_header_param_rfc2231(name, value): strategy defined in RFC 2231. Particularly useful for header parameters which might contain - non-ASCII values, like file names. This follows RFC 2388 Section 4.4. + non-ASCII values, like file names. This follows + `RFC 2388 Section 4.4 `_. :param name: The name of the parameter, a string expected to be ASCII only. @@ -65,7 +67,6 @@ def format_header_param_rfc2231(name, value): u"\u0022": u"%22", # Replace "\" with "\\". u"\u005C": u"\u005C\u005C", - u"\u005C": u"\u005C\u005C", } # All control characters from 0x00 to 0x1F *except* 0x1B. 
diff --git a/pipenv/vendor/urllib3/filepost.py b/pipenv/vendor/urllib3/filepost.py index b7b00992c6..36c9252c64 100644 --- a/pipenv/vendor/urllib3/filepost.py +++ b/pipenv/vendor/urllib3/filepost.py @@ -1,13 +1,13 @@ from __future__ import absolute_import + import binascii import codecs import os - from io import BytesIO +from .fields import RequestField from .packages import six from .packages.six import b -from .fields import RequestField writer = codecs.lookup("utf-8")[3] diff --git a/pipenv/vendor/urllib3/packages/backports/makefile.py b/pipenv/vendor/urllib3/packages/backports/makefile.py index a3156a69c0..b8fb2154b6 100644 --- a/pipenv/vendor/urllib3/packages/backports/makefile.py +++ b/pipenv/vendor/urllib3/packages/backports/makefile.py @@ -7,7 +7,6 @@ wants to create a "fake" socket object. """ import io - from socket import SocketIO diff --git a/pipenv/vendor/urllib3/packages/ssl_match_hostname/__init__.py b/pipenv/vendor/urllib3/packages/ssl_match_hostname/__init__.py index 75b6bb1cf0..6b12fd90aa 100644 --- a/pipenv/vendor/urllib3/packages/ssl_match_hostname/__init__.py +++ b/pipenv/vendor/urllib3/packages/ssl_match_hostname/__init__.py @@ -10,10 +10,13 @@ except ImportError: try: # Backport of the function from a pypi module - from backports.ssl_match_hostname import CertificateError, match_hostname + from backports.ssl_match_hostname import ( # type: ignore + CertificateError, + match_hostname, + ) except ImportError: # Our vendored copy - from ._implementation import CertificateError, match_hostname + from ._implementation import CertificateError, match_hostname # type: ignore # Not needed, but documenting what we provide. 
__all__ = ("CertificateError", "match_hostname") diff --git a/pipenv/vendor/urllib3/poolmanager.py b/pipenv/vendor/urllib3/poolmanager.py index e2bd3bd8db..3a31a285bf 100644 --- a/pipenv/vendor/urllib3/poolmanager.py +++ b/pipenv/vendor/urllib3/poolmanager.py @@ -1,24 +1,24 @@ from __future__ import absolute_import + import collections import functools import logging -import warnings from ._collections import RecentlyUsedContainer -from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool -from .connectionpool import port_by_scheme +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme from .exceptions import ( LocationValueError, MaxRetryError, ProxySchemeUnknown, - InvalidProxyConfigurationWarning, + ProxySchemeUnsupported, + URLSchemeUnknown, ) from .packages import six from .packages.six.moves.urllib.parse import urljoin from .request import RequestMethods -from .util.url import parse_url +from .util.proxy import connection_requires_http_tunnel from .util.retry import Retry - +from .util.url import parse_url __all__ = ["PoolManager", "ProxyManager", "proxy_from_url"] @@ -59,6 +59,7 @@ "key_headers", # dict "key__proxy", # parsed proxy url "key__proxy_headers", # dict + "key__proxy_config", # class "key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples "key__socks_options", # dict "key_assert_hostname", # bool or string @@ -70,6 +71,9 @@ #: All custom key schemes should include the fields in this key at a minimum. 
PoolKey = collections.namedtuple("PoolKey", _key_fields) +_proxy_config_fields = ("ssl_context", "use_forwarding_for_https") +ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields) + def _default_key_normalizer(key_class, request_context): """ @@ -161,6 +165,7 @@ class PoolManager(RequestMethods): """ proxy = None + proxy_config = None def __init__(self, num_pools=10, headers=None, **connection_pool_kw): RequestMethods.__init__(self, headers) @@ -182,7 +187,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): def _new_pool(self, scheme, host, port, request_context=None): """ - Create a new :class:`ConnectionPool` based on host, port, scheme, and + Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and any additional pool keyword arguments. If ``request_context`` is provided, it is provided as keyword arguments @@ -218,7 +223,7 @@ def clear(self): def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None): """ - Get a :class:`ConnectionPool` based on the host, port, and scheme. + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is @@ -241,20 +246,22 @@ def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None) def connection_from_context(self, request_context): """ - Get a :class:`ConnectionPool` based on the request context. + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context. ``request_context`` must at least contain the ``scheme`` key and its value must be a key in ``key_fn_by_scheme`` instance variable. 
""" scheme = request_context["scheme"].lower() - pool_key_constructor = self.key_fn_by_scheme[scheme] + pool_key_constructor = self.key_fn_by_scheme.get(scheme) + if not pool_key_constructor: + raise URLSchemeUnknown(scheme) pool_key = pool_key_constructor(request_context) return self.connection_from_pool_key(pool_key, request_context=request_context) def connection_from_pool_key(self, pool_key, request_context=None): """ - Get a :class:`ConnectionPool` based on the provided pool key. + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key. ``pool_key`` should be a namedtuple that only contains immutable objects. At a minimum it must have the ``scheme``, ``host``, and @@ -312,9 +319,39 @@ def _merge_pool_kwargs(self, override): base_pool_kwargs[key] = value return base_pool_kwargs + def _proxy_requires_url_absolute_form(self, parsed_url): + """ + Indicates if the proxy requires the complete destination URL in the + request. Normally this is only needed when not using an HTTP CONNECT + tunnel. + """ + if self.proxy is None: + return False + + return not connection_requires_http_tunnel( + self.proxy, self.proxy_config, parsed_url.scheme + ) + + def _validate_proxy_scheme_url_selection(self, url_scheme): + """ + Validates that were not attempting to do TLS in TLS connections on + Python2 or with unsupported SSL implementations. + """ + if self.proxy is None or url_scheme != "https": + return + + if self.proxy.scheme != "https": + return + + if six.PY2 and not self.proxy_config.use_forwarding_for_https: + raise ProxySchemeUnsupported( + "Contacting HTTPS destinations through HTTPS proxies " + "'via CONNECT tunnels' is not supported in Python 2" + ) + def urlopen(self, method, url, redirect=True, **kw): """ - Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen` + Same as :meth:`urllib3.HTTPConnectionPool.urlopen` with custom cross-host redirect logic and only sends the request-uri portion of the ``url``. 
@@ -322,6 +359,8 @@ def urlopen(self, method, url, redirect=True, **kw): :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. """ u = parse_url(url) + self._validate_proxy_scheme_url_selection(u.scheme) + conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) kw["assert_same_host"] = False @@ -330,7 +369,7 @@ def urlopen(self, method, url, redirect=True, **kw): if "headers" not in kw: kw["headers"] = self.headers.copy() - if self.proxy is not None and u.scheme == "http": + if self._proxy_requires_url_absolute_form(u): response = conn.urlopen(method, url, **kw) else: response = conn.urlopen(method, u.request_uri, **kw) @@ -392,6 +431,19 @@ class ProxyManager(PoolManager): HTTPS/CONNECT case they are sent only once. Could be used for proxy authentication. + :param proxy_ssl_context: + The proxy SSL context is used to establish the TLS connection to the + proxy when using HTTPS proxies. + + :param use_forwarding_for_https: + (Defaults to False) If set to True will forward requests to the HTTPS + proxy to be made on behalf of the client instead of creating a TLS + tunnel via the CONNECT method. **Enabling this flag means that request + and response headers and content will be visible from the HTTPS proxy** + whereas tunneling keeps request and response headers and content + private. IP address, target hostname, SNI, and port are always visible + to an HTTPS proxy even when this flag is disabled. 
+ Example: >>> proxy = urllib3.ProxyManager('http://localhost:3128/') >>> r1 = proxy.request('GET', 'http://google.com/') @@ -411,6 +463,8 @@ def __init__( num_pools=10, headers=None, proxy_headers=None, + proxy_ssl_context=None, + use_forwarding_for_https=False, **connection_pool_kw ): @@ -421,18 +475,22 @@ def __init__( proxy_url.port, ) proxy = parse_url(proxy_url) - if not proxy.port: - port = port_by_scheme.get(proxy.scheme, 80) - proxy = proxy._replace(port=port) if proxy.scheme not in ("http", "https"): raise ProxySchemeUnknown(proxy.scheme) + if not proxy.port: + port = port_by_scheme.get(proxy.scheme, 80) + proxy = proxy._replace(port=port) + self.proxy = proxy self.proxy_headers = proxy_headers or {} + self.proxy_ssl_context = proxy_ssl_context + self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https) connection_pool_kw["_proxy"] = self.proxy connection_pool_kw["_proxy_headers"] = self.proxy_headers + connection_pool_kw["_proxy_config"] = self.proxy_config super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw) @@ -461,27 +519,13 @@ def _set_proxy_headers(self, url, headers=None): headers_.update(headers) return headers_ - def _validate_proxy_scheme_url_selection(self, url_scheme): - if url_scheme == "https" and self.proxy.scheme == "https": - warnings.warn( - "Your proxy configuration specified an HTTPS scheme for the proxy. " - "Are you sure you want to use HTTPS to contact the proxy? " - "This most likely indicates an error in your configuration. " - "Read this issue for more info: " - "https://github.com/urllib3/urllib3/issues/1850", - InvalidProxyConfigurationWarning, - stacklevel=3, - ) - def urlopen(self, method, url, redirect=True, **kw): "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." u = parse_url(url) - self._validate_proxy_scheme_url_selection(u.scheme) - - if u.scheme == "http": - # For proxied HTTPS requests, httplib sets the necessary headers - # on the CONNECT to the proxy. 
For HTTP, we'll definitely - # need to set 'Host' at the very least. + if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme): + # For connections using HTTP CONNECT, httplib sets the necessary + # headers on the CONNECT to the proxy. If we're not using CONNECT, + # we'll definitely need to set 'Host' at the very least. headers = kw.get("headers", self.headers) kw["headers"] = self._set_proxy_headers(url, headers) diff --git a/pipenv/vendor/urllib3/request.py b/pipenv/vendor/urllib3/request.py index 55f160bbf1..398386a5b9 100644 --- a/pipenv/vendor/urllib3/request.py +++ b/pipenv/vendor/urllib3/request.py @@ -3,15 +3,14 @@ from .filepost import encode_multipart_formdata from .packages.six.moves.urllib.parse import urlencode - __all__ = ["RequestMethods"] class RequestMethods(object): """ Convenience mixin for classes who implement a :meth:`urlopen` method, such - as :class:`~urllib3.connectionpool.HTTPConnectionPool` and - :class:`~urllib3.poolmanager.PoolManager`. + as :class:`urllib3.HTTPConnectionPool` and + :class:`urllib3.PoolManager`. Provides behavior for making common types of HTTP request methods and decides which type of request field encoding to use. @@ -111,9 +110,9 @@ def request_encode_body( the body. This is useful for request methods like POST, PUT, PATCH, etc. When ``encode_multipart=True`` (default), then - :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode + :func:`urllib3.encode_multipart_formdata` is used to encode the payload with the appropriate content type. Otherwise - :meth:`urllib.urlencode` is used with the + :func:`urllib.parse.urlencode` is used with the 'application/x-www-form-urlencoded' content type. 
Multipart encoding must be used when posting files, and it's reasonably diff --git a/pipenv/vendor/urllib3/response.py b/pipenv/vendor/urllib3/response.py index 7dc9b93cae..38693f4fc6 100644 --- a/pipenv/vendor/urllib3/response.py +++ b/pipenv/vendor/urllib3/response.py @@ -1,10 +1,11 @@ from __future__ import absolute_import -from contextlib import contextmanager -import zlib + import io import logging -from socket import timeout as SocketTimeout +import zlib +from contextlib import contextmanager from socket import error as SocketError +from socket import timeout as SocketTimeout try: import brotli @@ -12,19 +13,20 @@ brotli = None from ._collections import HTTPHeaderDict +from .connection import BaseSSLError, HTTPException from .exceptions import ( BodyNotHttplibCompatible, - ProtocolError, DecodeError, - ReadTimeoutError, - ResponseNotChunked, + HTTPError, IncompleteRead, + InvalidChunkLength, InvalidHeader, - HTTPError, + ProtocolError, + ReadTimeoutError, + ResponseNotChunked, + SSLError, ) -from .packages.six import string_types as basestring, PY3 -from .packages.six.moves import http_client as httplib -from .connection import HTTPException, BaseSSLError +from .packages import six from .util.response import is_fp_closed, is_response_to_head log = logging.getLogger(__name__) @@ -107,11 +109,10 @@ class BrotliDecoder(object): # are for 'brotlipy' and bottom branches for 'Brotli' def __init__(self): self._obj = brotli.Decompressor() - - def decompress(self, data): if hasattr(self._obj, "decompress"): - return self._obj.decompress(data) - return self._obj.process(data) + self.decompress = self._obj.decompress + else: + self.decompress = self._obj.process def flush(self): if hasattr(self._obj, "flush"): @@ -157,13 +158,13 @@ class HTTPResponse(io.IOBase): """ HTTP Response container. 
- Backwards-compatible to httplib's HTTPResponse but the response ``body`` is + Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is loaded and decoded on-demand when the ``data`` property is accessed. This class is also compatible with the Python standard library's :mod:`io` module, and can hence be treated as a readable object in the context of that framework. - Extra parameters for behaviour not present in httplib.HTTPResponse: + Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`: :param preload_content: If True, the response's body will be preloaded during construction. @@ -173,7 +174,7 @@ class is also compatible with the Python standard library's :mod:`io` 'content-encoding' header. :param original_response: - When this HTTPResponse wrapper is generated from an httplib.HTTPResponse + When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse` object, it's convenient to include the original for debug purposes. It's otherwise unused. @@ -233,7 +234,7 @@ def __init__( self.msg = msg self._request_url = request_url - if body and isinstance(body, (basestring, bytes)): + if body and isinstance(body, (six.string_types, bytes)): self._body = body self._pool = pool @@ -291,7 +292,7 @@ def drain_conn(self): @property def data(self): - # For backwords-compat with earlier urllib3 0.4 and earlier. + # For backwards-compat with earlier urllib3 0.4 and earlier. if self._body: return self._body @@ -308,8 +309,8 @@ def isclosed(self): def tell(self): """ Obtain the number of bytes pulled over the wire so far. May differ from - the amount of content returned by :meth:``HTTPResponse.read`` if bytes - are encoded on the wire (e.g, compressed). + the amount of content returned by :meth:``urllib3.response.HTTPResponse.read`` + if bytes are encoded on the wire (e.g, compressed). 
""" return self._fp_bytes_read @@ -443,10 +444,9 @@ def _error_catcher(self): except BaseSSLError as e: # FIXME: Is there a better way to differentiate between SSLErrors? - if "read operation timed out" not in str(e): # Defensive: - # This shouldn't happen but just in case we're missing an edge - # case, let's avoid swallowing SSL errors. - raise + if "read operation timed out" not in str(e): + # SSL errors related to framing/MAC get wrapped and reraised here + raise SSLError(e) raise ReadTimeoutError(self._pool, None, "Read timed out.") @@ -480,7 +480,7 @@ def _error_catcher(self): def read(self, amt=None, decode_content=None, cache_content=False): """ - Similar to :meth:`httplib.HTTPResponse.read`, but with two additional + Similar to :meth:`http.client.HTTPResponse.read`, but with two additional parameters: ``decode_content`` and ``cache_content``. :param amt: @@ -581,7 +581,7 @@ def stream(self, amt=2 ** 16, decode_content=None): @classmethod def from_httplib(ResponseCls, r, **response_kw): """ - Given an :class:`httplib.HTTPResponse` instance ``r``, return a + Given an :class:`http.client.HTTPResponse` instance ``r``, return a corresponding :class:`urllib3.response.HTTPResponse` object. 
Remaining parameters are passed to the HTTPResponse constructor, along @@ -590,11 +590,11 @@ def from_httplib(ResponseCls, r, **response_kw): headers = r.msg if not isinstance(headers, HTTPHeaderDict): - if PY3: - headers = HTTPHeaderDict(headers.items()) - else: + if six.PY2: # Python 2.7 headers = HTTPHeaderDict.from_httplib(headers) + else: + headers = HTTPHeaderDict(headers.items()) # HTTPResponse objects in Python 3 don't have a .strict attribute strict = getattr(r, "strict", 0) @@ -610,7 +610,7 @@ def from_httplib(ResponseCls, r, **response_kw): ) return resp - # Backwards-compatibility methods for httplib.HTTPResponse + # Backwards-compatibility methods for http.client.HTTPResponse def getheaders(self): return self.headers @@ -680,8 +680,8 @@ def readinto(self, b): def supports_chunked_reads(self): """ Checks if the underlying file-like object looks like a - httplib.HTTPResponse object. We do this by testing for the fp - attribute. If it is present we assume it returns raw chunks as + :class:`http.client.HTTPResponse` object. We do this by testing for + the fp attribute. If it is present we assume it returns raw chunks as processed by read_chunked(). """ return hasattr(self._fp, "fp") @@ -698,7 +698,7 @@ def _update_chunk_length(self): except ValueError: # Invalid chunked protocol response, abort. self.close() - raise httplib.IncompleteRead(line) + raise InvalidChunkLength(self, line) def _handle_chunk(self, amt): returned_chunk = None @@ -745,7 +745,7 @@ def read_chunked(self, amt=None, decode_content=None): ) if not self.supports_chunked_reads(): raise BodyNotHttplibCompatible( - "Body should be httplib.HTTPResponse like. " + "Body should be http.client.HTTPResponse like. " "It should have have an fp attribute which returns raw chunks." 
) diff --git a/pipenv/vendor/urllib3/util/__init__.py b/pipenv/vendor/urllib3/util/__init__.py index a96c73a9d8..4547fc522b 100644 --- a/pipenv/vendor/urllib3/util/__init__.py +++ b/pipenv/vendor/urllib3/util/__init__.py @@ -2,23 +2,23 @@ # For backwards compatibility, provide imports that used to be here. from .connection import is_connection_dropped -from .request import make_headers +from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers from .response import is_fp_closed +from .retry import Retry from .ssl_ import ( - SSLContext, + ALPN_PROTOCOLS, HAS_SNI, IS_PYOPENSSL, IS_SECURETRANSPORT, + PROTOCOL_TLS, + SSLContext, assert_fingerprint, resolve_cert_reqs, resolve_ssl_version, ssl_wrap_socket, - PROTOCOL_TLS, ) -from .timeout import current_time, Timeout - -from .retry import Retry -from .url import get_host, parse_url, split_first, Url +from .timeout import Timeout, current_time +from .url import Url, get_host, parse_url, split_first from .wait import wait_for_read, wait_for_write __all__ = ( @@ -27,6 +27,7 @@ "IS_SECURETRANSPORT", "SSLContext", "PROTOCOL_TLS", + "ALPN_PROTOCOLS", "Retry", "Timeout", "Url", @@ -43,4 +44,6 @@ "ssl_wrap_socket", "wait_for_read", "wait_for_write", + "SKIP_HEADER", + "SKIPPABLE_HEADERS", ) diff --git a/pipenv/vendor/urllib3/util/connection.py b/pipenv/vendor/urllib3/util/connection.py index 86f0a3b00e..cd57455748 100644 --- a/pipenv/vendor/urllib3/util/connection.py +++ b/pipenv/vendor/urllib3/util/connection.py @@ -1,7 +1,12 @@ from __future__ import absolute_import + import socket -from .wait import NoWayToWaitForSocketError, wait_for_read + +from urllib3.exceptions import LocationParseError + from ..contrib import _appengine_environ +from ..packages import six +from .wait import NoWayToWaitForSocketError, wait_for_read def is_connection_dropped(conn): # Platform-specific @@ -9,7 +14,7 @@ def is_connection_dropped(conn): # Platform-specific Returns True if the connection is dropped and should be closed. 
:param conn: - :class:`httplib.HTTPConnection` object. + :class:`http.client.HTTPConnection` object. Note: For platforms like AppEngine, this will always return ``False`` to let the platform handle connection recycling transparently for us. @@ -42,7 +47,7 @@ def create_connection( port)``) and return the socket object. Passing the optional *timeout* parameter will set the timeout on the socket instance before attempting to connect. If no *timeout* is supplied, the - global default timeout setting returned by :func:`getdefaulttimeout` + global default timeout setting returned by :func:`socket.getdefaulttimeout` is used. If *source_address* is set it must be a tuple of (host, port) for the socket to bind as a source address before making the connection. An host of '' or port 0 tells the OS to use the default. @@ -58,6 +63,13 @@ def create_connection( # The original create_connection function always returns all records. family = allowed_gai_family() + try: + host.encode("idna") + except UnicodeError: + return six.raise_from( + LocationParseError(u"'%s', label empty or too long" % host), None + ) + for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): af, socktype, proto, canonname, sa = res sock = None diff --git a/pipenv/vendor/urllib3/util/proxy.py b/pipenv/vendor/urllib3/util/proxy.py new file mode 100644 index 0000000000..34f884d5b3 --- /dev/null +++ b/pipenv/vendor/urllib3/util/proxy.py @@ -0,0 +1,56 @@ +from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version + + +def connection_requires_http_tunnel( + proxy_url=None, proxy_config=None, destination_scheme=None +): + """ + Returns True if the connection requires an HTTP CONNECT through the proxy. + + :param URL proxy_url: + URL of the proxy. + :param ProxyConfig proxy_config: + Proxy configuration from poolmanager.py + :param str destination_scheme: + The scheme of the destination. (i.e https, http, etc) + """ + # If we're not using a proxy, no way to use a tunnel. 
+ if proxy_url is None: + return False + + # HTTP destinations never require tunneling, we always forward. + if destination_scheme == "http": + return False + + # Support for forwarding with HTTPS proxies and HTTPS destinations. + if ( + proxy_url.scheme == "https" + and proxy_config + and proxy_config.use_forwarding_for_https + ): + return False + + # Otherwise always use a tunnel. + return True + + +def create_proxy_ssl_context( + ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None +): + """ + Generates a default proxy ssl context if one hasn't been provided by the + user. + """ + ssl_context = create_urllib3_context( + ssl_version=resolve_ssl_version(ssl_version), + cert_reqs=resolve_cert_reqs(cert_reqs), + ) + if ( + not ca_certs + and not ca_cert_dir + and not ca_cert_data + and hasattr(ssl_context, "load_default_certs") + ): + ssl_context.load_default_certs() + + return ssl_context diff --git a/pipenv/vendor/urllib3/util/queue.py b/pipenv/vendor/urllib3/util/queue.py index d3d379a199..41784104ee 100644 --- a/pipenv/vendor/urllib3/util/queue.py +++ b/pipenv/vendor/urllib3/util/queue.py @@ -1,4 +1,5 @@ import collections + from ..packages import six from ..packages.six.moves import queue diff --git a/pipenv/vendor/urllib3/util/request.py b/pipenv/vendor/urllib3/util/request.py index 3b7bb54daf..25103383ec 100644 --- a/pipenv/vendor/urllib3/util/request.py +++ b/pipenv/vendor/urllib3/util/request.py @@ -1,8 +1,16 @@ from __future__ import absolute_import + from base64 import b64encode -from ..packages.six import b, integer_types from ..exceptions import UnrewindableBodyError +from ..packages.six import b, integer_types + +# Pass as a value within ``headers`` to skip +# emitting some HTTP headers that are added automatically. +# The only headers that are supported are ``Accept-Encoding``, +# ``Host``, and ``User-Agent``. 
+SKIP_HEADER = "@@@SKIP_HEADER@@@" +SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"]) ACCEPT_ENCODING = "gzip,deflate" try: diff --git a/pipenv/vendor/urllib3/util/response.py b/pipenv/vendor/urllib3/util/response.py index 715868dd10..5ea609cced 100644 --- a/pipenv/vendor/urllib3/util/response.py +++ b/pipenv/vendor/urllib3/util/response.py @@ -1,7 +1,9 @@ from __future__ import absolute_import -from ..packages.six.moves import http_client as httplib + +from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect from ..exceptions import HeaderParsingError +from ..packages.six.moves import http_client as httplib def is_fp_closed(obj): @@ -42,8 +44,7 @@ def assert_header_parsing(headers): Only works on Python 3. - :param headers: Headers to verify. - :type headers: `httplib.HTTPMessage`. + :param http.client.HTTPMessage headers: Headers to verify. :raises urllib3.exceptions.HeaderParsingError: If parsing errors are found. @@ -66,6 +67,25 @@ def assert_header_parsing(headers): if isinstance(payload, (bytes, str)): unparsed_data = payload + if defects: + # httplib is assuming a response body is available + # when parsing headers even when httplib only sends + # header data to parse_headers() This results in + # defects on multipart responses in particular. + # See: https://github.com/urllib3/urllib3/issues/800 + + # So we ignore the following defects: + # - StartBoundaryNotFoundDefect: + # The claimed start boundary was never found. + # - MultipartInvariantViolationDefect: + # A message claimed to be a multipart but no subparts were found. + defects = [ + defect + for defect in defects + if not isinstance( + defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect) + ) + ] if defects or unparsed_data: raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) @@ -76,8 +96,9 @@ def is_response_to_head(response): Checks whether the request of a response has been a HEAD-request. 
Handles the quirks of AppEngine. - :param conn: - :type conn: :class:`httplib.HTTPResponse` + :param http.client.HTTPResponse response: + Response to check if the originating request + used 'HEAD' as a method. """ # FIXME: Can we do this somehow without accessing private httplib _method? method = response._method diff --git a/pipenv/vendor/urllib3/util/retry.py b/pipenv/vendor/urllib3/util/retry.py index ee30c91b14..ee51f922f8 100644 --- a/pipenv/vendor/urllib3/util/retry.py +++ b/pipenv/vendor/urllib3/util/retry.py @@ -1,23 +1,24 @@ from __future__ import absolute_import -import time + +import email import logging +import re +import time +import warnings from collections import namedtuple from itertools import takewhile -import email -import re from ..exceptions import ( ConnectTimeoutError, + InvalidHeader, MaxRetryError, ProtocolError, + ProxyError, ReadTimeoutError, ResponseError, - InvalidHeader, - ProxyError, ) from ..packages import six - log = logging.getLogger(__name__) @@ -27,8 +28,51 @@ ) +# TODO: In v2 we can remove this sentinel and metaclass with deprecated options. +_Default = object() + + +class _RetryMeta(type): + @property + def DEFAULT_METHOD_WHITELIST(cls): + warnings.warn( + "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_METHODS_ALLOWED' instead", + DeprecationWarning, + ) + return cls.DEFAULT_ALLOWED_METHODS + + @DEFAULT_METHOD_WHITELIST.setter + def DEFAULT_METHOD_WHITELIST(cls, value): + warnings.warn( + "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead", + DeprecationWarning, + ) + cls.DEFAULT_ALLOWED_METHODS = value + + @property + def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls): + warnings.warn( + "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and " + "will be removed in v2.0. 
Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead", + DeprecationWarning, + ) + return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT + + @DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter + def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value): + warnings.warn( + "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead", + DeprecationWarning, + ) + cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value + + +@six.add_metaclass(_RetryMeta) class Retry(object): - """ Retry configuration. + """Retry configuration. Each retry attempt will create a new Retry object with updated values, so they can be safely reused. @@ -54,8 +98,7 @@ class Retry(object): Total number of retries to allow. Takes precedence over other counts. Set to ``None`` to remove this constraint and fall back on other - counts. It's a good idea to set this to some sensibly-high value to - account for unexpected edge cases and avoid infinite retry loops. + counts. Set to ``0`` to fail on the first retry. @@ -96,18 +139,35 @@ class Retry(object): Set to ``0`` to fail on the first retry of this type. - :param iterable method_whitelist: + :param int other: + How many times to retry on other errors. + + Other errors are errors that are not connect, read, redirect or status errors. + These errors might be raised after the request was sent to the server, so the + request might have side-effects. + + Set to ``0`` to fail on the first retry of this type. + + If ``total`` is not set, it's a good idea to set this to 0 to account + for unexpected edge cases and avoid infinite retry loops. + + :param iterable allowed_methods: Set of uppercased HTTP method verbs that we should retry on. By default, we only retry on methods which are considered to be idempotent (multiple requests with the same parameters end with the - same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`. + same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`. 
Set to a ``False`` value to retry on any verb. + .. warning:: + + Previously this parameter was named ``method_whitelist``, that + usage is deprecated in v1.26.0 and will be removed in v2.0. + :param iterable status_forcelist: A set of integer HTTP status codes that we should force a retry on. - A retry is initiated if the request method is in ``method_whitelist`` + A retry is initiated if the request method is in ``allowed_methods`` and the response status code is in ``status_forcelist``. By default, this is disabled with ``None``. @@ -148,13 +208,16 @@ class Retry(object): request. """ - DEFAULT_METHOD_WHITELIST = frozenset( + #: Default methods to be used for ``allowed_methods`` + DEFAULT_ALLOWED_METHODS = frozenset( ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"] ) + #: Default status codes to be used for ``status_forcelist`` RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) - DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(["Authorization"]) + #: Default headers to be used for ``remove_headers_on_redirect`` + DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"]) #: Maximum backoff time. BACKOFF_MAX = 120 @@ -166,20 +229,42 @@ def __init__( read=None, redirect=None, status=None, - method_whitelist=DEFAULT_METHOD_WHITELIST, + other=None, + allowed_methods=_Default, status_forcelist=None, backoff_factor=0, raise_on_redirect=True, raise_on_status=True, history=None, respect_retry_after_header=True, - remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST, + remove_headers_on_redirect=_Default, + # TODO: Deprecated, remove in v2.0 + method_whitelist=_Default, ): + if method_whitelist is not _Default: + if allowed_methods is not _Default: + raise ValueError( + "Using both 'allowed_methods' and " + "'method_whitelist' together is not allowed. " + "Instead only use 'allowed_methods'" + ) + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. 
Use 'allowed_methods' instead", + DeprecationWarning, + ) + allowed_methods = method_whitelist + if allowed_methods is _Default: + allowed_methods = self.DEFAULT_ALLOWED_METHODS + if remove_headers_on_redirect is _Default: + remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT + self.total = total self.connect = connect self.read = read self.status = status + self.other = other if redirect is False or total is False: redirect = 0 @@ -187,7 +272,7 @@ def __init__( self.redirect = redirect self.status_forcelist = status_forcelist or set() - self.method_whitelist = method_whitelist + self.allowed_methods = allowed_methods self.backoff_factor = backoff_factor self.raise_on_redirect = raise_on_redirect self.raise_on_status = raise_on_status @@ -204,7 +289,7 @@ def new(self, **kw): read=self.read, redirect=self.redirect, status=self.status, - method_whitelist=self.method_whitelist, + other=self.other, status_forcelist=self.status_forcelist, backoff_factor=self.backoff_factor, raise_on_redirect=self.raise_on_redirect, @@ -213,6 +298,23 @@ def new(self, **kw): remove_headers_on_redirect=self.remove_headers_on_redirect, respect_retry_after_header=self.respect_retry_after_header, ) + + # TODO: If already given in **kw we use what's given to us + # If not given we need to figure out what to pass. We decide + # based on whether our class has the 'method_whitelist' property + # and if so we pass the deprecated 'method_whitelist' otherwise + # we use 'allowed_methods'. Remove in v2.0 + if "method_whitelist" not in kw and "allowed_methods" not in kw: + if "method_whitelist" in self.__dict__: + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. 
Use 'allowed_methods' instead", + DeprecationWarning, + ) + params["method_whitelist"] = self.allowed_methods + else: + params["allowed_methods"] = self.allowed_methods + params.update(kw) return type(self)(**params) @@ -231,7 +333,7 @@ def from_int(cls, retries, redirect=True, default=None): return new_retries def get_backoff_time(self): - """ Formula for computing the current backoff + """Formula for computing the current backoff :rtype: float """ @@ -252,10 +354,17 @@ def parse_retry_after(self, retry_after): if re.match(r"^\s*[0-9]+\s*$", retry_after): seconds = int(retry_after) else: - retry_date_tuple = email.utils.parsedate(retry_after) + retry_date_tuple = email.utils.parsedate_tz(retry_after) if retry_date_tuple is None: raise InvalidHeader("Invalid Retry-After header: %s" % retry_after) - retry_date = time.mktime(retry_date_tuple) + if retry_date_tuple[9] is None: # Python 2 + # Assume UTC if no timezone was specified + # On Python2.7, parsedate_tz returns None for a timezone offset + # instead of 0 if no timezone is given, where mktime_tz treats + # a None timezone offset as local time. + retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:] + + retry_date = email.utils.mktime_tz(retry_date_tuple) seconds = retry_date - time.time() if seconds < 0: @@ -288,7 +397,7 @@ def _sleep_backoff(self): time.sleep(backoff) def sleep(self, response=None): - """ Sleep between retry attempts. + """Sleep between retry attempts. This method will respect a server's ``Retry-After`` response header and sleep the duration of the time requested. If that is not present, it @@ -304,7 +413,7 @@ def sleep(self, response=None): self._sleep_backoff() def _is_connection_error(self, err): - """ Errors when we're fairly sure that the server did not receive the + """Errors when we're fairly sure that the server did not receive the request, so it should be safe to retry. 
""" if isinstance(err, ProxyError): @@ -312,22 +421,33 @@ def _is_connection_error(self, err): return isinstance(err, ConnectTimeoutError) def _is_read_error(self, err): - """ Errors that occur after the request has been started, so we should + """Errors that occur after the request has been started, so we should assume that the server began processing it. """ return isinstance(err, (ReadTimeoutError, ProtocolError)) def _is_method_retryable(self, method): - """ Checks if a given HTTP method should be retried upon, depending if - it is included on the method whitelist. + """Checks if a given HTTP method should be retried upon, depending if + it is included in the allowed_methods """ - if self.method_whitelist and method.upper() not in self.method_whitelist: - return False + # TODO: For now favor if the Retry implementation sets its own method_whitelist + # property outside of our constructor to avoid breaking custom implementations. + if "method_whitelist" in self.__dict__: + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. Use 'allowed_methods' instead", + DeprecationWarning, + ) + allowed_methods = self.method_whitelist + else: + allowed_methods = self.allowed_methods + if allowed_methods and method.upper() not in allowed_methods: + return False return True def is_retry(self, method, status_code, has_retry_after=False): - """ Is this method/status code retryable? (Based on whitelists and control + """Is this method/status code retryable? (Based on allowlists and control variables such as the number of total retries to allow, whether to respect the Retry-After header, whether this header is present, and whether the returned status code is on the list of status codes to @@ -348,7 +468,14 @@ def is_retry(self, method, status_code, has_retry_after=False): def is_exhausted(self): """ Are we out of retries? 
""" - retry_counts = (self.total, self.connect, self.read, self.redirect, self.status) + retry_counts = ( + self.total, + self.connect, + self.read, + self.redirect, + self.status, + self.other, + ) retry_counts = list(filter(None, retry_counts)) if not retry_counts: return False @@ -364,7 +491,7 @@ def increment( _pool=None, _stacktrace=None, ): - """ Return a new Retry object with incremented retry counters. + """Return a new Retry object with incremented retry counters. :param response: A response object, or None, if the server did not return a response. @@ -386,6 +513,7 @@ def increment( read = self.read redirect = self.redirect status_count = self.status + other = self.other cause = "unknown" status = None redirect_location = None @@ -404,6 +532,11 @@ def increment( elif read is not None: read -= 1 + elif error: + # Other retry? + if other is not None: + other -= 1 + elif response and response.get_redirect_location(): # Redirect retry? if redirect is not None: @@ -414,7 +547,7 @@ def increment( else: # Incrementing because of a server error like a 500 in - # status_forcelist and a the given method is in the whitelist + # status_forcelist and the given method is in the allowed_methods cause = ResponseError.GENERIC_ERROR if response and response.status: if status_count is not None: @@ -432,6 +565,7 @@ def increment( read=read, redirect=redirect, status=status_count, + other=other, history=history, ) @@ -448,6 +582,20 @@ def __repr__(self): "read={self.read}, redirect={self.redirect}, status={self.status})" ).format(cls=type(self), self=self) + def __getattr__(self, item): + if item == "method_whitelist": + # TODO: Remove this deprecated alias in v2.0 + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. 
Use 'allowed_methods' instead", + DeprecationWarning, + ) + return self.allowed_methods + try: + return getattr(super(Retry, self), item) + except AttributeError: + return getattr(Retry, item) + # For backwards compatibility (equivalent to pre-v1.9): Retry.DEFAULT = Retry(3) diff --git a/pipenv/vendor/urllib3/util/ssl_.py b/pipenv/vendor/urllib3/util/ssl_.py index f7e2b70558..1cb5e7cdc1 100644 --- a/pipenv/vendor/urllib3/util/ssl_.py +++ b/pipenv/vendor/urllib3/util/ssl_.py @@ -1,21 +1,27 @@ from __future__ import absolute_import -import errno -import warnings + import hmac +import os import sys - +import warnings from binascii import hexlify, unhexlify from hashlib import md5, sha1, sha256 -from .url import IPV4_RE, BRACELESS_IPV6_ADDRZ_RE -from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning +from ..exceptions import ( + InsecurePlatformWarning, + ProxySchemeUnsupported, + SNIMissingWarning, + SSLError, +) from ..packages import six - +from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE SSLContext = None +SSLTransport = None HAS_SNI = False IS_PYOPENSSL = False IS_SECURETRANSPORT = False +ALPN_PROTOCOLS = ["http/1.1"] # Maps the length of a digest to a possible hash function producing this digest HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256} @@ -29,8 +35,8 @@ def _const_compare_digest_backport(a, b): Returns True if the digests match, and False otherwise. """ result = abs(len(a) - len(b)) - for l, r in zip(bytearray(a), bytearray(b)): - result |= l ^ r + for left, right in zip(bytearray(a), bytearray(b)): + result |= left ^ right return result == 0 @@ -38,8 +44,10 @@ def _const_compare_digest_backport(a, b): try: # Test for SSL features import ssl - from ssl import wrap_socket, CERT_REQUIRED from ssl import HAS_SNI # Has SNI? 
+ from ssl import CERT_REQUIRED, wrap_socket + + from .ssltransport import SSLTransport except ImportError: pass @@ -57,12 +65,18 @@ def _const_compare_digest_backport(a, b): try: - from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION + from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3 except ImportError: OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 OP_NO_COMPRESSION = 0x20000 +try: # OP_NO_TICKET was added in Python 3.6 + from ssl import OP_NO_TICKET +except ImportError: + OP_NO_TICKET = 0x4000 + + # A secure default. # Sources for more information on TLS ciphers: # @@ -249,7 +263,7 @@ def create_urllib3_context( ``ssl.CERT_REQUIRED``. :param options: Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, - ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``. + ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``. :param ciphers: Which cipher suites to allow the server to select. :returns: @@ -272,6 +286,11 @@ def create_urllib3_context( # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ # (issue #309) options |= OP_NO_COMPRESSION + # TLSv1.2 only. Unless set explicitly, do not request tickets. + # This may save some bandwidth on wire, and although the ticket is encrypted, + # there is a risk associated with it being on wire, + # if the server is not rotating its ticketing keys properly. + options |= OP_NO_TICKET context.options |= options @@ -293,6 +312,14 @@ def create_urllib3_context( # We do our own verification, including fingerprints and alternative # hostnames. So disable it here context.check_hostname = False + + # Enable logging of TLS session keys via defacto standard environment variable + # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values. 
+ if hasattr(context, "keylog_filename"): + sslkeylogfile = os.environ.get("SSLKEYLOGFILE") + if sslkeylogfile: + context.keylog_filename = sslkeylogfile + return context @@ -309,6 +336,7 @@ def ssl_wrap_socket( ca_cert_dir=None, key_password=None, ca_cert_data=None, + tls_in_tls=False, ): """ All arguments except for server_hostname, ssl_context, and ca_cert_dir have @@ -330,6 +358,8 @@ def ssl_wrap_socket( :param ca_cert_data: Optional string containing CA certificates in PEM format suitable for passing as the cadata parameter to SSLContext.load_verify_locations() + :param tls_in_tls: + Use SSLTransport to wrap the existing socket. """ context = ssl_context if context is None: @@ -341,14 +371,8 @@ def ssl_wrap_socket( if ca_certs or ca_cert_dir or ca_cert_data: try: context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data) - except IOError as e: # Platform-specific: Python 2.7 + except (IOError, OSError) as e: raise SSLError(e) - # Py33 raises FileNotFoundError which subclasses OSError - # These are not equivalent unless we check the errno attribute - except OSError as e: # Platform-specific: Python 3.3 and beyond - if e.errno == errno.ENOENT: - raise SSLError(e) - raise elif ssl_context is None and hasattr(context, "load_default_certs"): # try to load OS default certs; works well on Windows (require Python3.4+) @@ -366,16 +390,21 @@ def ssl_wrap_socket( else: context.load_cert_chain(certfile, keyfile, key_password) + try: + if hasattr(context, "set_alpn_protocols"): + context.set_alpn_protocols(ALPN_PROTOCOLS) + except NotImplementedError: + pass + # If we detect server_hostname is an IP address then the SNI # extension should not be used according to RFC3546 Section 3.1 - # We shouldn't warn the user if SNI isn't available but we would - # not be using SNI anyways due to IP address for server_hostname. 
- if ( - server_hostname is not None and not is_ipaddress(server_hostname) - ) or IS_SECURETRANSPORT: - if HAS_SNI and server_hostname is not None: - return context.wrap_socket(sock, server_hostname=server_hostname) - + use_sni_hostname = server_hostname and not is_ipaddress(server_hostname) + # SecureTransport uses server_hostname in certificate verification. + send_sni = (use_sni_hostname and HAS_SNI) or ( + IS_SECURETRANSPORT and server_hostname + ) + # Do not warn the user if server_hostname is an invalid SNI hostname. + if not HAS_SNI and use_sni_hostname: warnings.warn( "An HTTPS request has been made, but the SNI (Server Name " "Indication) extension to TLS is not available on this platform. " @@ -387,7 +416,13 @@ def ssl_wrap_socket( SNIMissingWarning, ) - return context.wrap_socket(sock) + if send_sni: + ssl_sock = _ssl_wrap_socket_impl( + sock, context, tls_in_tls, server_hostname=server_hostname + ) + else: + ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls) + return ssl_sock def is_ipaddress(hostname): @@ -412,3 +447,20 @@ def _is_key_file_encrypted(key_file): return True return False + + +def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None): + if tls_in_tls: + if not SSLTransport: + # Import error, ssl is not available. 
+ raise ProxySchemeUnsupported( + "TLS in TLS requires support for the 'ssl' module" + ) + + SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context) + return SSLTransport(sock, ssl_context, server_hostname) + + if server_hostname: + return ssl_context.wrap_socket(sock, server_hostname=server_hostname) + else: + return ssl_context.wrap_socket(sock) diff --git a/pipenv/vendor/urllib3/util/ssltransport.py b/pipenv/vendor/urllib3/util/ssltransport.py new file mode 100644 index 0000000000..1e41354f5d --- /dev/null +++ b/pipenv/vendor/urllib3/util/ssltransport.py @@ -0,0 +1,221 @@ +import io +import socket +import ssl + +from urllib3.exceptions import ProxySchemeUnsupported +from urllib3.packages import six + +SSL_BLOCKSIZE = 16384 + + +class SSLTransport: + """ + The SSLTransport wraps an existing socket and establishes an SSL connection. + + Contrary to Python's implementation of SSLSocket, it allows you to chain + multiple TLS connections together. It's particularly useful if you need to + implement TLS within TLS. + + The class supports most of the socket API operations. + """ + + @staticmethod + def _validate_ssl_context_for_tls_in_tls(ssl_context): + """ + Raises a ProxySchemeUnsupported if the provided ssl_context can't be used + for TLS in TLS. + + The only requirement is that the ssl_context provides the 'wrap_bio' + methods. + """ + + if not hasattr(ssl_context, "wrap_bio"): + if six.PY2: + raise ProxySchemeUnsupported( + "TLS in TLS requires SSLContext.wrap_bio() which isn't " + "supported on Python 2" + ) + else: + raise ProxySchemeUnsupported( + "TLS in TLS requires SSLContext.wrap_bio() which isn't " + "available on non-native SSLContext" + ) + + def __init__( + self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True + ): + """ + Create an SSLTransport around socket using the provided ssl_context. 
+ """ + self.incoming = ssl.MemoryBIO() + self.outgoing = ssl.MemoryBIO() + + self.suppress_ragged_eofs = suppress_ragged_eofs + self.socket = socket + + self.sslobj = ssl_context.wrap_bio( + self.incoming, self.outgoing, server_hostname=server_hostname + ) + + # Perform initial handshake. + self._ssl_io_loop(self.sslobj.do_handshake) + + def __enter__(self): + return self + + def __exit__(self, *_): + self.close() + + def fileno(self): + return self.socket.fileno() + + def read(self, len=1024, buffer=None): + return self._wrap_ssl_read(len, buffer) + + def recv(self, len=1024, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to recv") + return self._wrap_ssl_read(len) + + def recv_into(self, buffer, nbytes=None, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to recv_into") + if buffer and (nbytes is None): + nbytes = len(buffer) + elif nbytes is None: + nbytes = 1024 + return self.read(nbytes, buffer) + + def sendall(self, data, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to sendall") + count = 0 + with memoryview(data) as view, view.cast("B") as byte_view: + amount = len(byte_view) + while count < amount: + v = self.send(byte_view[count:]) + count += v + + def send(self, data, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to send") + response = self._ssl_io_loop(self.sslobj.write, data) + return response + + def makefile( + self, mode="r", buffering=None, encoding=None, errors=None, newline=None + ): + """ + Python's httpclient uses makefile and buffered io when reading HTTP + messages and we need to support it. + + This is unfortunately a copy and paste of socket.py makefile with small + changes to point to the socket directly. 
+ """ + if not set(mode) <= {"r", "w", "b"}: + raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,)) + + writing = "w" in mode + reading = "r" in mode or not writing + assert reading or writing + binary = "b" in mode + rawmode = "" + if reading: + rawmode += "r" + if writing: + rawmode += "w" + raw = socket.SocketIO(self, rawmode) + self.socket._io_refs += 1 + if buffering is None: + buffering = -1 + if buffering < 0: + buffering = io.DEFAULT_BUFFER_SIZE + if buffering == 0: + if not binary: + raise ValueError("unbuffered streams must be binary") + return raw + if reading and writing: + buffer = io.BufferedRWPair(raw, raw, buffering) + elif reading: + buffer = io.BufferedReader(raw, buffering) + else: + assert writing + buffer = io.BufferedWriter(raw, buffering) + if binary: + return buffer + text = io.TextIOWrapper(buffer, encoding, errors, newline) + text.mode = mode + return text + + def unwrap(self): + self._ssl_io_loop(self.sslobj.unwrap) + + def close(self): + self.socket.close() + + def getpeercert(self, binary_form=False): + return self.sslobj.getpeercert(binary_form) + + def version(self): + return self.sslobj.version() + + def cipher(self): + return self.sslobj.cipher() + + def selected_alpn_protocol(self): + return self.sslobj.selected_alpn_protocol() + + def selected_npn_protocol(self): + return self.sslobj.selected_npn_protocol() + + def shared_ciphers(self): + return self.sslobj.shared_ciphers() + + def compression(self): + return self.sslobj.compression() + + def settimeout(self, value): + self.socket.settimeout(value) + + def gettimeout(self): + return self.socket.gettimeout() + + def _decref_socketios(self): + self.socket._decref_socketios() + + def _wrap_ssl_read(self, len, buffer=None): + try: + return self._ssl_io_loop(self.sslobj.read, len, buffer) + except ssl.SSLError as e: + if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs: + return 0 # eof, return 0. 
+ else: + raise + + def _ssl_io_loop(self, func, *args): + """ Performs an I/O loop between incoming/outgoing and the socket.""" + should_loop = True + ret = None + + while should_loop: + errno = None + try: + ret = func(*args) + except ssl.SSLError as e: + if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE): + # WANT_READ, and WANT_WRITE are expected, others are not. + raise e + errno = e.errno + + buf = self.outgoing.read() + self.socket.sendall(buf) + + if errno is None: + should_loop = False + elif errno == ssl.SSL_ERROR_WANT_READ: + buf = self.socket.recv(SSL_BLOCKSIZE) + if buf: + self.incoming.write(buf) + else: + self.incoming.write_eof() + return ret diff --git a/pipenv/vendor/urllib3/util/timeout.py b/pipenv/vendor/urllib3/util/timeout.py index b61fea75c5..ff69593b05 100644 --- a/pipenv/vendor/urllib3/util/timeout.py +++ b/pipenv/vendor/urllib3/util/timeout.py @@ -1,9 +1,10 @@ from __future__ import absolute_import +import time + # The default socket timeout, used by httplib to indicate that no timeout was # specified by the user from socket import _GLOBAL_DEFAULT_TIMEOUT -import time from ..exceptions import TimeoutStateError @@ -17,22 +18,28 @@ class Timeout(object): - """ Timeout configuration. + """Timeout configuration. + + Timeouts can be defined as a default for a pool: + + .. code-block:: python + + timeout = Timeout(connect=2.0, read=7.0) + http = PoolManager(timeout=timeout) + response = http.request('GET', 'http://example.com/') - Timeouts can be defined as a default for a pool:: + Or per-request (which overrides the default for the pool): - timeout = Timeout(connect=2.0, read=7.0) - http = PoolManager(timeout=timeout) - response = http.request('GET', 'http://example.com/') + .. 
code-block:: python - Or per-request (which overrides the default for the pool):: + response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) - response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) + Timeouts can be disabled by setting all the parameters to ``None``: - Timeouts can be disabled by setting all the parameters to ``None``:: + .. code-block:: python - no_timeout = Timeout(connect=None, read=None) - response = http.request('GET', 'http://example.com/, timeout=no_timeout) + no_timeout = Timeout(connect=None, read=None) + response = http.request('GET', 'http://example.com/, timeout=no_timeout) :param total: @@ -43,7 +50,7 @@ class Timeout(object): Defaults to None. - :type total: integer, float, or None + :type total: int, float, or None :param connect: The maximum amount of time (in seconds) to wait for a connection @@ -53,7 +60,7 @@ class Timeout(object): `_. None will set an infinite timeout for connection attempts. - :type connect: integer, float, or None + :type connect: int, float, or None :param read: The maximum amount of time (in seconds) to wait between consecutive @@ -63,7 +70,7 @@ class Timeout(object): `_. None will set an infinite timeout. - :type read: integer, float, or None + :type read: int, float, or None .. note:: @@ -111,7 +118,7 @@ def __repr__(self): @classmethod def _validate_timeout(cls, value, name): - """ Check that a timeout attribute is valid. + """Check that a timeout attribute is valid. :param value: The timeout value to validate :param name: The name of the timeout attribute to validate. This is @@ -157,7 +164,7 @@ def _validate_timeout(cls, value, name): @classmethod def from_float(cls, timeout): - """ Create a new Timeout from a legacy timeout value. + """Create a new Timeout from a legacy timeout value. The timeout value used by httplib.py sets the same timeout on the connect(), and recv() socket requests. 
This creates a :class:`Timeout` @@ -172,7 +179,7 @@ def from_float(cls, timeout): return Timeout(read=timeout, connect=timeout) def clone(self): - """ Create a copy of the timeout object + """Create a copy of the timeout object Timeout properties are stored per-pool but each request needs a fresh Timeout object to ensure each one has its own start/stop configured. @@ -186,7 +193,7 @@ def clone(self): return Timeout(connect=self._connect, read=self._read, total=self.total) def start_connect(self): - """ Start the timeout clock, used during a connect() attempt + """Start the timeout clock, used during a connect() attempt :raises urllib3.exceptions.TimeoutStateError: if you attempt to start a timer that has been started already. @@ -197,7 +204,7 @@ def start_connect(self): return self._start_connect def get_connect_duration(self): - """ Gets the time elapsed since the call to :meth:`start_connect`. + """Gets the time elapsed since the call to :meth:`start_connect`. :return: Elapsed time in seconds. :rtype: float @@ -212,7 +219,7 @@ def get_connect_duration(self): @property def connect_timeout(self): - """ Get the value to use when setting a connection timeout. + """Get the value to use when setting a connection timeout. This will be a positive float or integer, the value None (never timeout), or the default system timeout. @@ -230,7 +237,7 @@ def connect_timeout(self): @property def read_timeout(self): - """ Get the value for the read timeout. + """Get the value for the read timeout. This assumes some time has elapsed in the connection timeout and computes the read timeout appropriately. 
diff --git a/pipenv/vendor/urllib3/util/url.py b/pipenv/vendor/urllib3/util/url.py index 793324e5fd..6ff238fe3c 100644 --- a/pipenv/vendor/urllib3/util/url.py +++ b/pipenv/vendor/urllib3/util/url.py @@ -1,11 +1,11 @@ from __future__ import absolute_import + import re from collections import namedtuple from ..exceptions import LocationParseError from ..packages import six - url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"] # We only want to normalize urls with an HTTP(S) scheme. diff --git a/pipenv/vendor/urllib3/util/wait.py b/pipenv/vendor/urllib3/util/wait.py index d71d2fd722..c280646c7b 100644 --- a/pipenv/vendor/urllib3/util/wait.py +++ b/pipenv/vendor/urllib3/util/wait.py @@ -1,7 +1,7 @@ import errno -from functools import partial import select import sys +from functools import partial try: from time import monotonic @@ -140,14 +140,14 @@ def wait_for_socket(*args, **kwargs): def wait_for_read(sock, timeout=None): - """ Waits for reading to be available on a given socket. + """Waits for reading to be available on a given socket. Returns True if the socket is readable, or False if the timeout expired. """ return wait_for_socket(sock, read=True, timeout=timeout) def wait_for_write(sock, timeout=None): - """ Waits for writing to be available on a given socket. + """Waits for writing to be available on a given socket. Returns True if the socket is readable, or False if the timeout expired. 
""" return wait_for_socket(sock, write=True, timeout=timeout) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 117553deca..6b448b035c 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -1,59 +1,57 @@ appdirs==1.4.4 -backports.shutil_get_terminal_size==1.0.0 +backports.shutil-get-terminal-size==1.0.0 backports.weakref==1.0.post1 click==7.1.2 click-completion==0.5.2 click-didyoumean==0.0.3 -colorama==0.4.3 +colorama==0.4.4 delegator.py==0.1.1 pexpect==4.8.0 ptyprocess==0.6.0 -python-dotenv==0.10.3 -first==2.0.1 -iso8601==0.1.12 +python-dotenv==0.15.0 +first==2.0.2 +iso8601==0.1.13 jinja2==2.11.2 -markupsafe==1.1.1 -parse==1.15.0 + markupsafe==1.1.1 +parse==1.18.0 pathlib2==2.3.5 - scandir==1.10 -pipdeptree==0.13.2 + scandir==1.10.0 +pipdeptree==1.0.0 pipreqs==0.4.10 docopt==0.6.2 yarg==0.1.9 pythonfinder==1.2.5 -requests==2.23.0 +requests==2.25.0 chardet==3.0.4 - idna==2.9 - urllib3==1.25.9 - certifi==2020.4.5.1 -requirementslib==1.5.15 - attrs==19.3.0 - distlib==0.3.0 - packaging==20.3 + idna==2.10 + urllib3==1.26.1 + certifi==2020.11.8 +requirementslib==1.5.16 + attrs==20.3.0 + distlib==0.3.1 + packaging==20.4 pyparsing==2.4.7 plette==0.2.3 tomlkit==0.7.0 shellingham==1.3.2 -six==1.14.0 -semver==2.9.0 -toml==0.10.1 -cached-property==1.5.1 +six==1.15.0 +semver==2.13.0 +toml==0.10.2 +cached-property==1.5.2 vistir==0.5.2 pip-shims==0.5.3 contextlib2==0.6.0.post1 funcsigs==1.0.2 enum34==1.1.10 -# yaspin==0.15.0 -yaspin==0.14.3 +yaspin==1.2.0 cerberus==1.3.2 -resolvelib==0.3.0 -backports.functools_lru_cache==1.6.1 -pep517==0.8.2 - zipp==0.6.0 - importlib_metadata==1.6.0 - importlib-resources==1.5.0 +resolvelib==0.5.2 +backports.functools-lru-cache==1.6.1 +pep517==0.9.1 + zipp==1.2.0 + importlib-metadata==2.0.0 + importlib-resources==3.3.0 more-itertools==5.0.0 -git+https://github.com/sarugaku/passa.git@master#egg=passa orderedmultidict==1.0.1 dparse==0.5.0 python-dateutil==2.8.1 diff --git a/pipenv/vendor/yaspin/LICENSE 
b/pipenv/vendor/yaspin/LICENSE index 2458104e5a..5106a1019d 100644 --- a/pipenv/vendor/yaspin/LICENSE +++ b/pipenv/vendor/yaspin/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2018 Pavlo Dmytrenko +Copyright (c) 2020 Pavlo Dmytrenko Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/pipenv/vendor/yaspin/__init__.py b/pipenv/vendor/yaspin/__init__.py index 57853a1389..818aeb4707 100644 --- a/pipenv/vendor/yaspin/__init__.py +++ b/pipenv/vendor/yaspin/__init__.py @@ -1,8 +1,10 @@ # -*- coding: utf-8 -*- +# +# :copyright: (c) 2020 by Pavlo Dmytrenko. +# :license: MIT, see LICENSE for more details. from __future__ import absolute_import -from .__version__ import __version__ # noqa from .api import kbi_safe_yaspin, yaspin from .base_spinner import Spinner diff --git a/pipenv/vendor/yaspin/__version__.py b/pipenv/vendor/yaspin/__version__.py deleted file mode 100644 index 23f00709c1..0000000000 --- a/pipenv/vendor/yaspin/__version__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.14.3" diff --git a/pipenv/vendor/yaspin/api.py b/pipenv/vendor/yaspin/api.py index f59ce00269..3e0c22ef91 100644 --- a/pipenv/vendor/yaspin/api.py +++ b/pipenv/vendor/yaspin/api.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- +# +# :copyright: (c) 2020 by Pavlo Dmytrenko. +# :license: MIT, see LICENSE for more details. """ yaspin.api ~~~~~~~~~~ This module implements the Yaspin API. - -:copyright: (c) 2018 by Pavlo Dmytrenko. -:license: MIT, see LICENSE for more details. """ import signal diff --git a/pipenv/vendor/yaspin/base_spinner.py b/pipenv/vendor/yaspin/base_spinner.py index 537ff79985..bd3b2668dd 100644 --- a/pipenv/vendor/yaspin/base_spinner.py +++ b/pipenv/vendor/yaspin/base_spinner.py @@ -1,4 +1,7 @@ # -*- coding: utf-8 -*- +# +# :copyright: (c) 2020 by Pavlo Dmytrenko. +# :license: MIT, see LICENSE for more details. 
""" yaspin.base_spinner diff --git a/pipenv/vendor/yaspin/compat.py b/pipenv/vendor/yaspin/compat.py index 744de5a1eb..b23050b689 100644 --- a/pipenv/vendor/yaspin/compat.py +++ b/pipenv/vendor/yaspin/compat.py @@ -1,4 +1,7 @@ # -*- coding: utf-8 -*- +# +# :copyright: (c) 2020 by Pavlo Dmytrenko. +# :license: MIT, see LICENSE for more details. """ yaspin.compat diff --git a/pipenv/vendor/yaspin/constants.py b/pipenv/vendor/yaspin/constants.py index b26baabe8f..4f2ee8bf20 100644 --- a/pipenv/vendor/yaspin/constants.py +++ b/pipenv/vendor/yaspin/constants.py @@ -1,4 +1,7 @@ # -*- coding: utf-8 -*- +# +# :copyright: (c) 2020 by Pavlo Dmytrenko. +# :license: MIT, see LICENSE for more details. """ yaspin.constants @@ -38,12 +41,14 @@ # Get spinner names: # $ < yaspin/data/spinners.json | jq '. | keys' SPINNER_ATTRS = [ + "aesthetic", "arc", "arrow", "arrow2", "arrow3", "balloon", "balloon2", + "betaWave", "bounce", "bouncingBall", "bouncingBar", @@ -65,6 +70,7 @@ "dots6", "dots7", "dots8", + "dots8Bit", "dots9", "dqpb", "earth", @@ -77,6 +83,7 @@ "layer", "line", "line2", + "material", "monkey", "moon", "noise", diff --git a/pipenv/vendor/yaspin/core.py b/pipenv/vendor/yaspin/core.py index 12960b3b74..a215a46f51 100644 --- a/pipenv/vendor/yaspin/core.py +++ b/pipenv/vendor/yaspin/core.py @@ -1,4 +1,7 @@ # -*- coding: utf-8 -*- +# +# :copyright: (c) 2020 by Pavlo Dmytrenko. +# :license: MIT, see LICENSE for more details. 
""" yaspin.yaspin @@ -9,6 +12,7 @@ from __future__ import absolute_import +import contextlib import functools import itertools import signal @@ -25,7 +29,6 @@ from .helpers import to_unicode from .termcolor import colored - colorama.init() @@ -83,6 +86,7 @@ def __init__( self._spin_thread = None self._last_frame = None self._stdout_lock = threading.Lock() + self._hidden_level = 0 # Signals @@ -266,6 +270,20 @@ def hide(self): # can be rewritten to sys.stdout.flush() + @contextlib.contextmanager + def hidden(self): + """Hide the spinner within a block, can be nested""" + if self._hidden_level == 0: + self.hide() + self._hidden_level += 1 + + try: + yield + finally: + self._hidden_level -= 1 + if self._hidden_level == 0: + self.show() + def show(self): """Show the hidden spinner.""" thr_is_alive = self._spin_thread and self._spin_thread.is_alive() @@ -461,9 +479,6 @@ def _set_attrs(attrs): @staticmethod def _set_spinner(spinner): - if not spinner: - sp = default_spinner - if hasattr(spinner, "frames") and hasattr(spinner, "interval"): if not spinner.frames or not spinner.interval: sp = default_spinner diff --git a/pipenv/vendor/yaspin/data/spinners.json b/pipenv/vendor/yaspin/data/spinners.json index b388b2a581..e3ff32d16b 100644 --- a/pipenv/vendor/yaspin/data/spinners.json +++ b/pipenv/vendor/yaspin/data/spinners.json @@ -274,6 +274,267 @@ "⠀⡀" ] }, + "dots8Bit": { + "interval": 80, + "frames": [ + "⠀", + "⠁", + "⠂", + "⠃", + "⠄", + "⠅", + "⠆", + "⠇", + "⡀", + "⡁", + "⡂", + "⡃", + "⡄", + "⡅", + "⡆", + "⡇", + "⠈", + "⠉", + "⠊", + "⠋", + "⠌", + "⠍", + "⠎", + "⠏", + "⡈", + "⡉", + "⡊", + "⡋", + "⡌", + "⡍", + "⡎", + "⡏", + "⠐", + "⠑", + "⠒", + "⠓", + "⠔", + "⠕", + "⠖", + "⠗", + "⡐", + "⡑", + "⡒", + "⡓", + "⡔", + "⡕", + "⡖", + "⡗", + "⠘", + "⠙", + "⠚", + "⠛", + "⠜", + "⠝", + "⠞", + "⠟", + "⡘", + "⡙", + "⡚", + "⡛", + "⡜", + "⡝", + "⡞", + "⡟", + "⠠", + "⠡", + "⠢", + "⠣", + "⠤", + "⠥", + "⠦", + "⠧", + "⡠", + "⡡", + "⡢", + "⡣", + "⡤", + "⡥", + "⡦", + "⡧", + "⠨", + "⠩", + 
"⠪", + "⠫", + "⠬", + "⠭", + "⠮", + "⠯", + "⡨", + "⡩", + "⡪", + "⡫", + "⡬", + "⡭", + "⡮", + "⡯", + "⠰", + "⠱", + "⠲", + "⠳", + "⠴", + "⠵", + "⠶", + "⠷", + "⡰", + "⡱", + "⡲", + "⡳", + "⡴", + "⡵", + "⡶", + "⡷", + "⠸", + "⠹", + "⠺", + "⠻", + "⠼", + "⠽", + "⠾", + "⠿", + "⡸", + "⡹", + "⡺", + "⡻", + "⡼", + "⡽", + "⡾", + "⡿", + "⢀", + "⢁", + "⢂", + "⢃", + "⢄", + "⢅", + "⢆", + "⢇", + "⣀", + "⣁", + "⣂", + "⣃", + "⣄", + "⣅", + "⣆", + "⣇", + "⢈", + "⢉", + "⢊", + "⢋", + "⢌", + "⢍", + "⢎", + "⢏", + "⣈", + "⣉", + "⣊", + "⣋", + "⣌", + "⣍", + "⣎", + "⣏", + "⢐", + "⢑", + "⢒", + "⢓", + "⢔", + "⢕", + "⢖", + "⢗", + "⣐", + "⣑", + "⣒", + "⣓", + "⣔", + "⣕", + "⣖", + "⣗", + "⢘", + "⢙", + "⢚", + "⢛", + "⢜", + "⢝", + "⢞", + "⢟", + "⣘", + "⣙", + "⣚", + "⣛", + "⣜", + "⣝", + "⣞", + "⣟", + "⢠", + "⢡", + "⢢", + "⢣", + "⢤", + "⢥", + "⢦", + "⢧", + "⣠", + "⣡", + "⣢", + "⣣", + "⣤", + "⣥", + "⣦", + "⣧", + "⢨", + "⢩", + "⢪", + "⢫", + "⢬", + "⢭", + "⢮", + "⢯", + "⣨", + "⣩", + "⣪", + "⣫", + "⣬", + "⣭", + "⣮", + "⣯", + "⢰", + "⢱", + "⢲", + "⢳", + "⢴", + "⢵", + "⢶", + "⢷", + "⣰", + "⣱", + "⣲", + "⣳", + "⣴", + "⣵", + "⣶", + "⣷", + "⢸", + "⢹", + "⢺", + "⢻", + "⢼", + "⢽", + "⢾", + "⢿", + "⣸", + "⣹", + "⣺", + "⣻", + "⣼", + "⣽", + "⣾", + "⣿" + ] + }, "line": { "interval": 130, "frames": [ @@ -742,6 +1003,103 @@ "🌏 " ] }, + "material": { + "interval": 17, + "frames": [ + "█▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "██▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "███▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "████▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "██████▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "██████▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "███████▁▁▁▁▁▁▁▁▁▁▁▁▁", + "████████▁▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "██████████▁▁▁▁▁▁▁▁▁▁", + "███████████▁▁▁▁▁▁▁▁▁", + "█████████████▁▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁▁██████████████▁▁▁▁", + "▁▁▁██████████████▁▁▁", + "▁▁▁▁█████████████▁▁▁", + "▁▁▁▁██████████████▁▁", + "▁▁▁▁██████████████▁▁", + "▁▁▁▁▁██████████████▁", + "▁▁▁▁▁██████████████▁", + 
"▁▁▁▁▁██████████████▁", + "▁▁▁▁▁▁██████████████", + "▁▁▁▁▁▁██████████████", + "▁▁▁▁▁▁▁█████████████", + "▁▁▁▁▁▁▁█████████████", + "▁▁▁▁▁▁▁▁████████████", + "▁▁▁▁▁▁▁▁████████████", + "▁▁▁▁▁▁▁▁▁███████████", + "▁▁▁▁▁▁▁▁▁███████████", + "▁▁▁▁▁▁▁▁▁▁██████████", + "▁▁▁▁▁▁▁▁▁▁██████████", + "▁▁▁▁▁▁▁▁▁▁▁▁████████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁███████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁██████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█████", + "█▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "██▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "██▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "███▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "████▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "█████▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "█████▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "██████▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "████████▁▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "███████████▁▁▁▁▁▁▁▁▁", + "████████████▁▁▁▁▁▁▁▁", + "████████████▁▁▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁▁▁█████████████▁▁▁▁", + "▁▁▁▁▁████████████▁▁▁", + "▁▁▁▁▁████████████▁▁▁", + "▁▁▁▁▁▁███████████▁▁▁", + "▁▁▁▁▁▁▁▁█████████▁▁▁", + "▁▁▁▁▁▁▁▁█████████▁▁▁", + "▁▁▁▁▁▁▁▁▁█████████▁▁", + "▁▁▁▁▁▁▁▁▁█████████▁▁", + "▁▁▁▁▁▁▁▁▁▁█████████▁", + "▁▁▁▁▁▁▁▁▁▁▁████████▁", + "▁▁▁▁▁▁▁▁▁▁▁████████▁", + "▁▁▁▁▁▁▁▁▁▁▁▁███████▁", + "▁▁▁▁▁▁▁▁▁▁▁▁███████▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁███████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁███████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁" + ] + }, "moon": { "interval": 80, "frames": [ @@ -878,7 +1236,7 @@ "، ", "′ ", " ´ ", - " ‾ ", + " ‾ ", " ⸌", " ⸊", " |", @@ -908,5 +1266,30 @@ "=", "≡" ] + }, + "betaWave": { + "interval": 80, + "frames": [ + "ρββββββ", + "βρβββββ", + "ββρββββ", + "βββρβββ", 
+ "ββββρββ", + "βββββρβ", + "ββββββρ" + ] + }, + "aesthetic": { + "interval": 80, + "frames": [ + "▰▱▱▱▱▱▱", + "▰▰▱▱▱▱▱", + "▰▰▰▱▱▱▱", + "▰▰▰▰▱▱▱", + "▰▰▰▰▰▱▱", + "▰▰▰▰▰▰▱", + "▰▰▰▰▰▰▰", + "▰▱▱▱▱▱▱" + ] } } diff --git a/pipenv/vendor/yaspin/helpers.py b/pipenv/vendor/yaspin/helpers.py index 49ce0d06ca..374f296cdf 100644 --- a/pipenv/vendor/yaspin/helpers.py +++ b/pipenv/vendor/yaspin/helpers.py @@ -1,4 +1,7 @@ # -*- coding: utf-8 -*- +# +# :copyright: (c) 2020 by Pavlo Dmytrenko. +# :license: MIT, see LICENSE for more details. """ yaspin.helpers diff --git a/pipenv/vendor/yaspin/signal_handlers.py b/pipenv/vendor/yaspin/signal_handlers.py index f38f5d6b78..d516bf966c 100644 --- a/pipenv/vendor/yaspin/signal_handlers.py +++ b/pipenv/vendor/yaspin/signal_handlers.py @@ -1,4 +1,7 @@ # -*- coding: utf-8 -*- +# +# :copyright: (c) 2020 by Pavlo Dmytrenko. +# :license: MIT, see LICENSE for more details. """ yaspin.signal_handlers diff --git a/pipenv/vendor/yaspin/spinners.py b/pipenv/vendor/yaspin/spinners.py index 60822a2c67..e4a1639c2d 100644 --- a/pipenv/vendor/yaspin/spinners.py +++ b/pipenv/vendor/yaspin/spinners.py @@ -1,4 +1,7 @@ # -*- coding: utf-8 -*- +# +# :copyright: (c) 2020 by Pavlo Dmytrenko. +# :license: MIT, see LICENSE for more details. """ yaspin.spinners @@ -7,20 +10,23 @@ A collection of cli spinners. 
""" -import codecs -import os +import pkgutil from collections import namedtuple -import json +try: + import simplejson as json +except ImportError: + import json -THIS_DIR = os.path.dirname(os.path.realpath(__file__)) -SPINNERS_PATH = os.path.join(THIS_DIR, "data/spinners.json") + +SPINNERS_DATA = pkgutil.get_data(__name__, "data/spinners.json").decode( + "utf-8" +) def _hook(dct): return namedtuple("Spinner", dct.keys())(*dct.values()) -with codecs.open(SPINNERS_PATH, encoding="utf-8") as f: - Spinners = json.load(f, object_hook=_hook) +Spinners = json.loads(SPINNERS_DATA, object_hook=_hook) diff --git a/pipenv/vendor/zipp.LICENSE b/pipenv/vendor/zipp.LICENSE index 5e795a61f3..353924be0e 100644 --- a/pipenv/vendor/zipp.LICENSE +++ b/pipenv/vendor/zipp.LICENSE @@ -1,7 +1,19 @@ Copyright Jason R. Coombs -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/pipenv/vendor/zipp.py b/pipenv/vendor/zipp.py index 8ab7d09908..25ef06e929 100644 --- a/pipenv/vendor/zipp.py +++ b/pipenv/vendor/zipp.py @@ -1,17 +1,15 @@ -# coding: utf-8 - -from __future__ import division - import io -import sys import posixpath import zipfile -import functools import itertools +import contextlib +import sys +import pathlib -import more_itertools - -__metaclass__ = type +if sys.version_info < (3, 7): + from collections import OrderedDict +else: + OrderedDict = dict def _parents(path): @@ -55,6 +53,97 @@ def _ancestry(path): path, tail = posixpath.split(path) +_dedupe = OrderedDict.fromkeys +"""Deduplicate an iterable in original order""" + + +def _difference(minuend, subtrahend): + """ + Return items in minuend not in subtrahend, retaining order + with O(1) lookup. + """ + return itertools.filterfalse(set(subtrahend).__contains__, minuend) + + +class CompleteDirs(zipfile.ZipFile): + """ + A ZipFile subclass that ensures that implied directories + are always included in the namelist. 
+ """ + + @staticmethod + def _implied_dirs(names): + parents = itertools.chain.from_iterable(map(_parents, names)) + as_dirs = (p + posixpath.sep for p in parents) + return _dedupe(_difference(as_dirs, names)) + + def namelist(self): + names = super(CompleteDirs, self).namelist() + return names + list(self._implied_dirs(names)) + + def _name_set(self): + return set(self.namelist()) + + def resolve_dir(self, name): + """ + If the name represents a directory, return that name + as a directory (with the trailing slash). + """ + names = self._name_set() + dirname = name + '/' + dir_match = name not in names and dirname in names + return dirname if dir_match else name + + @classmethod + def make(cls, source): + """ + Given a source (filename or zipfile), return an + appropriate CompleteDirs subclass. + """ + if isinstance(source, CompleteDirs): + return source + + if not isinstance(source, zipfile.ZipFile): + return cls(_pathlib_compat(source)) + + # Only allow for FastLookup when supplied zipfile is read-only + if 'r' not in source.mode: + cls = CompleteDirs + + source.__class__ = cls + return source + + +class FastLookup(CompleteDirs): + """ + ZipFile subclass to ensure implicit + dirs exist and are resolved rapidly. + """ + + def namelist(self): + with contextlib.suppress(AttributeError): + return self.__names + self.__names = super(FastLookup, self).namelist() + return self.__names + + def _name_set(self): + with contextlib.suppress(AttributeError): + return self.__lookup + self.__lookup = super(FastLookup, self)._name_set() + return self.__lookup + + +def _pathlib_compat(path): + """ + For path-like objects, convert to a filename for compatibility + on Python 3.6.1 and earlier. + """ + try: + return path.__fspath__() + except AttributeError: + return str(path) + + class Path: """ A pathlib-compatible interface for zip files. 
@@ -73,7 +162,7 @@ class Path: >>> zf.writestr('a.txt', 'content of a') >>> zf.writestr('b/c.txt', 'content of c') >>> zf.writestr('b/d/e.txt', 'content of e') - >>> zf.filename = 'abcde.zip' + >>> zf.filename = 'mem/abcde.zip' Path accepts the zipfile object itself or a filename @@ -85,9 +174,9 @@ class Path: >>> a, b = root.iterdir() >>> a - Path('abcde.zip', 'a.txt') + Path('mem/abcde.zip', 'a.txt') >>> b - Path('abcde.zip', 'b/') + Path('mem/abcde.zip', 'b/') name property: @@ -98,7 +187,7 @@ class Path: >>> c = b / 'c.txt' >>> c - Path('abcde.zip', 'b/c.txt') + Path('mem/abcde.zip', 'b/c.txt') >>> c.name 'c.txt' @@ -116,66 +205,91 @@ class Path: Coercion to string: - >>> str(c) - 'abcde.zip/b/c.txt' + >>> import os + >>> str(c).replace(os.sep, posixpath.sep) + 'mem/abcde.zip/b/c.txt' + + At the root, ``name``, ``filename``, and ``parent`` + resolve to the zipfile. Note these attributes are not + valid and will raise a ``ValueError`` if the zipfile + has no filename. + + >>> root.name + 'abcde.zip' + >>> str(root.filename).replace(os.sep, posixpath.sep) + 'mem/abcde.zip' + >>> str(root.parent) + 'mem' """ __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})" def __init__(self, root, at=""): - self.root = ( - root - if isinstance(root, zipfile.ZipFile) - else zipfile.ZipFile(self._pathlib_compat(root)) - ) + """ + Construct a Path from a ZipFile or filename. + + Note: When the source is an existing ZipFile object, + its type (__class__) will be mutated to a + specialized type. If the caller wishes to retain the + original type, the caller should either create a + separate ZipFile object or pass a filename. + """ + self.root = FastLookup.make(root) self.at = at - @staticmethod - def _pathlib_compat(path): + def open(self, mode='r', *args, pwd=None, **kwargs): """ - For path-like objects, convert to a filename for compatibility - on Python 3.6.1 and earlier. 
+ Open this entry as text or binary following the semantics + of ``pathlib.Path.open()`` by passing arguments through + to io.TextIOWrapper(). """ - try: - return path.__fspath__() - except AttributeError: - return str(path) + if self.is_dir(): + raise IsADirectoryError(self) + zip_mode = mode[0] + if not self.exists() and zip_mode == 'r': + raise FileNotFoundError(self) + stream = self.root.open(self.at, zip_mode, pwd=pwd) + if 'b' in mode: + if args or kwargs: + raise ValueError("encoding args invalid for binary operation") + return stream + return io.TextIOWrapper(stream, *args, **kwargs) @property - def open(self): - return functools.partial(self.root.open, self.at) + def name(self): + return pathlib.Path(self.at).name or self.filename.name @property - def name(self): - return posixpath.basename(self.at.rstrip("/")) + def filename(self): + return pathlib.Path(self.root.filename).joinpath(self.at) def read_text(self, *args, **kwargs): - with self.open() as strm: - return io.TextIOWrapper(strm, *args, **kwargs).read() + with self.open('r', *args, **kwargs) as strm: + return strm.read() def read_bytes(self): - with self.open() as strm: + with self.open('rb') as strm: return strm.read() def _is_child(self, path): return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/") def _next(self, at): - return Path(self.root, at) + return self.__class__(self.root, at) def is_dir(self): return not self.at or self.at.endswith("/") def is_file(self): - return not self.is_dir() + return self.exists() and not self.is_dir() def exists(self): - return self.at in self._names() + return self.at in self.root._name_set() def iterdir(self): if not self.is_dir(): raise ValueError("Can't listdir a file") - subs = map(self._next, self._names()) + subs = map(self._next, self.root.namelist()) return filter(self._is_child, subs) def __str__(self): @@ -184,37 +298,17 @@ def __str__(self): def __repr__(self): return self.__repr.format(self=self) - def joinpath(self, add): - add = 
self._pathlib_compat(add) - next = posixpath.join(self.at, add) - next_dir = posixpath.join(self.at, add, "") - names = self._names() - return self._next(next_dir if next not in names and next_dir in names else next) + def joinpath(self, *other): + next = posixpath.join(self.at, *map(_pathlib_compat, other)) + return self._next(self.root.resolve_dir(next)) __truediv__ = joinpath - @staticmethod - def _implied_dirs(names): - return more_itertools.unique_everseen( - parent + "/" - for name in names - for parent in _parents(name) - if parent + "/" not in names - ) - - @classmethod - def _add_implied_dirs(cls, names): - return names + list(cls._implied_dirs(names)) - @property def parent(self): + if not self.at: + return self.filename.parent parent_at = posixpath.dirname(self.at.rstrip('/')) if parent_at: parent_at += '/' return self._next(parent_at) - - def _names(self): - return self._add_implied_dirs(self.root.namelist()) - - if sys.version_info < (3,): - __div__ = __truediv__ diff --git a/tasks/release.py b/tasks/release.py index a5dbc1bf9b..201225c2f9 100644 --- a/tasks/release.py +++ b/tasks/release.py @@ -81,6 +81,7 @@ def _render_log(): "month_offset": "How many months to offset the release date by.", } + @invoke.task(help=release_help) def release(ctx, manual=False, local=False, dry_run=False, pre=False, tag=None, month_offset="0"): trunc_month = False @@ -128,7 +129,7 @@ def release(ctx, manual=False, local=False, dry_run=False, pre=False, tag=None, draft_rstfile = "CHANGELOG.draft.rst" markdown_path = pathlib.Path(draft_rstfile).with_suffix(".md") generate_markdown(ctx, source_rstfile=draft_rstfile) - content = clean_mdchangelog(ctx, markdown_path.as_posix()) + clean_mdchangelog(ctx, markdown_path.as_posix()) log(f"would generate markdown: {markdown_path.read_text()}") if pre and not dry_run: ctx.run(f'git tag -a v{version} -m "Version v{version}\n\n{tag_content}"') @@ -300,7 +301,6 @@ def bump_version(ctx, dry_run=False, dev=False, pre=False, tag=None, 
commit=Fals current_version = Version.parse(__version__) today = datetime.date.today() day_offset = 0 - tomorrow = today + datetime.timedelta(days=1) month_offset = int(month_offset) if month_offset: # if we are offsetting by a month, grab the first day of the month diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index d3cbbd6b04..f58e4ad184 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -8,7 +8,6 @@ import json import re import shutil -import sys # from tempfile import TemporaryDirectory import tarfile @@ -22,7 +21,6 @@ from urllib3.util import parse_url as urllib3_parse -from pipenv.utils import mkdir_p from pipenv.vendor.vistir.compat import NamedTemporaryFile, TemporaryDirectory from pipenv.vendor.vistir.contextmanagers import open_file from pipenv.vendor.requirementslib.models.lockfile import Lockfile, merge_items @@ -138,7 +136,9 @@ def clean_vendor(ctx, vendor_dir): def detect_vendored_libs(vendor_dir): retval = [] for item in vendor_dir.iterdir(): - if item.is_dir(): + if item.name == "__pycache__": + continue + elif item.is_dir(): retval.append(item.name) elif "LICENSE" in item.name or "COPYING" in item.name: continue @@ -493,7 +493,7 @@ def vendor(ctx, vendor_dir, package=None, rewrite=True): log("Running post-install cleanup...") post_install_cleanup(ctx, vendor_dir) # Detect the vendored packages/modules - vendored_libs = detect_vendored_libs(_get_vendor_dir(ctx)) + vendored_libs = detect_vendored_libs(_get_vendor_dir(ctx)) if not package else [package] log("Detected vendored libraries: %s" % ", ".join(vendored_libs)) # Apply pre-patches @@ -636,20 +636,13 @@ def download_licenses( requirements = packages_missing_licenses( ctx, vendor_dir, requirements_file, package=package ) - - with NamedTemporaryFile( - prefix="pipenv", suffix="vendor-reqs", delete=False, mode="w" - ) as fh: - fh.write("\n".join(requirements)) - new_requirements_file = fh.name - new_requirements_file = Path(new_requirements_file) 
log(requirements) tmp_dir = vendor_dir / "__tmp__" # TODO: Fix this whenever it gets sorted out (see https://github.com/pypa/pip/issues/5739) cmd = "pip download --no-binary :all: --only-binary requests_download --no-deps" enum_cmd = "pip download --no-deps" ctx.run("pip install flit") # needed for the next step - for req in requirements_file.read_text().splitlines(): + for req in requirements: if req.startswith("enum34"): exe_cmd = "{0} -d {1} {2}".format(enum_cmd, tmp_dir.as_posix(), req) else: @@ -677,7 +670,6 @@ def download_licenses( ) for sdist in tmp_dir.iterdir(): extract_license(vendor_dir, sdist) - new_requirements_file.unlink() drop_dir(tmp_dir) @@ -833,25 +825,25 @@ def unpin_file(contents): def unpin_and_copy_requirements(ctx, requirement_file, name="requirements.txt"): - with TemporaryDirectory() as tempdir: - target = Path(tempdir.name).joinpath("requirements.txt") - contents = unpin_file(requirement_file.read_text()) - target.write_text(contents) - env = { - "PIPENV_IGNORE_VIRTUALENVS": "1", - "PIPENV_NOSPIN": "1", - "PIPENV_PYTHON": "2.7", - } - with ctx.cd(tempdir.name): - ctx.run("pipenv install -r {0}".format(target.as_posix()), env=env, hide=True) - result = ctx.run("pipenv lock -r", env=env, hide=True).stdout.strip() - ctx.run("pipenv --rm", env=env, hide=True) - result = list(sorted([line.strip() for line in result.splitlines()[1:]])) - new_requirements = requirement_file.parent.joinpath(name) - requirement_file.rename( - requirement_file.parent.joinpath("{}.bak".format(name)) - ) - new_requirements.write_text("\n".join(result)) + tempdir = TemporaryDirectory(dir="D:/Workspace/tempdir") + target = Path(tempdir.name).joinpath("requirements.txt") + contents = unpin_file(requirement_file.read_text()) + target.write_text(contents) + env = { + "PIPENV_IGNORE_VIRTUALENVS": "1", + "PIPENV_NOSPIN": "1", + "PIPENV_PYTHON": "2.7", + } + with ctx.cd(tempdir.name): + ctx.run("pipenv install -r {0}".format(target.as_posix()), env=env, hide=True) + result 
= ctx.run("pipenv lock -r", env=env, hide=True).stdout.strip() + # ctx.run("pipenv --rm", env=env, hide=True) + result = list(sorted([line.strip() for line in result.splitlines()[1:]])) + new_requirements = requirement_file.parent.joinpath(name) + requirement_file.rename( + requirement_file.parent.joinpath("{}.bak".format(name)) + ) + new_requirements.write_text("\n".join(result)) return result diff --git a/tasks/vendoring/patches/vendor/dotenv-typing-imports.patch b/tasks/vendoring/patches/vendor/dotenv-typing-imports.patch deleted file mode 100644 index b0fcac2e8c..0000000000 --- a/tasks/vendoring/patches/vendor/dotenv-typing-imports.patch +++ /dev/null @@ -1,161 +0,0 @@ -diff --git a/pipenv/vendor/dotenv/__init__.py b/pipenv/vendor/dotenv/__init__.py -index 105a32a..b88d9bc 100644 ---- a/pipenv/vendor/dotenv/__init__.py -+++ b/pipenv/vendor/dotenv/__init__.py -@@ -1,6 +1,9 @@ --from typing import Any, Optional # noqa -+from .compat import IS_TYPE_CHECKING - from .main import load_dotenv, get_key, set_key, unset_key, find_dotenv, dotenv_values - -+if IS_TYPE_CHECKING: -+ from typing import Any, Optional -+ - - def load_ipython_extension(ipython): - # type: (Any) -> None -diff --git a/pipenv/vendor/dotenv/cli.py b/pipenv/vendor/dotenv/cli.py -index 235f329..d2a021a 100644 ---- a/pipenv/vendor/dotenv/cli.py -+++ b/pipenv/vendor/dotenv/cli.py -@@ -1,7 +1,6 @@ - import os - import sys - from subprocess import Popen --from typing import Any, Dict, List # noqa - - try: - import click -@@ -10,9 +9,13 @@ except ImportError: - 'Run pip install "python-dotenv[cli]" to fix this.') - sys.exit(1) - -+from .compat import IS_TYPE_CHECKING - from .main import dotenv_values, get_key, set_key, unset_key - from .version import __version__ - -+if IS_TYPE_CHECKING: -+ from typing import Any, List, Dict -+ - - @click.group() - @click.option('-f', '--file', default=os.path.join(os.getcwd(), '.env'), -diff --git a/pipenv/vendor/dotenv/compat.py b/pipenv/vendor/dotenv/compat.py -index 
394d3a3..61f555d 100644 ---- a/pipenv/vendor/dotenv/compat.py -+++ b/pipenv/vendor/dotenv/compat.py -@@ -1,5 +1,4 @@ - import sys --from typing import Text # noqa - - PY2 = sys.version_info[0] == 2 # type: bool - -@@ -9,6 +8,22 @@ else: - from io import StringIO # noqa - - -+def is_type_checking(): -+ # type: () -> bool -+ try: -+ from typing import TYPE_CHECKING -+ except ImportError: # pragma: no cover -+ return False -+ return TYPE_CHECKING -+ -+ -+IS_TYPE_CHECKING = is_type_checking() -+ -+ -+if IS_TYPE_CHECKING: -+ from typing import Text -+ -+ - def to_env(text): - # type: (Text) -> str - """ -diff --git a/pipenv/vendor/dotenv/main.py b/pipenv/vendor/dotenv/main.py -index 04d2241..06a210e 100644 ---- a/pipenv/vendor/dotenv/main.py -+++ b/pipenv/vendor/dotenv/main.py -@@ -7,16 +7,17 @@ import re - import shutil - import sys - import tempfile --from typing import (Dict, Iterator, List, Match, Optional, # noqa -- Pattern, Union, TYPE_CHECKING, Text, IO, Tuple) - import warnings - from collections import OrderedDict - from contextlib import contextmanager - --from .compat import StringIO, PY2, to_env -+from .compat import StringIO, PY2, to_env, IS_TYPE_CHECKING - from .parser import parse_stream - --if TYPE_CHECKING: # pragma: no cover -+if IS_TYPE_CHECKING: -+ from typing import ( -+ Dict, Iterator, Match, Optional, Pattern, Union, Text, IO, Tuple -+ ) - if sys.version_info >= (3, 6): - _PathLike = os.PathLike - else: -@@ -273,6 +274,14 @@ def find_dotenv(filename='.env', raise_error_if_not_found=False, usecwd=False): - - def load_dotenv(dotenv_path=None, stream=None, verbose=False, override=False, **kwargs): - # type: (Union[Text, _PathLike, None], Optional[_StringIO], bool, bool, Union[None, Text]) -> bool -+ """Parse a .env file and then load all the variables found as environment variables. -+ -+ - *dotenv_path*: absolute or relative path to .env file. -+ - *stream*: `StringIO` object with .env content. 
-+ - *verbose*: whether to output the warnings related to missing .env file etc. Defaults to `False`. -+ - *override*: where to override the system environment variables with the variables in `.env` file. -+ Defaults to `False`. -+ """ - f = dotenv_path or stream or find_dotenv() - return DotEnv(f, verbose=verbose, **kwargs).set_as_environment_variables(override=override) - -diff --git a/pipenv/vendor/dotenv/parser.py b/pipenv/vendor/dotenv/parser.py -index b63cb3a..034ebfd 100644 ---- a/pipenv/vendor/dotenv/parser.py -+++ b/pipenv/vendor/dotenv/parser.py -@@ -1,9 +1,14 @@ - import codecs - import re --from typing import (IO, Iterator, Match, NamedTuple, Optional, Pattern, # noqa -- Sequence, Text) - --from .compat import to_text -+from .compat import to_text, IS_TYPE_CHECKING -+ -+ -+if IS_TYPE_CHECKING: -+ from typing import ( # noqa:F401 -+ IO, Iterator, Match, NamedTuple, Optional, Pattern, Sequence, Text, -+ Tuple -+ ) - - - def make_regex(string, extra_flags=0): -@@ -25,9 +30,20 @@ _rest_of_line = make_regex(r"[^\r\n]*(?:\r|\n|\r\n)?") - _double_quote_escapes = make_regex(r"\\[\\'\"abfnrtv]") - _single_quote_escapes = make_regex(r"\\[\\']") - --Binding = NamedTuple("Binding", [("key", Optional[Text]), -- ("value", Optional[Text]), -- ("original", Text)]) -+ -+try: -+ # this is necessary because we only import these from typing -+ # when we are type checking, and the linter is upset if we -+ # re-import -+ import typing -+ Binding = typing.NamedTuple("Binding", [("key", typing.Optional[typing.Text]), -+ ("value", typing.Optional[typing.Text]), -+ ("original", typing.Text)]) -+except ImportError: # pragma: no cover -+ from collections import namedtuple -+ Binding = namedtuple("Binding", ["key", # type: ignore -+ "value", -+ "original"]) # type: Tuple[Optional[Text], Optional[Text], Text] - - - class Error(Exception): diff --git a/tasks/vendoring/patches/vendor/passa-close-session.patch b/tasks/vendoring/patches/vendor/passa-close-session.patch deleted file mode 
100644 index 2953f32b51..0000000000 --- a/tasks/vendoring/patches/vendor/passa-close-session.patch +++ /dev/null @@ -1,25 +0,0 @@ -diff --git a/pipenv/vendor/passa/internals/dependencies.py b/pipenv/vendor/passa/internals/dependencies.py -index 53b19b17..358cc33b 100644 ---- a/pipenv/vendor/passa/internals/dependencies.py -+++ b/pipenv/vendor/passa/internals/dependencies.py -@@ -154,6 +154,7 @@ def _get_dependencies_from_json(ireq, sources): - return dependencies - except Exception as e: - print("unable to read dependencies via {0} ({1})".format(url, e)) -+ session.close() - return - - -diff --git a/pipenv/vendor/passa/models/projects.py b/pipenv/vendor/passa/models/projects.py -index f6e037d6..c7807c05 100644 ---- a/pipenv/vendor/passa/models/projects.py -+++ b/pipenv/vendor/passa/models/projects.py -@@ -6,7 +6,7 @@ import collections - import io - import os - --import attr -+from pipenv.vendor import attr - import packaging.markers - import packaging.utils - import plette diff --git a/tasks/vendoring/patches/vendor/pipdeptree-updated-pip18.patch b/tasks/vendoring/patches/vendor/pipdeptree-update-pip-import.patch similarity index 79% rename from tasks/vendoring/patches/vendor/pipdeptree-updated-pip18.patch rename to tasks/vendoring/patches/vendor/pipdeptree-update-pip-import.patch index 5d5a381f3f..9cfcfd024b 100644 --- a/tasks/vendoring/patches/vendor/pipdeptree-updated-pip18.patch +++ b/tasks/vendoring/patches/vendor/pipdeptree-update-pip-import.patch @@ -1,8 +1,8 @@ diff --git a/pipenv/vendor/pipdeptree.py b/pipenv/vendor/pipdeptree.py -index 7820aa5..2082fc8 100644 +index 6f6f2a10..cd67538c 100644 --- a/pipenv/vendor/pipdeptree.py +++ b/pipenv/vendor/pipdeptree.py -@@ -13,11 +13,9 @@ try: +@@ -13,13 +13,11 @@ try: except ImportError: from ordereddict import OrderedDict @@ -15,6 +15,8 @@ index 7820aa5..2082fc8 100644 +sys.path.append(pardir) +from pipenv.vendor.pip_shims import get_installed_distributions, FrozenRequirement - import pkg_resources +-from 
pipenv.patched.notpip._vendor import pkg_resources ++import pkg_resources # inline: - + # from graphviz import backend, Digraph + diff --git a/tasks/vendoring/patches/vendor/update-attrs-import-path.patch b/tasks/vendoring/patches/vendor/update-attrs-import-path.patch deleted file mode 100644 index ffebbfebb6..0000000000 --- a/tasks/vendoring/patches/vendor/update-attrs-import-path.patch +++ /dev/null @@ -1,143 +0,0 @@ -diff --git a/pipenv/vendor/requirementslib/models/dependencies.py b/pipenv/vendor/requirementslib/models/dependencies.py -index 2608479a..1a610ce7 100644 ---- a/pipenv/vendor/requirementslib/models/dependencies.py -+++ b/pipenv/vendor/requirementslib/models/dependencies.py -@@ -6,7 +6,7 @@ import copy - import functools - import os - --import attr -+from pipenv.vendor import attr - import packaging.markers - import packaging.version - import pip_shims.shims -diff --git a/pipenv/vendor/requirementslib/models/lockfile.py b/pipenv/vendor/requirementslib/models/lockfile.py -index 3eabc504..841fc74c 100644 ---- a/pipenv/vendor/requirementslib/models/lockfile.py -+++ b/pipenv/vendor/requirementslib/models/lockfile.py -@@ -5,7 +5,7 @@ import copy - import itertools - import os - --import attr -+from pipenv.vendor import attr - import plette.lockfiles - import six - from vistir.compat import FileNotFoundError, JSONDecodeError, Path -diff --git a/pipenv/vendor/requirementslib/models/markers.py b/pipenv/vendor/requirementslib/models/markers.py -index 94410a20..b07e444c 100644 ---- a/pipenv/vendor/requirementslib/models/markers.py -+++ b/pipenv/vendor/requirementslib/models/markers.py -@@ -3,7 +3,7 @@ import itertools - import operator - import re - --import attr -+from pipenv.vendor import attr - import distlib.markers - import packaging.version - import six -diff --git a/pipenv/vendor/requirementslib/models/metadata.py b/pipenv/vendor/requirementslib/models/metadata.py -index b45b1f02..671a311b 100644 ---- a/pipenv/vendor/requirementslib/models/metadata.py 
-+++ b/pipenv/vendor/requirementslib/models/metadata.py -@@ -9,7 +9,7 @@ import os - import zipfile - from collections import defaultdict - --import attr -+from pipenv.vendor import attr - import dateutil.parser - import distlib.metadata - import distlib.wheel -diff --git a/pipenv/vendor/requirementslib/models/pipfile.py b/pipenv/vendor/requirementslib/models/pipfile.py -index 9c0aea4e..9bda73d4 100644 ---- a/pipenv/vendor/requirementslib/models/pipfile.py -+++ b/pipenv/vendor/requirementslib/models/pipfile.py -@@ -7,7 +7,7 @@ import itertools - import os - import sys - --import attr -+from pipenv.vendor import attr - import plette.models.base - import plette.pipfiles - import tomlkit -diff --git a/pipenv/vendor/requirementslib/models/project.py b/pipenv/vendor/requirementslib/models/project.py -index 7c1b0e81..4c73823c 100644 ---- a/pipenv/vendor/requirementslib/models/project.py -+++ b/pipenv/vendor/requirementslib/models/project.py -@@ -6,7 +6,7 @@ import collections - import io - import os - --import attr -+from pipenv.vendor import attr - import packaging.markers - import packaging.utils - import plette -diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py -index a0045f45..3ce8d8f5 100644 ---- a/pipenv/vendor/requirementslib/models/requirements.py -+++ b/pipenv/vendor/requirementslib/models/requirements.py -@@ -10,7 +10,7 @@ from contextlib import contextmanager - from distutils.sysconfig import get_python_lib - from functools import partial - --import attr -+from pipenv.vendor import attr - import pip_shims - import six - import vistir -diff --git a/pipenv/vendor/requirementslib/models/resolvers.py b/pipenv/vendor/requirementslib/models/resolvers.py -index 43590523..4554b299 100644 ---- a/pipenv/vendor/requirementslib/models/resolvers.py -+++ b/pipenv/vendor/requirementslib/models/resolvers.py -@@ -1,7 +1,7 @@ - # -*- coding=utf-8 -*- - from contextlib import contextmanager - --import attr 
-+from pipenv.vendor import attr - import six - from pip_shims.shims import Wheel - -diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py -index f0d40f29..9c97a394 100644 ---- a/pipenv/vendor/requirementslib/models/setup_info.py -+++ b/pipenv/vendor/requirementslib/models/setup_info.py -@@ -12,7 +12,7 @@ import shutil - import sys - from functools import partial - --import attr -+from pipenv.vendor import attr - import chardet - import packaging.specifiers - import packaging.utils -diff --git a/pipenv/vendor/requirementslib/models/url.py b/pipenv/vendor/requirementslib/models/url.py -index 3d5743e6..b0c98de8 100644 ---- a/pipenv/vendor/requirementslib/models/url.py -+++ b/pipenv/vendor/requirementslib/models/url.py -@@ -1,7 +1,7 @@ - # -*- coding=utf-8 -*- - from __future__ import absolute_import, print_function - --import attr -+from pipenv.vendor import attr - import pip_shims.shims - from orderedmultidict import omdict - from six.moves.urllib.parse import quote, unquote_plus, unquote as url_unquote -diff --git a/pipenv/vendor/requirementslib/models/vcs.py b/pipenv/vendor/requirementslib/models/vcs.py -index 0f96a331..273305db 100644 ---- a/pipenv/vendor/requirementslib/models/vcs.py -+++ b/pipenv/vendor/requirementslib/models/vcs.py -@@ -5,7 +5,7 @@ import importlib - import os - import sys - --import attr -+from pipenv.vendor import attr - import pip_shims - import six - diff --git a/tasks/vendoring/patches/vendor/yaspin-signal-handling.patch b/tasks/vendoring/patches/vendor/yaspin-signal-handling.patch index c6144f4073..56d417637a 100644 --- a/tasks/vendoring/patches/vendor/yaspin-signal-handling.patch +++ b/tasks/vendoring/patches/vendor/yaspin-signal-handling.patch @@ -1,8 +1,8 @@ diff --git a/pipenv/vendor/yaspin/core.py b/pipenv/vendor/yaspin/core.py -index d01fb98e..06b8b621 100644 +index b6d5d9d1..a215a46f 100644 --- a/pipenv/vendor/yaspin/core.py +++ b/pipenv/vendor/yaspin/core.py -@@ -16,6 
+16,9 @@ import sys +@@ -20,12 +20,17 @@ import sys import threading import time @@ -12,17 +12,15 @@ index d01fb98e..06b8b621 100644 from .base_spinner import default_spinner from .compat import PY2, basestring, builtin_str, bytes, iteritems, str from .constants import COLOR_ATTRS, COLOR_MAP, ENCODING, SPINNER_ATTRS -@@ -23,6 +26,9 @@ from .helpers import to_unicode + from .helpers import to_unicode from .termcolor import colored - +colorama.init() + -+ + class Yaspin(object): """Implements a context manager that spawns a thread - to write spinner frames into a tty (stdout) during -@@ -369,11 +375,14 @@ class Yaspin(object): +@@ -394,11 +399,14 @@ class Yaspin(object): # SIGKILL cannot be caught or ignored, and the receiving # process cannot perform any clean-up upon receiving this # signal. @@ -42,7 +40,7 @@ index d01fb98e..06b8b621 100644 for sig, sig_handler in iteritems(self._sigmap): # A handler for a particular signal, once set, remains -@@ -521,14 +530,12 @@ class Yaspin(object): +@@ -543,14 +551,12 @@ class Yaspin(object): @staticmethod def _hide_cursor(): @@ -60,19 +58,3 @@ index d01fb98e..06b8b621 100644 def _clear_line(): - sys.stdout.write("\033[K") + sys.stdout.write(chr(27) + "[K") -diff --git a/pipenv/vendor/yaspin/spinners.py b/pipenv/vendor/yaspin/spinners.py -index 9c3fa7b8..60822a2c 100644 ---- a/pipenv/vendor/yaspin/spinners.py -+++ b/pipenv/vendor/yaspin/spinners.py -@@ -11,10 +11,7 @@ import codecs - import os - from collections import namedtuple - --try: -- import simplejson as json --except ImportError: -- import json -+import json - - - THIS_DIR = os.path.dirname(os.path.realpath(__file__)) diff --git a/tests/integration/test_install_uri.py b/tests/integration/test_install_uri.py index a0ecf04d54..f505c40623 100644 --- a/tests/integration/test_install_uri.py +++ b/tests/integration/test_install_uri.py @@ -1,5 +1,6 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import, print_function +import os import pytest from flaky import 
flaky @@ -45,6 +46,24 @@ def test_git_vcs_install(PipenvInstance): } +@flaky +@pytest.mark.vcs +@pytest.mark.install +@pytest.mark.needs_internet +def test_git_vcs_install_with_env_var(PipenvInstance): + with PipenvInstance(chdir=True) as p: + p._pipfile.add("six", {"git": "git://${GIT_HOST}/benjaminp/six.git", "ref": "1.11.0"}) + os.environ["GIT_HOST"] = "github.com" + c = p.pipenv("install") + assert c.return_code == 0 + assert "six" in p.pipfile["packages"] + assert "git" in p.pipfile["packages"]["six"] + assert p.lockfile["default"]["six"] == { + "git": "git://${GIT_HOST}/benjaminp/six.git", + "ref": "15e31431af97e5e64b80af0a3f598d382bcdd49a", + } + + @flaky @pytest.mark.vcs @pytest.mark.install