Skip to content

Commit

Permalink
Merge pull request #10815 from pradyunsg/vendoring-upgrade
Browse the repository at this point in the history
pradyunsg authored Jan 27, 2022
2 parents bbcbfc6 + ab2ee1a commit dec279e
Show file tree
Hide file tree
Showing 120 changed files with 13,311 additions and 14,027 deletions.
1 change: 1 addition & 0 deletions MANIFEST.in
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ include pyproject.toml

include src/pip/_vendor/README.rst
include src/pip/_vendor/vendor.txt
include src/pip/_vendor/pyparsing/diagram/template.jinja2
recursive-include src/pip/_vendor *LICENSE*
recursive-include src/pip/_vendor *COPYING*

Expand Down
1 change: 1 addition & 0 deletions news/CacheControl.vendor.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Upgrade CacheControl to 0.12.10
1 change: 1 addition & 0 deletions news/certifi.vendor.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Upgrade certifi to 2021.10.8
1 change: 1 addition & 0 deletions news/distlib.vendor.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Upgrade distlib to 0.3.4
1 change: 1 addition & 0 deletions news/idna.vendor.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Upgrade idna to 3.3
1 change: 1 addition & 0 deletions news/msgpack.vendor.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Upgrade msgpack to 1.0.3
1 change: 1 addition & 0 deletions news/packaging.vendor.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Upgrade packaging to 21.3
1 change: 1 addition & 0 deletions news/platformdirs.vendor.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Upgrade platformdirs to 2.4.1
2 changes: 1 addition & 1 deletion news/pygments.vendor.rst
Original file line number Diff line number Diff line change
@@ -1 +1 @@
Add pygments 2.10.0 as a vendored dependency.
Add pygments 2.11.2 as a vendored dependency.
1 change: 1 addition & 0 deletions news/pyparsing.vendor.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Upgrade pyparsing to 3.0.7
1 change: 1 addition & 0 deletions news/rich-modifications.vendor.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Tree-trim unused portions of vendored rich, to reduce the distribution size.
2 changes: 1 addition & 1 deletion news/rich.vendor.rst
Original file line number Diff line number Diff line change
@@ -1 +1 @@
Add rich 10.14.0 as a vendored dependency.
Add rich 11.0.0 as a vendored dependency.
2 changes: 1 addition & 1 deletion news/typing_extensions.vendor.rst
Original file line number Diff line number Diff line change
@@ -1 +1 @@
Add typing_extensions 3.10.0.2 as a vendored dependency.
Add typing_extensions 4.0.1 as a vendored dependency.
2 changes: 1 addition & 1 deletion src/pip/_internal/network/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ def get(self, key: str) -> Optional[bytes]:
with open(path, "rb") as f:
return f.read()

def set(self, key: str, value: bytes) -> None:
def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None:
path = self._get_cache_path(key)
with suppressed_cache_errors():
ensure_dir(os.path.dirname(path))
Expand Down
6 changes: 2 additions & 4 deletions src/pip/_vendor/cachecontrol/LICENSE.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
Copyright 2015 Eric Larson
Copyright 2012-2021 Eric Larson

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
Expand All @@ -8,8 +8,6 @@ You may obtain a copy of the License at

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied.

WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
9 changes: 8 additions & 1 deletion src/pip/_vendor/cachecontrol/__init__.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,18 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0

"""CacheControl import Interface.
Make it easy to import from cachecontrol without long namespaces.
"""
__author__ = "Eric Larson"
__email__ = "[email protected]"
__version__ = "0.12.6"
__version__ = "0.12.10"

from .wrapper import CacheControl
from .adapter import CacheControlAdapter
from .controller import CacheController

import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
4 changes: 4 additions & 0 deletions src/pip/_vendor/cachecontrol/_cmd.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0

import logging

from pip._vendor import requests
Expand Down
10 changes: 7 additions & 3 deletions src/pip/_vendor/cachecontrol/adapter.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,20 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0

import types
import functools
import zlib

from pip._vendor.requests.adapters import HTTPAdapter

from .controller import CacheController
from .controller import CacheController, PERMANENT_REDIRECT_STATUSES
from .cache import DictCache
from .filewrapper import CallbackFileWrapper


class CacheControlAdapter(HTTPAdapter):
invalidating_methods = {"PUT", "DELETE"}
invalidating_methods = {"PUT", "PATCH", "DELETE"}

def __init__(
self,
Expand Down Expand Up @@ -93,7 +97,7 @@ def build_response(
response = cached_response

# We always cache the 301 responses
elif response.status == 301:
elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
self.controller.cache_response(request, response)
else:
# Wrap the response file with a wrapper that will cache the
Expand Down
8 changes: 6 additions & 2 deletions src/pip/_vendor/cachecontrol/cache.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0

"""
The cache object API for implementing caches. The default is a thread
safe in-memory dictionary.
Expand All @@ -10,7 +14,7 @@ class BaseCache(object):
def get(self, key):
raise NotImplementedError()

def set(self, key, value):
def set(self, key, value, expires=None):
raise NotImplementedError()

def delete(self, key):
Expand All @@ -29,7 +33,7 @@ def __init__(self, init_dict=None):
def get(self, key):
return self.data.get(key, None)

def set(self, key, value):
def set(self, key, value, expires=None):
with self.lock:
self.data.update({key: value})

Expand Down
4 changes: 4 additions & 0 deletions src/pip/_vendor/cachecontrol/caches/__init__.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,6 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0

from .file_cache import FileCache # noqa
from .redis_cache import RedisCache # noqa
6 changes: 5 additions & 1 deletion src/pip/_vendor/cachecontrol/caches/file_cache.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0

import hashlib
import os
from textwrap import dedent
Expand Down Expand Up @@ -114,7 +118,7 @@ def get(self, key):
except FileNotFoundError:
return None

def set(self, key, value):
def set(self, key, value, expires=None):
name = self._fn(key)

# Make sure the directory exists
Expand Down
4 changes: 4 additions & 0 deletions src/pip/_vendor/cachecontrol/caches/redis_cache.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0

from __future__ import division

from datetime import datetime
Expand Down
5 changes: 4 additions & 1 deletion src/pip/_vendor/cachecontrol/compat.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0

try:
from urllib.parse import urljoin
except ImportError:
Expand All @@ -9,7 +13,6 @@
except ImportError:
import pickle


# Handle the case where the requests module has been patched to not have
# urllib3 bundled as part of its source.
try:
Expand Down
67 changes: 53 additions & 14 deletions src/pip/_vendor/cachecontrol/controller.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0

"""
The httplib2 algorithms ported for use with requests.
"""
Expand All @@ -17,6 +21,8 @@

URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")

PERMANENT_REDIRECT_STATUSES = (301, 308)


def parse_uri(uri):
"""Parses a URI using the regex given in Appendix B of RFC 3986.
Expand All @@ -37,7 +43,7 @@ def __init__(
self.cache = DictCache() if cache is None else cache
self.cache_etags = cache_etags
self.serializer = serializer or Serializer()
self.cacheable_status_codes = status_codes or (200, 203, 300, 301)
self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308)

@classmethod
def _urlnorm(cls, uri):
Expand Down Expand Up @@ -147,17 +153,18 @@ def cached_request(self, request):
logger.warning("Cache entry deserialization failed, entry ignored")
return False

# If we have a cached 301, return it immediately. We don't
# need to test our response for other headers b/c it is
# If we have a cached permanent redirect, return it immediately. We
# don't need to test our response for other headers b/c it is
# intrinsically "cacheable" as it is Permanent.
#
# See:
# https://tools.ietf.org/html/rfc7231#section-6.4.2
#
# Client can try to refresh the value by repeating the request
# with cache busting headers as usual (ie no-cache).
if resp.status == 301:
if int(resp.status) in PERMANENT_REDIRECT_STATUSES:
msg = (
'Returning cached "301 Moved Permanently" response '
"Returning cached permanent redirect response "
"(ignoring date and etag information)"
)
logger.debug(msg)
Expand Down Expand Up @@ -261,6 +268,11 @@ def cache_response(self, request, response, body=None, status_codes=None):

response_headers = CaseInsensitiveDict(response.headers)

if "date" in response_headers:
date = calendar.timegm(parsedate_tz(response_headers["date"]))
else:
date = 0

# If we've been given a body, our response has a Content-Length, that
# Content-Length is valid then we can check to see if the body we've
# been given matches the expected size, and if it doesn't we'll just
Expand Down Expand Up @@ -304,35 +316,62 @@ def cache_response(self, request, response, body=None, status_codes=None):

# If we've been given an etag, then keep the response
if self.cache_etags and "etag" in response_headers:
expires_time = 0
if response_headers.get("expires"):
expires = parsedate_tz(response_headers["expires"])
if expires is not None:
expires_time = calendar.timegm(expires) - date

expires_time = max(expires_time, 14 * 86400)

logger.debug("etag object cached for {0} seconds".format(expires_time))
logger.debug("Caching due to etag")
self.cache.set(
cache_url, self.serializer.dumps(request, response, body=body)
cache_url,
self.serializer.dumps(request, response, body),
expires=expires_time,
)

# Add to the cache any 301s. We do this before looking that
# the Date headers.
elif response.status == 301:
logger.debug("Caching permanant redirect")
self.cache.set(cache_url, self.serializer.dumps(request, response))
# Add to the cache any permanent redirects. We do this before looking
# that the Date headers.
elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
logger.debug("Caching permanent redirect")
self.cache.set(cache_url, self.serializer.dumps(request, response, b""))

# Add to the cache if the response headers demand it. If there
# is no date header then we can't do anything about expiring
# the cache.
elif "date" in response_headers:
date = calendar.timegm(parsedate_tz(response_headers["date"]))
# cache when there is a max-age > 0
if "max-age" in cc and cc["max-age"] > 0:
logger.debug("Caching b/c date exists and max-age > 0")
expires_time = cc["max-age"]
self.cache.set(
cache_url, self.serializer.dumps(request, response, body=body)
cache_url,
self.serializer.dumps(request, response, body),
expires=expires_time,
)

# If the request can expire, it means we should cache it
# in the meantime.
elif "expires" in response_headers:
if response_headers["expires"]:
logger.debug("Caching b/c of expires header")
expires = parsedate_tz(response_headers["expires"])
if expires is not None:
expires_time = calendar.timegm(expires) - date
else:
expires_time = None

logger.debug(
"Caching b/c of expires header. expires in {0} seconds".format(
expires_time
)
)
self.cache.set(
cache_url, self.serializer.dumps(request, response, body=body)
cache_url,
self.serializer.dumps(request, response, body=body),
expires=expires_time,
)

def update_cached_response(self, request, response):
Expand Down
39 changes: 35 additions & 4 deletions src/pip/_vendor/cachecontrol/filewrapper.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
from io import BytesIO
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0

from tempfile import NamedTemporaryFile
import mmap


class CallbackFileWrapper(object):
Expand All @@ -11,10 +16,17 @@ class CallbackFileWrapper(object):
This class uses members with a double underscore (__) leading prefix so as
not to accidentally shadow an attribute.
The data is stored in a temporary file until it is all available. As long
as the temporary files directory is disk-based (sometimes it's a
memory-backed-``tmpfs`` on Linux), data will be unloaded to disk if memory
pressure is high. For small files the disk usually won't be used at all,
it'll all be in the filesystem memory cache, so there should be no
performance impact.
"""

def __init__(self, fp, callback):
self.__buf = BytesIO()
self.__buf = NamedTemporaryFile("rb+", delete=True)
self.__fp = fp
self.__callback = callback

Expand Down Expand Up @@ -49,7 +61,19 @@ def __is_fp_closed(self):

def _close(self):
if self.__callback:
self.__callback(self.__buf.getvalue())
if self.__buf.tell() == 0:
# Empty file:
result = b""
else:
# Return the data without actually loading it into memory,
# relying on Python's buffer API and mmap(). mmap() just gives
# a view directly into the filesystem's memory cache, so it
# doesn't result in duplicate memory use.
self.__buf.seek(0, 0)
result = memoryview(
mmap.mmap(self.__buf.fileno(), 0, access=mmap.ACCESS_READ)
)
self.__callback(result)

# We assign this to None here, because otherwise we can get into
# really tricky problems where the CPython interpreter dead locks
Expand All @@ -58,9 +82,16 @@ def _close(self):
# and allows the garbage collector to do it's thing normally.
self.__callback = None

# Closing the temporary file releases memory and frees disk space.
# Important when caching big files.
self.__buf.close()

def read(self, amt=None):
data = self.__fp.read(amt)
self.__buf.write(data)
if data:
# We may be dealing with b'', a sign that things are over:
# it's passed e.g. after we've already closed self.__buf.
self.__buf.write(data)
if self.__is_fp_closed():
self._close()

Expand Down
[Remaining changed files not rendered — page was captured before lazy loading completed.]

0 comments on commit dec279e

Please sign in to comment.