Skip to content

Commit

Permalink
Upgrade Dependencies (#4038)
Browse files Browse the repository at this point in the history
* Upgrade distlib to 0.2.4

* Upgrade distro to 1.0.0

* Upgrade html5lib to 1.0b10

* Upgrade requests to 2.11.1

* Upgrade CacheControl to 0.11.7

* Upgrade ipaddress to 1.0.17

* Upgrade pyparsing to 2.1.10

* Upgrade packaging to 16.8

* Add webencodings 0.5

* Add ordereddict 1.1
  • Loading branch information
dstufft authored Oct 30, 2016
1 parent 22f2e01 commit c8e8a99
Show file tree
Hide file tree
Showing 96 changed files with 7,077 additions and 2,557 deletions.
1 change: 1 addition & 0 deletions pip/_vendor/README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@ Modifications
* CacheControl has been modified to import its dependencies from pip._vendor
* packaging has been modified to import its dependencies from pip._vendor
* requests has been modified *not* to optionally load any C dependencies.
* Modified distro to delay importing argparse to avoid errors on 2.6


Debundling
Expand Down
2 changes: 1 addition & 1 deletion pip/_vendor/cachecontrol/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
"""
__author__ = 'Eric Larson'
__email__ = '[email protected]'
__version__ = '0.11.6'
__version__ = '0.11.7'

from .wrapper import CacheControl
from .adapter import CacheControlAdapter
Expand Down
18 changes: 13 additions & 5 deletions pip/_vendor/cachecontrol/adapter.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import types
import functools

from pip._vendor.requests.adapters import HTTPAdapter
Expand Down Expand Up @@ -55,6 +56,10 @@ def build_response(self, request, response, from_cache=False):
cached response
"""
if not from_cache and request.method == 'GET':
# Check for any heuristics that might update headers
# before trying to cache.
if self.heuristic:
response = self.heuristic.apply(response)

# apply any expiration heuristics
if response.status == 304:
Expand Down Expand Up @@ -82,11 +87,6 @@ def build_response(self, request, response, from_cache=False):
elif response.status == 301:
self.controller.cache_response(request, response)
else:
# Check for any heuristics that might update headers
# before trying to cache.
if self.heuristic:
response = self.heuristic.apply(response)

# Wrap the response file with a wrapper that will cache the
# response when the stream has been consumed.
response._fp = CallbackFileWrapper(
Expand All @@ -97,6 +97,14 @@ def build_response(self, request, response, from_cache=False):
response,
)
)
if response.chunked:
super_update_chunk_length = response._update_chunk_length

def _update_chunk_length(self):
super_update_chunk_length()
if self.chunk_left == 0:
self._fp._close()
response._update_chunk_length = types.MethodType(_update_chunk_length, response)

resp = super(CacheControlAdapter, self).build_response(
request, response
Expand Down
2 changes: 1 addition & 1 deletion pip/_vendor/cachecontrol/controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -290,7 +290,7 @@ def cache_response(self, request, response, body=None):
elif 'date' in response_headers:
# cache when there is a max-age > 0
if cc and cc.get('max-age'):
if int(cc['max-age']) > 0:
if cc['max-age'].isdigit() and int(cc['max-age']) > 0:
logger.debug('Caching b/c date exists and max-age > 0')
self.cache.set(
cache_url,
Expand Down
33 changes: 24 additions & 9 deletions pip/_vendor/cachecontrol/filewrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,19 +45,34 @@ def __is_fp_closed(self):
# TODO: Add some logging here...
return False

def _close(self):
if self.__callback:
self.__callback(self.__buf.getvalue())

# We assign this to None here, because otherwise we can get into
# really tricky problems where the CPython interpreter dead locks
# because the callback is holding a reference to something which
# has a __del__ method. Setting this to None breaks the cycle
# and allows the garbage collector to do it's thing normally.
self.__callback = None

def read(self, amt=None):
    """Read up to *amt* bytes from the wrapped file object, mirroring
    every byte into the internal buffer.

    When the wrapped stream reports itself exhausted, _close() is
    invoked so the buffered body can be handed to the callback.
    """
    chunk = self.__fp.read(amt)
    self.__buf.write(chunk)
    # Only the fp-closed check decides completion; the amount read is
    # irrelevant here.
    if not self.__is_fp_closed():
        return chunk
    self._close()
    return chunk

def _safe_read(self, amt):
data = self.__fp._safe_read(amt)
if amt == 2 and data == b'\r\n':
# urllib executes this read to toss the CRLF at the end
# of the chunk.
return data

self.__buf.write(data)
if self.__is_fp_closed():
if self.__callback:
self.__callback(self.__buf.getvalue())

# We assign this to None here, because otherwise we can get into
# really tricky problems where the CPython interpreter dead locks
# because the callback is holding a reference to something which
# has a __del__ method. Setting this to None breaks the cycle
# and allows the garbage collector to do it's thing normally.
self.__callback = None
self._close()

return data
6 changes: 6 additions & 0 deletions pip/_vendor/cachecontrol/serialize.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,12 @@ def prepare_response(self, request, cached):

body_raw = cached["response"].pop("body")

headers = CaseInsensitiveDict(data=cached['response']['headers'])
if headers.get('transfer-encoding', '') == 'chunked':
headers.pop('transfer-encoding')

cached['response']['headers'] = headers

try:
body = io.BytesIO(body_raw)
except TypeError:
Expand Down
2 changes: 1 addition & 1 deletion pip/_vendor/distlib/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
#
import logging

__version__ = '0.2.3'
__version__ = '0.2.4'

class DistlibException(Exception):
    """Base class for exceptions raised by distlib code."""
    pass
Expand Down
6 changes: 3 additions & 3 deletions pip/_vendor/distlib/_backport/shutil.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,8 @@ class ReadError(EnvironmentError):
"""Raised when an archive cannot be read"""

class RegistryError(Exception):
    """Raised when a registry operation with the archiving
    and unpacking registries fails"""
    # NOTE: the flattened diff showed both the old ("registery") and the
    # corrected docstring; only the corrected one belongs in the file —
    # a second string literal after the docstring would be dead code.


try:
Expand Down Expand Up @@ -648,7 +648,7 @@ def register_unpack_format(name, extensions, function, extra_args=None,
_UNPACK_FORMATS[name] = extensions, function, extra_args, description

def unregister_unpack_format(name):
    """Removes the pack format from the registry."""
    # Only the corrected docstring ("registry", not "registery") is kept;
    # the flattened diff showed both old and new lines, and a second
    # string literal here would be a dead expression.
    # Raises KeyError if *name* was never registered.
    del _UNPACK_FORMATS[name]

def _ensure_directory(path):
Expand Down
2 changes: 1 addition & 1 deletion pip/_vendor/distlib/_backport/tarfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -331,7 +331,7 @@ class ExtractError(TarError):
"""General exception for extract errors."""
pass
class ReadError(TarError):
    """Exception for unreadable tar archives."""
    # Only the corrected docstring ("unreadable", not "unreadble") is
    # kept; the flattened diff rendered both versions.
    pass
class CompressionError(TarError):
"""Exception for unavailable compression methods."""
Expand Down
17 changes: 13 additions & 4 deletions pip/_vendor/distlib/compat.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,11 @@
import re
import sys

try:
import ssl
except ImportError:
ssl = None

if sys.version_info[0] < 3: # pragma: no cover
from StringIO import StringIO
string_types = basestring,
Expand All @@ -30,8 +35,10 @@ def quote(s):
import urllib2
from urllib2 import (Request, urlopen, URLError, HTTPError,
HTTPBasicAuthHandler, HTTPPasswordMgr,
HTTPSHandler, HTTPHandler, HTTPRedirectHandler,
HTTPHandler, HTTPRedirectHandler,
build_opener)
if ssl:
from urllib2 import HTTPSHandler
import httplib
import xmlrpclib
import Queue as queue
Expand Down Expand Up @@ -66,8 +73,10 @@ def splituser(host):
from urllib.request import (urlopen, urlretrieve, Request, url2pathname,
pathname2url,
HTTPBasicAuthHandler, HTTPPasswordMgr,
HTTPSHandler, HTTPHandler, HTTPRedirectHandler,
HTTPHandler, HTTPRedirectHandler,
build_opener)
if ssl:
from urllib.request import HTTPSHandler
from urllib.error import HTTPError, URLError, ContentTooShortError
import http.client as httplib
import urllib.request as urllib2
Expand Down Expand Up @@ -101,7 +110,7 @@ def _dnsname_match(dn, hostname, max_wildcards=1):
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survery of established
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
Expand Down Expand Up @@ -366,7 +375,7 @@ def _get_normal_name(orig_enc):
def detect_encoding(readline):
"""
The detect_encoding() function is used to detect the encoding that should
be used to decode a Python source file. It requires one argment, readline,
be used to decode a Python source file. It requires one argument, readline,
in the same way as the tokenize() generator.
It will call readline a maximum of twice, and return the encoding used
Expand Down
2 changes: 1 addition & 1 deletion pip/_vendor/distlib/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -1308,5 +1308,5 @@ def make_dist(name, version, **kwargs):
md = Metadata(**kwargs)
md.name = name
md.version = version
md.summary = summary or 'Plaeholder for summary'
md.summary = summary or 'Placeholder for summary'
return Distribution(md)
8 changes: 5 additions & 3 deletions pip/_vendor/distlib/index.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,9 @@ def __init__(self, url=None):
self.gpg_home = None
self.rpc_proxy = None
with open(os.devnull, 'w') as sink:
for s in ('gpg2', 'gpg'):
# Use gpg by default rather than gpg2, as gpg2 insists on
# prompting for passwords
for s in ('gpg', 'gpg2'):
try:
rc = subprocess.check_call([s, '--version'], stdout=sink,
stderr=sink)
Expand All @@ -74,7 +76,7 @@ def _get_pypirc_command(self):
def read_configuration(self):
"""
Read the PyPI access configuration as supported by distutils, getting
PyPI to do the acutal work. This populates ``username``, ``password``,
PyPI to do the actual work. This populates ``username``, ``password``,
``realm`` and ``url`` attributes from the configuration.
"""
# get distutils to do the work
Expand Down Expand Up @@ -276,7 +278,7 @@ def upload_file(self, metadata, filename, signer=None, sign_password=None,
sha256_digest = hashlib.sha256(file_data).hexdigest()
d.update({
':action': 'file_upload',
'protcol_version': '1',
'protocol_version': '1',
'filetype': filetype,
'pyversion': pyversion,
'md5_digest': md5_digest,
Expand Down
41 changes: 30 additions & 11 deletions pip/_vendor/distlib/locators.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,13 @@
from . import DistlibException
from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url,
queue, quote, unescape, string_types, build_opener,
HTTPRedirectHandler as BaseRedirectHandler,
HTTPRedirectHandler as BaseRedirectHandler, text_type,
Request, HTTPError, URLError)
from .database import Distribution, DistributionPath, make_dist
from .metadata import Metadata
from .util import (cached_property, parse_credentials, ensure_slash,
split_filename, get_project_data, parse_requirement,
parse_name_and_version, ServerProxy)
parse_name_and_version, ServerProxy, normalize_name)
from .version import get_scheme, UnsupportedVersionError
from .wheel import Wheel, is_compatible

Expand Down Expand Up @@ -113,6 +113,28 @@ def __init__(self, scheme='default'):
# is set from the requirement passed to locate(). See issue #18 for
# why this can be useful to know.
self.matcher = None
self.errors = queue.Queue()

def get_errors(self):
    """
    Return any errors which have occurred, draining the error queue.
    """
    result = []
    while not self.errors.empty():  # pragma: no cover
        try:
            e = self.errors.get(False)
            result.append(e)
        # Bug fix: queue.Empty is a module-level exception — Queue
        # instances have no ``Empty`` attribute, so the previous
        # ``except self.errors.Empty:`` would itself raise
        # AttributeError if the (rare) empty race ever fired.
        except queue.Empty:
            continue
        self.errors.task_done()
    return result

def clear_errors(self):
    """
    Discard any errors which may have been logged so far.
    """
    # Draining via get_errors() empties the queue; the returned list
    # is deliberately ignored.
    self.get_errors()

def clear_cache(self):
self._cache.clear()
Expand Down Expand Up @@ -155,6 +177,7 @@ def get_project(self, name):
elif name in self._cache:
result = self._cache[name]
else:
self.clear_errors()
result = self._get_project(name)
self._cache[name] = result
return result
Expand Down Expand Up @@ -210,14 +233,7 @@ def convert_url_to_download_info(self, url, project_name):
"filename" and "url"; otherwise, None is returned.
"""
def same_project(name1, name2):
name1, name2 = name1.lower(), name2.lower()
if name1 == name2:
result = True
else:
# distribute replaces '-' by '_' in project names, so it
# can tell where the version starts in a filename.
result = name1.replace('_', '-') == name2.replace('_', '-')
return result
return normalize_name(name1) == normalize_name(name2)

result = None
scheme, netloc, path, params, query, frag = urlparse(url)
Expand Down Expand Up @@ -250,7 +266,7 @@ def same_project(name1, name2):
'python-version': ', '.join(
['.'.join(list(v[2:])) for v in wheel.pyver]),
}
except Exception as e:
except Exception as e: # pragma: no cover
logger.warning('invalid path for wheel: %s', path)
elif path.endswith(self.downloadable_extensions):
path = filename = posixpath.basename(path)
Expand Down Expand Up @@ -489,6 +505,7 @@ def _get_project(self, name):
# result['urls'].setdefault(md.version, set()).add(url)
# result['digests'][url] = self._get_digest(info)
except Exception as e:
self.errors.put(text_type(e))
logger.exception('JSON fetch failed: %s', e)
return result

Expand Down Expand Up @@ -714,6 +731,8 @@ def _fetch(self):
self._should_queue(link, url, rel)):
logger.debug('Queueing %s from %s', link, url)
self._to_fetch.put(link)
except Exception as e: # pragma: no cover
self.errors.put(text_type(e))
finally:
# always do this, to avoid hangs :-)
self._to_fetch.task_done()
Expand Down
Loading

0 comments on commit c8e8a99

Please sign in to comment.