Skip to content

Commit

Permalink
Update requirementslib
Browse files Browse the repository at this point in the history
Signed-off-by: Dan Ryan <[email protected]>
  • Loading branch information
techalchemy committed Sep 4, 2018
1 parent 12217e6 commit 597bc8c
Show file tree
Hide file tree
Showing 6 changed files with 197 additions and 76 deletions.
2 changes: 1 addition & 1 deletion pipenv/vendor/requirementslib/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# -*- coding=utf-8 -*-
__version__ = '1.1.5'
__version__ = '1.1.6.dev0'


from .exceptions import RequirementError
Expand Down
163 changes: 118 additions & 45 deletions pipenv/vendor/requirementslib/models/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,25 +2,20 @@
from __future__ import absolute_import, print_function, unicode_literals

import copy
import errno
import hashlib
import json
import os
import six
import sys

from contextlib import contextmanager

import requests
import pip_shims
import vistir

from appdirs import user_cache_dir
from packaging.requirements import Requirement

from pip_shims.shims import (
FAVORITE_HASH, Link, SafeFileCache, VcsSupport, is_file_url, url_to_path
)
from .utils import as_tuple, key_from_req, lookup_table
from .utils import as_tuple, key_from_req, lookup_table, get_pinned_version


if six.PY2:
Expand Down Expand Up @@ -194,30 +189,32 @@ def _reverse_dependencies(self, cache_keys):
for dep_name in self.cache[name][version_and_extras])


class HashCache(SafeFileCache):
"""Caches hashes of PyPI artifacts so we do not need to re-download them
class HashCache(pip_shims.SafeFileCache):
"""Caches hashes of PyPI artifacts so we do not need to re-download them.
Hashes are only cached when the URL appears to contain a hash in it and the cache key includes
the hash value returned from the server). This ought to avoid issues where the location on the
server changes."""
Hashes are only cached when the URL appears to contain a hash in it and the
cache key includes the hash value returned from the server). This ought to
avoid issues where the location on the server changes.
"""
def __init__(self, *args, **kwargs):
session = kwargs.pop('session', requests.session())
cache_dir = kwargs.pop('cache_dir', CACHE_DIR)
self.session = session
kwargs.setdefault('directory', os.path.join(CACHE_DIR, 'hash-cache'))
kwargs.setdefault('directory', os.path.join(cache_dir, 'hash-cache'))
super(HashCache, self).__init__(*args, **kwargs)

def get_hash(self, location):
# if there is no location hash (i.e., md5 / sha256 / etc) we don't want to store it
hash_value = None
vcs = VcsSupport()
vcs = pip_shims.VcsSupport()
orig_scheme = location.scheme
new_location = copy.deepcopy(location)
if orig_scheme in vcs.all_schemes:
new_location.url = new_location.url.split("+", 1)[-1]
can_hash = new_location.hash
if can_hash:
# hash url WITH fragment
hash_value = self.get(new_location.url)
hash_value = self._get_file_hash(new_location.url) if not new_location.url.startswith("ssh") else None
if not hash_value:
hash_value = self._get_file_hash(new_location)
hash_value = hash_value.encode('utf8')
Expand All @@ -226,41 +223,117 @@ def get_hash(self, location):
return hash_value.decode('utf8')

def _get_file_hash(self, location):
h = hashlib.new(FAVORITE_HASH)
with open_local_or_remote_file(location, self.session) as fp:
h = hashlib.new(pip_shims.FAVORITE_HASH)
with vistir.contextmanagers.open_file(location, self.session) as fp:
for chunk in iter(lambda: fp.read(8096), b""):
h.update(chunk)
return ":".join([FAVORITE_HASH, h.hexdigest()])
return ":".join([pip_shims.FAVORITE_HASH, h.hexdigest()])


@contextmanager
def open_local_or_remote_file(link, session):
"""
Open local or remote file for reading.
class _JSONCache(object):
"""A persistent cache backed by a JSON file.
The cache file is written to the appropriate user cache dir for the
current platform, i.e.
~/.cache/pip-tools/depcache-pyX.Y.json
:type link: pip._internal.index.Link
:type session: requests.Session
:raises ValueError: If link points to a local directory.
:return: a context manager to the opened file-like object
Where X.Y indicates the Python version.
"""
if isinstance(link, Link):
url = link.url_without_fragment
else:
url = link

if is_file_url(link):
# Local URL
local_path = url_to_path(url)
if os.path.isdir(local_path):
raise ValueError("Cannot open directory for read: {}".format(url))
filename_format = None

def __init__(self, cache_dir=CACHE_DIR):
    """Create the cache, ensuring *cache_dir* exists on disk.

    The cache file name embeds the running interpreter's
    ``major.minor`` version (via ``filename_format``) so caches
    written by different Python versions never collide.
    """
    vistir.mkdir_p(cache_dir)
    python_version = ".".join(str(digit) for digit in sys.version_info[:2])
    cache_filename = self.filename_format.format(
        python_version=python_version,
    )
    self._cache_file = os.path.join(cache_dir, cache_filename)
    # Loaded lazily on first access of the ``cache`` property.
    self._cache = None

@property
def cache(self):
    """The dictionary that is the actual in-memory cache.

    Lazily loads the cache from disk on first access; subsequent
    accesses return the already-loaded dictionary.
    """
    if self._cache is None:
        self.read_cache()
    return self._cache

def as_cache_key(self, ireq):
"""Given a requirement, return its cache key.
This behavior is a little weird in order to allow backwards
compatibility with cache files. For a requirement without extras, this
will return, for example::
("ipython", "2.1.0")
For a requirement with extras, the extras will be comma-separated and
appended to the version, inside brackets, like so::
("ipython", "2.1.0[nbconvert,notebook]")
"""
extras = tuple(sorted(ireq.extras))
if not extras:
extras_string = ""
else:
with open(local_path, 'rb') as local_file:
yield local_file
else:
# Remote URL
headers = {"Accept-Encoding": "identity"}
response = session.get(url, headers=headers, stream=True)
extras_string = "[{}]".format(",".join(extras))
name = key_from_req(ireq.req)
version = get_pinned_version(ireq)
return name, "{}{}".format(version, extras_string)

def read_cache(self):
    """Reads the cached contents into memory.

    A missing cache file is treated as an empty cache rather than an
    error.
    """
    if os.path.exists(self._cache_file):
        # NOTE(review): read_cache_file is defined elsewhere in this
        # module; presumably it parses the JSON envelope written by
        # write_cache — confirm its failure modes.
        self._cache = read_cache_file(self._cache_file)
    else:
        self._cache = {}

def write_cache(self):
    """Writes the cache to disk as JSON.

    Keys are sorted so the on-disk file is deterministic for identical
    cache contents.
    """
    doc = {
        '__format__': 1,  # on-disk format version marker
        'dependencies': self._cache,
    }
    with open(self._cache_file, 'w') as f:
        json.dump(doc, f, sort_keys=True)

def clear(self):
    """Empty the in-memory cache and persist the empty state to disk."""
    self._cache = {}
    self.write_cache()

def __contains__(self, ireq):
    """Return True if *ireq*'s cache key (version + extras) is cached."""
    pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
    return pkgversion_and_extras in self.cache.get(pkgname, {})

def __getitem__(self, ireq):
    """Return the cached value for *ireq*; raises KeyError if absent."""
    pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
    return self.cache[pkgname][pkgversion_and_extras]

def __setitem__(self, ireq, values):
    """Store *values* for *ireq*, writing through to disk immediately."""
    pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
    self.cache.setdefault(pkgname, {})
    self.cache[pkgname][pkgversion_and_extras] = values
    # Write-through: every assignment persists the whole cache.
    self.write_cache()

def __delitem__(self, ireq):
pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
try:
yield response.raw
finally:
response.close()
del self.cache[pkgname][pkgversion_and_extras]
except KeyError:
return
self.write_cache()

def get(self, ireq, default=None):
    """Return the cached value for *ireq*, or *default* if absent."""
    pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
    return self.cache.get(pkgname, {}).get(pkgversion_and_extras, default)


class RequiresPythonCache(_JSONCache):
    """Cache a candidate's Requires-Python information.
    """
    # One cache file per interpreter minor version, e.g.
    # pyreqcache-py3.7.json under the cache directory.
    filename_format = "pyreqcache-py{python_version}.json"
6 changes: 3 additions & 3 deletions pipenv/vendor/requirementslib/models/markers.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,12 +82,12 @@ def from_pipfile(cls, name, pipfile):
marker_strings = ["{0} {1}".format(k, pipfile[k]) for k in found_keys]
if pipfile.get("markers"):
marker_strings.append(pipfile.get("markers"))
markers = []
markers = set()
for marker in marker_strings:
markers.append(marker)
markers.add(marker)
combined_marker = None
try:
combined_marker = cls.make_marker(" and ".join(markers))
combined_marker = cls.make_marker(" and ".join(sorted(markers)))
except RequirementError:
pass
else:
Expand Down
10 changes: 4 additions & 6 deletions pipenv/vendor/requirementslib/models/pipfile.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
import os

from vistir.compat import Path

from .requirements import Requirement
Expand Down Expand Up @@ -30,10 +28,10 @@ def load(cls, path):
with pipfile_path.open(encoding="utf-8") as fp:
pipfile = super(Pipfile, cls).load(fp)
pipfile.dev_requirements = [
Requirement.from_pipfile(k, v) for k, v in pipfile.dev_packages.items()
Requirement.from_pipfile(k, v) for k, v in pipfile.get("dev-packages", {}).items()
]
pipfile.requirements = [
Requirement.from_pipfile(k, v) for k, v in pipfile.packages.items()
Requirement.from_pipfile(k, v) for k, v in pipfile.get("packages", {}).items()
]
pipfile.path = pipfile_path
return pipfile
Expand All @@ -57,10 +55,10 @@ def load(cls, path):
def dev_packages(self, as_requirements=True):
if as_requirements:
return self.dev_requirements
return self.dev_packages
return self.get('dev-packages', {})

@property
def packages(self, as_requirements=True):
if as_requirements:
return self.requirements
return self.packages
return self.get('packages', {})
31 changes: 22 additions & 9 deletions pipenv/vendor/requirementslib/models/requirements.py
Original file line number Diff line number Diff line change
Expand Up @@ -517,8 +517,11 @@ def vcs_uri(self):
return uri

def get_commit_hash(self, src_dir=None):
is_local = False
if is_file_url(self.uri):
is_local = True
src_dir = os.environ.get('SRC_DIR', None) if not src_dir else src_dir
if not src_dir:
if not src_dir and not is_local:
_src_dir = TemporaryDirectory()
atexit.register(_src_dir.cleanup)
src_dir = _src_dir.name
Expand All @@ -530,12 +533,16 @@ def get_commit_hash(self, src_dir=None):
checkout_directory=checkout_dir,
vcs_type=self.vcs
)
vcsrepo.obtain()
if not is_local:
vcsrepo.obtain()
return vcsrepo.get_commit_hash()

def update_repo(self, src_dir=None, ref=None):
is_local = False
if is_file_url(self.uri):
is_local = True
src_dir = os.environ.get('SRC_DIR', None) if not src_dir else src_dir
if not src_dir:
if not src_dir and not is_local:
_src_dir = TemporaryDirectory()
atexit.register(_src_dir.cleanup)
src_dir = _src_dir.name
Expand All @@ -548,12 +555,17 @@ def update_repo(self, src_dir=None, ref=None):
checkout_directory=checkout_dir,
vcs_type=self.vcs
)
if not os.path.exists(checkout_dir):
vcsrepo.obtain()
else:
vcsrepo.update()
if not is_local:
if not os.path.exists(checkout_dir):
vcsrepo.obtain()
else:
vcsrepo.update()
return vcsrepo.get_commit_hash()

def lock_vcs_ref(self):
self.ref = self.get_commit_hash()
self.req.revision = self.ref

@req.default
def get_requirement(self):
name = self.name or self.link.egg_fragment
Expand Down Expand Up @@ -884,12 +896,13 @@ def from_pipfile(cls, name, pipfile):
def as_line(self, sources=None, include_hashes=True, include_extras=True):
"""Format this requirement as a line in requirements.txt.
If `sources` provided, it should be a sequence of mappings, containing
If ``sources`` provided, it should be a sequence of mappings, containing
all possible sources to be used for this requirement.
If `sources` is omitted or falsy, no index information will be included
If ``sources`` is omitted or falsy, no index information will be included
in the requirement line.
"""

include_specifiers = True if self.specifiers else False
if self.is_vcs:
include_extras = False
Expand Down
Loading

0 comments on commit 597bc8c

Please sign in to comment.