From 2553ebcbf7e2574c997a9164234e0adf077ffcac Mon Sep 17 00:00:00 2001 From: Kenneth Reitz Date: Thu, 28 Sep 2017 14:25:43 -0400 Subject: [PATCH] vendor pip-tools Signed-off-by: Kenneth Reitz --- pipenv/patched/piptools/__init__.py | 0 pipenv/patched/piptools/__main__.py | 16 + pipenv/patched/piptools/_compat/__init__.py | 13 + pipenv/patched/piptools/_compat/contextlib.py | 123 ++++ pipenv/patched/piptools/_compat/tempfile.py | 86 +++ pipenv/patched/piptools/cache.py | 164 +++++ pipenv/patched/piptools/click.py | 6 + pipenv/patched/piptools/exceptions.py | 36 + pipenv/patched/piptools/io.py | 649 ++++++++++++++++++ pipenv/patched/piptools/locations.py | 19 + pipenv/patched/piptools/logging.py | 35 + .../patched/piptools/repositories/__init__.py | 3 + pipenv/patched/piptools/repositories/base.py | 40 ++ pipenv/patched/piptools/repositories/local.py | 65 ++ pipenv/patched/piptools/repositories/pypi.py | 232 +++++++ pipenv/patched/piptools/resolver.py | 296 ++++++++ pipenv/patched/piptools/scripts/__init__.py | 0 pipenv/patched/piptools/scripts/compile.py | 258 +++++++ pipenv/patched/piptools/scripts/sync.py | 74 ++ pipenv/patched/piptools/sync.py | 165 +++++ pipenv/patched/piptools/utils.py | 247 +++++++ pipenv/patched/piptools/writer.py | 151 ++++ setup.py | 1 - 23 files changed, 2678 insertions(+), 1 deletion(-) create mode 100755 pipenv/patched/piptools/__init__.py create mode 100755 pipenv/patched/piptools/__main__.py create mode 100755 pipenv/patched/piptools/_compat/__init__.py create mode 100755 pipenv/patched/piptools/_compat/contextlib.py create mode 100755 pipenv/patched/piptools/_compat/tempfile.py create mode 100755 pipenv/patched/piptools/cache.py create mode 100755 pipenv/patched/piptools/click.py create mode 100755 pipenv/patched/piptools/exceptions.py create mode 100755 pipenv/patched/piptools/io.py create mode 100755 pipenv/patched/piptools/locations.py create mode 100755 pipenv/patched/piptools/logging.py create mode 100755 pipenv/patched/piptools/repositories/__init__.py create mode 100755 pipenv/patched/piptools/repositories/base.py create mode 100755 pipenv/patched/piptools/repositories/local.py create mode 100755 pipenv/patched/piptools/repositories/pypi.py create mode 100755 pipenv/patched/piptools/resolver.py create mode 100755 pipenv/patched/piptools/scripts/__init__.py create mode 100755 pipenv/patched/piptools/scripts/compile.py create mode 100755 pipenv/patched/piptools/scripts/sync.py create mode 100755 pipenv/patched/piptools/sync.py create mode 100755 pipenv/patched/piptools/utils.py create mode 100755 pipenv/patched/piptools/writer.py diff --git a/pipenv/patched/piptools/__init__.py b/pipenv/patched/piptools/__init__.py new file mode 100755 index 0000000000..e69de29bb2 diff --git a/pipenv/patched/piptools/__main__.py b/pipenv/patched/piptools/__main__.py new file mode 100755 index 0000000000..22bd78797a --- /dev/null +++ b/pipenv/patched/piptools/__main__.py @@ -0,0 +1,16 @@ +import click +from piptools.scripts import compile, sync + + +@click.group() +def cli(): + pass + + +cli.add_command(compile.cli, 'compile') +cli.add_command(sync.cli, 'sync') + + +# Enable ``python -m piptools ...``. 
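The group wired up above can be exercised in-process with click's test runner rather than a subprocess. A minimal sketch (the asserted command names simply mirror the add_command() calls above):

    from click.testing import CliRunner
    from piptools.__main__ import cli

    runner = CliRunner()
    result = runner.invoke(cli, ['--help'])
    assert 'compile' in result.output
    assert 'sync' in result.output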
+if __name__ == '__main__':  # pragma: no cover
+    cli()
diff --git a/pipenv/patched/piptools/_compat/__init__.py b/pipenv/patched/piptools/_compat/__init__.py
new file mode 100755
index 0000000000..4ec6d61b37
--- /dev/null
+++ b/pipenv/patched/piptools/_compat/__init__.py
@@ -0,0 +1,13 @@
+# coding: utf-8
+# flake8: noqa
+from __future__ import (absolute_import, division, print_function,
+                        unicode_literals)
+
+import six
+
+if six.PY2:
+    from .tempfile import TemporaryDirectory
+    from .contextlib import ExitStack
+else:
+    from tempfile import TemporaryDirectory
+    from contextlib import ExitStack
diff --git a/pipenv/patched/piptools/_compat/contextlib.py b/pipenv/patched/piptools/_compat/contextlib.py
new file mode 100755
index 0000000000..b0e161bb8c
--- /dev/null
+++ b/pipenv/patched/piptools/_compat/contextlib.py
@@ -0,0 +1,123 @@
+# coding: utf-8
+from __future__ import (absolute_import, division, print_function,
+                        unicode_literals)
+
+import sys
+from collections import deque
+
+
+# Inspired by discussions on http://bugs.python.org/issue13585
+class ExitStack(object):
+    """Context manager for dynamic management of a stack of exit callbacks
+
+    For example:
+
+        with ExitStack() as stack:
+            files = [stack.enter_context(open(fname)) for fname in filenames]
+            # All opened files will automatically be closed at the end of
+            # the with statement, even if attempts to open files later
+            # in the list throw an exception
+
+    """
+    def __init__(self):
+        self._exit_callbacks = deque()
+
+    def pop_all(self):
+        """Preserve the context stack by transferring it to a new instance"""
+        new_stack = type(self)()
+        new_stack._exit_callbacks = self._exit_callbacks
+        self._exit_callbacks = deque()
+        return new_stack
+
+    def _push_cm_exit(self, cm, cm_exit):
+        """Helper to correctly register callbacks to __exit__ methods"""
+        def _exit_wrapper(*exc_details):
+            return cm_exit(cm, *exc_details)
+        _exit_wrapper.__self__ = cm
+        self.push(_exit_wrapper)
+
+    def push(self, exit):
+        """Registers a callback with the standard __exit__ method signature
+
+        Can suppress exceptions the same way __exit__ methods can.
+
+        Also accepts any object with an __exit__ method (registering the
+        method instead of the object itself)
+        """
+        # We use an unbound method rather than a bound method to follow
+        # the standard lookup behaviour for special methods
+        _cb_type = type(exit)
+        try:
+            exit_method = _cb_type.__exit__
+        except AttributeError:
+            # Not a context manager, so assume it's a callable
+            self._exit_callbacks.append(exit)
+        else:
+            self._push_cm_exit(exit, exit_method)
+        return exit  # Allow use as a decorator
+
+    def callback(self, callback, *args, **kwds):
+        """Registers an arbitrary callback and arguments.
+
+        Cannot suppress exceptions.
+        """
+        def _exit_wrapper(exc_type, exc, tb):
+            callback(*args, **kwds)
+        # We changed the signature, so using @wraps is not appropriate, but
+        # setting __wrapped__ may still help with introspection
+        _exit_wrapper.__wrapped__ = callback
+        self.push(_exit_wrapper)
+        return callback  # Allow use as a decorator
+
+    def enter_context(self, cm):
+        """Enters the supplied context manager
+
+        If successful, also pushes its __exit__ method as a callback and
+        returns the result of the __enter__ method.
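For illustration, this mirrors the docstring example above; the filenames are hypothetical:

    from piptools._compat import ExitStack

    filenames = ['requirements.in', 'dev-requirements.in']
    with ExitStack() as stack:
        files = [stack.enter_context(open(fname)) for fname in filenames]
        contents = [f.read() for f in files]
    # Every successfully opened file is closed here, even if a later
    # open() raised.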
+ """ + # We look up the special methods on the type to match the with + # statement + _cm_type = type(cm) + _exit = _cm_type.__exit__ + result = _cm_type.__enter__(cm) + self._push_cm_exit(cm, _exit) + return result + + def close(self): + """Immediately unwind the context stack""" + self.__exit__(None, None, None) + + def __enter__(self): + return self + + def __exit__(self, *exc_details): + if not self._exit_callbacks: + return + + # This looks complicated, but it is really just + # setting up a chain of try-expect statements to ensure + # that outer callbacks still get invoked even if an + # inner one throws an exception + def _invoke_next_callback(exc_details): + # Callbacks are removed from the list in FIFO order + # but the recursion means they're invoked in LIFO order + cb = self._exit_callbacks.popleft() + if not self._exit_callbacks: + # Innermost callback is invoked directly + return cb(*exc_details) + # More callbacks left, so descend another level in the stack + try: + suppress_exc = _invoke_next_callback(exc_details) + except: + suppress_exc = cb(*sys.exc_info()) + # Check if this cb suppressed the inner exception + if not suppress_exc: + raise + else: + # Check if inner cb suppressed the original exception + if suppress_exc: + exc_details = (None, None, None) + suppress_exc = cb(*exc_details) or suppress_exc + return suppress_exc + # Kick off the recursive chain + return _invoke_next_callback(exc_details) diff --git a/pipenv/patched/piptools/_compat/tempfile.py b/pipenv/patched/piptools/_compat/tempfile.py new file mode 100755 index 0000000000..a003d0805d --- /dev/null +++ b/pipenv/patched/piptools/_compat/tempfile.py @@ -0,0 +1,86 @@ +# coding: utf-8 +from __future__ import absolute_import, division, print_function + +import os as _os +import sys as _sys +import warnings as _warnings +from tempfile import mkdtemp + + +class TemporaryDirectory(object): + """Create and return a temporary directory. This has the same + behavior as mkdtemp but can be used as a context manager. For + example: + + with TemporaryDirectory() as tmpdir: + ... + + Upon exiting the context, the directory and everything contained + in it are removed. + """ + + def __init__(self, suffix="", prefix="tmp", dir=None): + self._closed = False + self.name = None # Handle mkdtemp raising an exception + self.name = mkdtemp(suffix, prefix, dir) + + def __repr__(self): + return "<{} {!r}>".format(self.__class__.__name__, self.name) + + def __enter__(self): + return self.name + + def cleanup(self): + if self.name and not self._closed: + try: + self._rmtree(self.name) + except (TypeError, AttributeError) as ex: + # Issue #10188: Emit a warning on stderr + # if the directory could not be cleaned + # up due to missing globals + if "None" not in str(ex): + raise + print("ERROR: {!r} while cleaning up {!r}".format(ex, self,), + file=_sys.stderr) + return + self._closed = True + + def __exit__(self, exc, value, tb): + self.cleanup() + + def __del__(self): + # Issue a ResourceWarning if implicit cleanup needed + self.cleanup() + + # XXX (ncoghlan): The following code attempts to make + # this class tolerant of the module nulling out process + # that happens during CPython interpreter shutdown + # Alas, it doesn't actually manage it. 
See issue #10188 + _listdir = staticmethod(_os.listdir) + _path_join = staticmethod(_os.path.join) + _isdir = staticmethod(_os.path.isdir) + _islink = staticmethod(_os.path.islink) + _remove = staticmethod(_os.remove) + _rmdir = staticmethod(_os.rmdir) + _warn = _warnings.warn + + def _rmtree(self, path): + # Essentially a stripped down version of shutil.rmtree. We can't + # use globals because they may be None'ed out at shutdown. + for name in self._listdir(path): + fullname = self._path_join(path, name) + try: + isdir = self._isdir(fullname) and not self._islink(fullname) + except OSError: + isdir = False + if isdir: + self._rmtree(fullname) + else: + try: + self._remove(fullname) + except OSError: + pass + try: + self._rmdir(path) + except OSError: + pass diff --git a/pipenv/patched/piptools/cache.py b/pipenv/patched/piptools/cache.py new file mode 100755 index 0000000000..9500ff529f --- /dev/null +++ b/pipenv/patched/piptools/cache.py @@ -0,0 +1,164 @@ +# coding: utf-8 +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import json +import os +import sys + +from pkg_resources import Requirement + +from .exceptions import PipToolsError +from .locations import CACHE_DIR +from .utils import as_tuple, key_from_req, lookup_table + + +class CorruptCacheError(PipToolsError): + def __init__(self, path): + self.path = path + + def __str__(self): + lines = [ + 'The dependency cache seems to have been corrupted.', + 'Inspect, or delete, the following file:', + ' {}'.format(self.path), + ] + return os.linesep.join(lines) + + +def read_cache_file(cache_file_path): + with open(cache_file_path, 'r') as cache_file: + try: + doc = json.load(cache_file) + except ValueError: + raise CorruptCacheError(cache_file_path) + + # Check version and load the contents + assert doc['__format__'] == 1, 'Unknown cache file format' + return doc['dependencies'] + + +class DependencyCache(object): + """ + Creates a new persistent dependency cache for the current Python version. + The cache file is written to the appropriate user cache dir for the + current platform, i.e. + + ~/.cache/pip-tools/depcache-pyX.Y.json + + Where X.Y indicates the Python version. + """ + def __init__(self, cache_dir=None): + if cache_dir is None: + cache_dir = CACHE_DIR + if not os.path.isdir(cache_dir): + os.makedirs(cache_dir) + py_version = '.'.join(str(digit) for digit in sys.version_info[:2]) + cache_filename = 'depcache-py{}.json'.format(py_version) + + self._cache_file = os.path.join(cache_dir, cache_filename) + self._cache = None + + @property + def cache(self): + """ + The dictionary that is the actual in-memory cache. This property + lazily loads the cache from disk. + """ + if self._cache is None: + self.read_cache() + return self._cache + + def as_cache_key(self, ireq): + """ + Given a requirement, return its cache key. This behavior is a little weird in order to allow backwards + compatibility with cache files. 
For a requirement without extras, this will return, for example: + + ("ipython", "2.1.0") + + For a requirement with extras, the extras will be comma-separated and appended to the version, inside brackets, + like so: + + ("ipython", "2.1.0[nbconvert,notebook]") + """ + name, version, extras = as_tuple(ireq) + if not extras: + extras_string = "" + else: + extras_string = "[{}]".format(",".join(extras)) + return name, "{}{}".format(version, extras_string) + + def read_cache(self): + """Reads the cached contents into memory.""" + if os.path.exists(self._cache_file): + self._cache = read_cache_file(self._cache_file) + else: + self._cache = {} + + def write_cache(self): + """Writes the cache to disk as JSON.""" + doc = { + '__format__': 1, + 'dependencies': self._cache, + } + with open(self._cache_file, 'w') as f: + json.dump(doc, f, sort_keys=True) + + def clear(self): + self._cache = {} + self.write_cache() + + def __contains__(self, ireq): + pkgname, pkgversion_and_extras = self.as_cache_key(ireq) + return pkgversion_and_extras in self.cache.get(pkgname, {}) + + def __getitem__(self, ireq): + pkgname, pkgversion_and_extras = self.as_cache_key(ireq) + return self.cache[pkgname][pkgversion_and_extras] + + def __setitem__(self, ireq, values): + pkgname, pkgversion_and_extras = self.as_cache_key(ireq) + self.cache.setdefault(pkgname, {}) + self.cache[pkgname][pkgversion_and_extras] = values + self.write_cache() + + def get(self, ireq, default=None): + pkgname, pkgversion_and_extras = self.as_cache_key(ireq) + return self.cache.get(pkgname, {}).get(pkgversion_and_extras, default) + + def reverse_dependencies(self, ireqs): + """ + Returns a lookup table of reverse dependencies for all the given ireqs. + + Since this is all static, it only works if the dependency cache + contains the complete data, otherwise you end up with a partial view. + This is typically no problem if you use this function after the entire + dependency tree is resolved. + """ + ireqs_as_cache_values = [self.as_cache_key(ireq) for ireq in ireqs] + return self._reverse_dependencies(ireqs_as_cache_values) + + def _reverse_dependencies(self, cache_keys): + """ + Returns a lookup table of reverse dependencies for all the given cache keys. + + Example input: + + [('pep8', '1.5.7'), + ('flake8', '2.4.0'), + ('mccabe', '0.3'), + ('pyflakes', '0.8.1')] + + Example output: + + {'pep8': ['flake8'], + 'flake8': [], + 'mccabe': ['flake8'], + 'pyflakes': ['flake8']} + + """ + # First, collect all the dependencies into a sequence of (parent, child) tuples, like [('flake8', 'pep8'), + # ('flake8', 'mccabe'), ...] 
+ return lookup_table((key_from_req(Requirement.parse(dep_name)), name) + for name, version_and_extras in cache_keys + for dep_name in self.cache[name][version_and_extras]) diff --git a/pipenv/patched/piptools/click.py b/pipenv/patched/piptools/click.py new file mode 100755 index 0000000000..4bab11cb7e --- /dev/null +++ b/pipenv/patched/piptools/click.py @@ -0,0 +1,6 @@ +from __future__ import absolute_import + +import click +click.disable_unicode_literals_warning = True + +from click import * # noqa diff --git a/pipenv/patched/piptools/exceptions.py b/pipenv/patched/piptools/exceptions.py new file mode 100755 index 0000000000..d2ced265d5 --- /dev/null +++ b/pipenv/patched/piptools/exceptions.py @@ -0,0 +1,36 @@ +class PipToolsError(Exception): + pass + + +class NoCandidateFound(PipToolsError): + def __init__(self, ireq, candidates_tried): + self.ireq = ireq + self.candidates_tried = candidates_tried + + def __str__(self): + sorted_versions = sorted(c.version for c in self.candidates_tried) + lines = [ + 'Could not find a version that matches {}'.format(self.ireq), + 'Tried: {}'.format(', '.join(str(version) for version in sorted_versions) or '(no version found at all)') + ] + return '\n'.join(lines) + + +class UnsupportedConstraint(PipToolsError): + def __init__(self, message, constraint): + super(UnsupportedConstraint, self).__init__(message) + self.constraint = constraint + + def __str__(self): + message = super(UnsupportedConstraint, self).__str__() + return '{} (constraint was: {})'.format(message, str(self.constraint)) + + +class IncompatibleRequirements(PipToolsError): + def __init__(self, ireq_a, ireq_b): + self.ireq_a = ireq_a + self.ireq_b = ireq_b + + def __str__(self): + message = "Incompatible requirements found: {} and {}" + return message.format(self.ireq_a, self.ireq_b) diff --git a/pipenv/patched/piptools/io.py b/pipenv/patched/piptools/io.py new file mode 100755 index 0000000000..a22d12a53b --- /dev/null +++ b/pipenv/patched/piptools/io.py @@ -0,0 +1,649 @@ +# -*- coding: utf-8 -*- +# +# NOTE: +# The classes in this module are vendored from boltons: +# https://github.com/mahmoud/boltons/blob/master/boltons/fileutils.py +# +"""Virtually every Python programmer has used Python for wrangling +disk contents, and ``fileutils`` collects solutions to some of the +most commonly-found gaps in the standard library. +""" + +from __future__ import print_function + +import os +import re +import sys +import stat +import errno +import fnmatch +from shutil import copy2, copystat, Error + + +__all__ = ['mkdir_p', 'atomic_save', 'AtomicSaver', 'FilePerms', + 'iter_find_files', 'copytree'] + + +FULL_PERMS = 511 # 0777 that both Python 2 and 3 can digest +RW_PERMS = 438 +_SINGLE_FULL_PERM = 7 # or 07 in Python 2 +try: + basestring +except NameError: + unicode = str # Python 3 compat + basestring = (str, bytes) + + +def mkdir_p(path): + """Creates a directory and any parent directories that may need to + be created along the way, without raising errors for any existing + directories. This function mimics the behavior of the ``mkdir -p`` + command available in Linux/BSD environments, but also works on + Windows. 
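A usage sketch (the path is hypothetical); repeated calls are safe:

    from piptools.io import mkdir_p

    mkdir_p('/tmp/piptools-demo/a/b/c')  # creates all intermediate dirs
    mkdir_p('/tmp/piptools-demo/a/b/c')  # already exists: silently returns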
+ """ + try: + os.makedirs(path) + except OSError as exc: + if exc.errno == errno.EEXIST and os.path.isdir(path): + return + raise + return + + +class FilePerms(object): + """The :class:`FilePerms` type is used to represent standard POSIX + filesystem permissions: + + * Read + * Write + * Execute + + Across three classes of user: + + * Owning (u)ser + * Owner's (g)roup + * Any (o)ther user + + This class assists with computing new permissions, as well as + working with numeric octal ``777``-style and ``rwx``-style + permissions. Currently it only considers the bottom 9 permission + bits; it does not support sticky bits or more advanced permission + systems. + + Args: + user (str): A string in the 'rwx' format, omitting characters + for which owning user's permissions are not provided. + group (str): A string in the 'rwx' format, omitting characters + for which owning group permissions are not provided. + other (str): A string in the 'rwx' format, omitting characters + for which owning other/world permissions are not provided. + + There are many ways to use :class:`FilePerms`: + + >>> FilePerms(user='rwx', group='xrw', other='wxr') # note character order + FilePerms(user='rwx', group='rwx', other='rwx') + >>> int(FilePerms('r', 'r', '')) + 288 + >>> oct(288)[-3:] # XXX Py3k + '440' + + See also the :meth:`FilePerms.from_int` and + :meth:`FilePerms.from_path` classmethods for useful alternative + ways to construct :class:`FilePerms` objects. + """ + # TODO: consider more than the lower 9 bits + class _FilePermProperty(object): + _perm_chars = 'rwx' + _perm_set = frozenset('rwx') + _perm_val = {'r': 4, 'w': 2, 'x': 1} # for sorting + + def __init__(self, attribute, offset): + self.attribute = attribute + self.offset = offset + + def __get__(self, fp_obj, type_=None): + if fp_obj is None: + return self + return getattr(fp_obj, self.attribute) + + def __set__(self, fp_obj, value): + cur = getattr(fp_obj, self.attribute) + if cur == value: + return + try: + invalid_chars = set(str(value)) - self._perm_set + except TypeError: + raise TypeError('expected string, not %r' % value) + if invalid_chars: + raise ValueError('got invalid chars %r in permission' + ' specification %r, expected empty string' + ' or one or more of %r' + % (invalid_chars, value, self._perm_chars)) + + sort_key = (lambda c: self._perm_val[c]) + new_value = ''.join(sorted(set(value), + key=sort_key, reverse=True)) + setattr(fp_obj, self.attribute, new_value) + self._update_integer(fp_obj, new_value) + + def _update_integer(self, fp_obj, value): + mode = 0 + key = 'xwr' + for symbol in value: + bit = 2 ** key.index(symbol) + mode |= (bit << (self.offset * 3)) + fp_obj._integer |= mode + + def __init__(self, user='', group='', other=''): + self._user, self._group, self._other = '', '', '' + self._integer = 0 + self.user = user + self.group = group + self.other = other + + @classmethod + def from_int(cls, i): + """Create a :class:`FilePerms` object from an integer. + + >>> FilePerms.from_int(0o644) # note the leading zero-oh for octal + FilePerms(user='rw', group='r', other='r') + """ + i &= FULL_PERMS + key = ('', 'x', 'w', 'xw', 'r', 'rx', 'rw', 'rwx') + parts = [] + while i: + parts.append(key[i & _SINGLE_FULL_PERM]) + i >>= 3 + parts.reverse() + return cls(*parts) + + @classmethod + def from_path(cls, path): + """Make a new :class:`FilePerms` object based on the permissions + assigned to the file or directory at *path*. + + Args: + path (str): Filesystem path of the target file. 
+
+        >>> from os.path import expanduser
+        >>> 'r' in FilePerms.from_path(expanduser('~')).user  # probably
+        True
+        """
+        stat_res = os.stat(path)
+        return cls.from_int(stat.S_IMODE(stat_res.st_mode))
+
+    def __int__(self):
+        return self._integer
+
+    # Sphinx tip: attribute docstrings come after the attribute
+    user = _FilePermProperty('_user', 2)
+    "Stores the ``rwx``-formatted *user* permission."
+    group = _FilePermProperty('_group', 1)
+    "Stores the ``rwx``-formatted *group* permission."
+    other = _FilePermProperty('_other', 0)
+    "Stores the ``rwx``-formatted *other* permission."
+
+    def __repr__(self):
+        cn = self.__class__.__name__
+        return ('%s(user=%r, group=%r, other=%r)'
+                % (cn, self.user, self.group, self.other))
+
+####
+
+
+_TEXT_OPENFLAGS = os.O_RDWR | os.O_CREAT | os.O_EXCL
+if hasattr(os, 'O_NOINHERIT'):
+    _TEXT_OPENFLAGS |= os.O_NOINHERIT
+if hasattr(os, 'O_NOFOLLOW'):
+    _TEXT_OPENFLAGS |= os.O_NOFOLLOW
+_BIN_OPENFLAGS = _TEXT_OPENFLAGS
+if hasattr(os, 'O_BINARY'):
+    _BIN_OPENFLAGS |= os.O_BINARY
+
+
+try:
+    import fcntl as fcntl
+except ImportError:
+    def set_cloexec(fd):
+        "Dummy set_cloexec for platforms without fcntl support"
+        pass
+else:
+    def set_cloexec(fd):
+        """Does a best-effort :func:`fcntl.fcntl` call to set a fd to be
+        automatically closed by any future child processes.
+
+        Implementation from the :mod:`tempfile` module.
+        """
+        try:
+            flags = fcntl.fcntl(fd, fcntl.F_GETFD, 0)
+        except IOError:
+            pass
+        else:
+            # flags read successfully, modify
+            flags |= fcntl.FD_CLOEXEC
+            fcntl.fcntl(fd, fcntl.F_SETFD, flags)
+        return
+
+
+def atomic_save(dest_path, **kwargs):
+    """A convenient interface to the :class:`AtomicSaver` type. See the
+    :class:`AtomicSaver` documentation for details.
+    """
+    return AtomicSaver(dest_path, **kwargs)
+
+
+def path_to_unicode(path):
+    if isinstance(path, unicode):
+        return path
+    encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
+    return path.decode(encoding)
+
+
+if os.name == 'nt':
+    import ctypes
+    from ctypes import c_wchar_p
+    from ctypes.wintypes import DWORD, LPVOID
+
+    _ReplaceFile = ctypes.windll.kernel32.ReplaceFile
+    _ReplaceFile.argtypes = [c_wchar_p, c_wchar_p, c_wchar_p,
+                             DWORD, LPVOID, LPVOID]
+
+    def replace(src, dst):
+        # argument names match stdlib docs, docstring below
+        try:
+            # ReplaceFile fails if the dest file does not exist, so
+            # first try to rename it into position
+            os.rename(src, dst)
+            return
+        except WindowsError as we:
+            if we.errno == errno.EEXIST:
+                pass  # continue with the ReplaceFile logic below
+            else:
+                raise
+
+        src = path_to_unicode(src)
+        dst = path_to_unicode(dst)
+        res = _ReplaceFile(c_wchar_p(dst), c_wchar_p(src),
+                           None, 0, None, None)
+        if not res:
+            raise OSError('failed to replace %r with %r' % (dst, src))
+        return
+
+    def atomic_rename(src, dst, overwrite=False):
+        "Rename *src* to *dst*, replacing *dst* if *overwrite is True"
+        if overwrite:
+            replace(src, dst)
+        else:
+            os.rename(src, dst)
+        return
+else:
+    # wrapper func for cross compat + docs
+    def replace(src, dst):
+        # os.replace does the same thing on unix
+        return os.rename(src, dst)
+
+    def atomic_rename(src, dst, overwrite=False):
+        "Rename *src* to *dst*, replacing *dst* if *overwrite is True"
+        if overwrite:
+            os.rename(src, dst)
+        else:
+            os.link(src, dst)
+            os.unlink(src)  # remove the old name; the link above created *dst*
+        return
+
+
+_atomic_rename = atomic_rename  # backwards compat
+
+replace.__doc__ = """Similar to :func:`os.replace` in Python 3.3+,
+this function will atomically create or replace the file at path
+*dst* with the file at path *src*.
+ +On Windows, this function uses the ReplaceFile API for maximum +possible atomicity on a range of filesystems. +""" + + +class AtomicSaver(object): + """``AtomicSaver`` is a configurable `context manager`_ that provides + a writable :class:`file` which will be moved into place as long as + no exceptions are raised within the context manager's block. These + "part files" are created in the same directory as the destination + path to ensure atomic move operations (i.e., no cross-filesystem + moves occur). + + Args: + dest_path (str): The path where the completed file will be + written. + overwrite (bool): Whether to overwrite the destination file if + it exists at completion time. Defaults to ``True``. + file_perms (int): Integer representation of file permissions + for the newly-created file. Defaults are, when the + destination path already exists, to copy the permissions + from the previous file, or if the file did not exist, to + respect the user's configured `umask`_, usually resulting + in octal 0644 or 0664. + part_file (str): Name of the temporary *part_file*. Defaults + to *dest_path* + ``.part``. Note that this argument is + just the filename, and not the full path of the part + file. To guarantee atomic saves, part files are always + created in the same directory as the destination path. + overwrite_part (bool): Whether to overwrite the *part_file*, + should it exist at setup time. Defaults to ``False``, + which results in an :exc:`OSError` being raised on + pre-existing part files. Be careful of setting this to + ``True`` in situations when multiple threads or processes + could be writing to the same part file. + rm_part_on_exc (bool): Remove *part_file* on exception cases. + Defaults to ``True``, but ``False`` can be useful for + recovery in some cases. Note that resumption is not + automatic and by default an :exc:`OSError` is raised if + the *part_file* exists. + + Practically, the AtomicSaver serves a few purposes: + + * Avoiding overwriting an existing, valid file with a partially + written one. + * Providing a reasonable guarantee that a part file only has one + writer at a time. + * Optional recovery of partial data in failure cases. + + .. _context manager: https://docs.python.org/2/reference/compound_stmts.html#with + .. _umask: https://en.wikipedia.org/wiki/Umask + + """ + _default_file_perms = RW_PERMS + + # TODO: option to abort if target file modify date has changed since start? 
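A usage sketch of the context-manager flow described above (the destination path is hypothetical, and its directory is assumed to exist; note the file object is opened in binary mode by default):

    from piptools.io import atomic_save

    with atomic_save('/tmp/piptools-demo/requirements.txt') as f:
        f.write(b'flask==0.10.1\n')
    # The .part file has now been atomically renamed into place; had the
    # block raised, the original destination file would be untouched.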
+ def __init__(self, dest_path, **kwargs): + self.dest_path = dest_path + self.overwrite = kwargs.pop('overwrite', True) + self.file_perms = kwargs.pop('file_perms', None) + self.overwrite_part = kwargs.pop('overwrite_part', False) + self.part_filename = kwargs.pop('part_file', None) + self.rm_part_on_exc = kwargs.pop('rm_part_on_exc', True) + self.text_mode = kwargs.pop('text_mode', False) # for windows + self.buffering = kwargs.pop('buffering', -1) + if kwargs: + raise TypeError('unexpected kwargs: %r' % (kwargs.keys(),)) + + self.dest_path = os.path.abspath(self.dest_path) + self.dest_dir = os.path.dirname(self.dest_path) + if not self.part_filename: + self.part_path = dest_path + '.part' + else: + self.part_path = os.path.join(self.dest_dir, self.part_filename) + self.mode = 'w+' if self.text_mode else 'w+b' + self.open_flags = _TEXT_OPENFLAGS if self.text_mode else _BIN_OPENFLAGS + + self.part_file = None + + def _open_part_file(self): + do_chmod = True + file_perms = self.file_perms + if file_perms is None: + try: + # try to copy from file being replaced + stat_res = os.stat(self.dest_path) + file_perms = stat.S_IMODE(stat_res.st_mode) + except (OSError, IOError): + # default if no destination file exists + file_perms = self._default_file_perms + do_chmod = False # respect the umask + + fd = os.open(self.part_path, self.open_flags, file_perms) + set_cloexec(fd) + self.part_file = os.fdopen(fd, self.mode, self.buffering) + + # if default perms are overridden by the user or previous dest_path + # chmod away the effects of the umask + if do_chmod: + try: + os.chmod(self.part_path, file_perms) + except (OSError, IOError): + self.part_file.close() + raise + return + + def setup(self): + """Called on context manager entry (the :keyword:`with` statement), + the ``setup()`` method creates the temporary file in the same + directory as the destination file. + + ``setup()`` tests for a writable directory with rename permissions + early, as the part file may not be written to immediately (not + using :func:`os.access` because of the potential issues of + effective vs. real privileges). + + If the caller is not using the :class:`AtomicSaver` as a + context manager, this method should be called explicitly + before writing. + """ + if os.path.lexists(self.dest_path): + if not self.overwrite: + raise OSError(errno.EEXIST, + 'Overwrite disabled and file already exists', + self.dest_path) + if self.overwrite_part and os.path.lexists(self.part_path): + os.unlink(self.part_path) + self._open_part_file() + return + + def __enter__(self): + self.setup() + return self.part_file + + def __exit__(self, exc_type, exc_val, exc_tb): + self.part_file.close() + if exc_type: + if self.rm_part_on_exc: + try: + os.unlink(self.part_path) + except Exception: + pass # avoid masking original error + return + try: + atomic_rename(self.part_path, self.dest_path, + overwrite=self.overwrite) + except OSError: + if self.rm_part_on_exc: + try: + os.unlink(self.part_path) + except Exception: + pass # avoid masking original error + raise # could not save destination file + return + + +_CUR_DIR = os.path.dirname(os.path.abspath(__file__)) + + +def iter_find_files(directory, patterns, ignored=None): + """Returns a generator that yields file paths under a *directory*, + matching *patterns* using `glob`_ syntax (e.g., ``*.txt``). Also + supports *ignored* patterns. + + Args: + directory (str): Path that serves as the root of the + search. Yielded paths will include this as a prefix. 
+ patterns (str or list): A single pattern or list of + glob-formatted patterns to find under *directory*. + ignored (str or list): A single pattern or list of + glob-formatted patterns to ignore. + + For example, finding Python files in the current directory: + + >>> filenames = sorted(iter_find_files(_CUR_DIR, '*.py')) + >>> filenames[-1].split('/')[-1] + 'typeutils.py' + + Or, Python files while ignoring emacs lockfiles: + + >>> filenames = iter_find_files(_CUR_DIR, '*.py', ignored='.#*') + + .. _glob: https://en.wikipedia.org/wiki/Glob_%28programming%29 + + """ + if isinstance(patterns, basestring): + patterns = [patterns] + pats_re = re.compile('|'.join([fnmatch.translate(p) for p in patterns])) + + if not ignored: + ignored = [] + elif isinstance(ignored, basestring): + ignored = [ignored] + ign_re = re.compile('|'.join([fnmatch.translate(p) for p in ignored])) + for root, dirs, files in os.walk(directory): + for basename in files: + if pats_re.match(basename): + if ignored and ign_re.match(basename): + continue + filename = os.path.join(root, basename) + yield filename + return + + +def copy_tree(src, dst, symlinks=False, ignore=None): + """The ``copy_tree`` function is an exact copy of the built-in + :func:`shutil.copytree`, with one key difference: it will not + raise an exception if part of the tree already exists. It achieves + this by using :func:`mkdir_p`. + + Args: + src (str): Path of the source directory to copy. + dst (str): Destination path. Existing directories accepted. + symlinks (bool): If ``True``, copy symlinks rather than their + contents. + ignore (callable): A callable that takes a path and directory + listing, returning the files within the listing to be ignored. + + For more details, check out :func:`shutil.copytree` and + :func:`shutil.copy2`. + + """ + names = os.listdir(src) + if ignore is not None: + ignored_names = ignore(src, names) + else: + ignored_names = set() + + mkdir_p(dst) + errors = [] + for name in names: + if name in ignored_names: + continue + srcname = os.path.join(src, name) + dstname = os.path.join(dst, name) + try: + if symlinks and os.path.islink(srcname): + linkto = os.readlink(srcname) + os.symlink(linkto, dstname) + elif os.path.isdir(srcname): + copytree(srcname, dstname, symlinks, ignore) + else: + # Will raise a SpecialFileError for unsupported file types + copy2(srcname, dstname) + # catch the Error from the recursive copytree so that we can + # continue with other files + except Error as e: + errors.extend(e.args[0]) + except EnvironmentError as why: + errors.append((srcname, dstname, str(why))) + try: + copystat(src, dst) + except OSError as why: + if WindowsError is not None and isinstance(why, WindowsError): + # Copying file access times may fail on Windows + pass + else: + errors.append((src, dst, str(why))) + if errors: + raise Error(errors) + + +copytree = copy_tree # alias for drop-in replacement of shutil + + +try: + file +except NameError: + file = object + + +# like open(os.devnull) but with even fewer side effects +class DummyFile(file): + # TODO: raise ValueErrors on closed for all methods? 
+ # TODO: enforce read/write + def __init__(self, path, mode='r', buffering=None): + self.name = path + self.mode = mode + self.closed = False + self.errors = None + self.isatty = False + self.encoding = None + self.newlines = None + self.softspace = 0 + + def close(self): + self.closed = True + + def fileno(self): + return -1 + + def flush(self): + if self.closed: + raise ValueError('I/O operation on a closed file') + return + + def next(self): + raise StopIteration() + + def read(self, size=0): + if self.closed: + raise ValueError('I/O operation on a closed file') + return '' + + def readline(self, size=0): + if self.closed: + raise ValueError('I/O operation on a closed file') + return '' + + def readlines(self, size=0): + if self.closed: + raise ValueError('I/O operation on a closed file') + return [] + + def seek(self): + if self.closed: + raise ValueError('I/O operation on a closed file') + return + + def tell(self): + if self.closed: + raise ValueError('I/O operation on a closed file') + return 0 + + def truncate(self): + if self.closed: + raise ValueError('I/O operation on a closed file') + return + + def write(self, string): + if self.closed: + raise ValueError('I/O operation on a closed file') + return + + def writelines(self, list_of_strings): + if self.closed: + raise ValueError('I/O operation on a closed file') + return + + def __next__(self): + raise StopIteration() + + def __enter__(self): + if self.closed: + raise ValueError('I/O operation on a closed file') + return + + def __exit__(self, exc_type, exc_val, exc_tb): + return diff --git a/pipenv/patched/piptools/locations.py b/pipenv/patched/piptools/locations.py new file mode 100755 index 0000000000..aa0610bb99 --- /dev/null +++ b/pipenv/patched/piptools/locations.py @@ -0,0 +1,19 @@ +import os +from shutil import rmtree + +from .click import secho +from pip.utils.appdirs import user_cache_dir + +# The user_cache_dir helper comes straight from pip itself +CACHE_DIR = user_cache_dir('pip-tools') + +# NOTE +# We used to store the cache dir under ~/.pip-tools, which is not the +# preferred place to store caches for any platform. This has been addressed +# in pip-tools==1.0.5, but to be good citizens, we point this out explicitly +# to the user when this directory is still found. +LEGACY_CACHE_DIR = os.path.expanduser('~/.pip-tools') + +if os.path.exists(LEGACY_CACHE_DIR): + secho('Removing old cache dir {} (new cache dir is {})'.format(LEGACY_CACHE_DIR, CACHE_DIR), fg='yellow') + rmtree(LEGACY_CACHE_DIR) diff --git a/pipenv/patched/piptools/logging.py b/pipenv/patched/piptools/logging.py new file mode 100755 index 0000000000..98f052870e --- /dev/null +++ b/pipenv/patched/piptools/logging.py @@ -0,0 +1,35 @@ +# coding: utf-8 +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import sys + +from . 
import click


+class LogContext(object):
+    def __init__(self, verbose=False):
+        self.verbose = verbose
+
+    def log(self, *args, **kwargs):
+        click.secho(*args, **kwargs)
+
+    def debug(self, *args, **kwargs):
+        if self.verbose:
+            self.log(*args, **kwargs)
+
+    def info(self, *args, **kwargs):
+        self.log(*args, **kwargs)
+
+    def warning(self, *args, **kwargs):
+        kwargs.setdefault('fg', 'yellow')
+        kwargs.setdefault('file', sys.stderr)
+        self.log(*args, **kwargs)
+
+    def error(self, *args, **kwargs):
+        kwargs.setdefault('fg', 'red')
+        kwargs.setdefault('file', sys.stderr)
+        self.log(*args, **kwargs)
+
+
+log = LogContext()
diff --git a/pipenv/patched/piptools/repositories/__init__.py b/pipenv/patched/piptools/repositories/__init__.py
new file mode 100755
index 0000000000..ce5142e8c6
--- /dev/null
+++ b/pipenv/patched/piptools/repositories/__init__.py
@@ -0,0 +1,3 @@
+# flake8: noqa
+from .local import LocalRequirementsRepository
+from .pypi import PyPIRepository
diff --git a/pipenv/patched/piptools/repositories/base.py b/pipenv/patched/piptools/repositories/base.py
new file mode 100755
index 0000000000..b791eab264
--- /dev/null
+++ b/pipenv/patched/piptools/repositories/base.py
@@ -0,0 +1,40 @@
+# coding: utf-8
+from __future__ import (absolute_import, division, print_function,
+                        unicode_literals)
+
+from abc import ABCMeta, abstractmethod
+
+from six import add_metaclass
+
+
+@add_metaclass(ABCMeta)
+class BaseRepository(object):
+
+    def clear_caches(self):
+        """Should clear any caches used by the implementation."""
+
+    def freshen_build_caches(self):
+        """Should start with fresh build/source caches."""
+
+    @abstractmethod
+    def find_best_match(self, ireq):
+        """
+        Return a Version object that indicates the best match for the given
+        InstallRequirement according to the repository.
+        """
+
+    @abstractmethod
+    def get_dependencies(self, ireq):
+        """
+        Given a pinned or an editable InstallRequirement, returns a set of
+        dependencies (also InstallRequirements, but not necessarily pinned).
+        They indicate the secondary dependencies for the given requirement.
+        """
+
+    @abstractmethod
+    def get_hashes(self, ireq):
+        """
+        Given a pinned InstallRequirement, returns a set of hashes that represent
+        all of the files for a given requirement. It is not acceptable for an
+        editable or unpinned requirement to be passed to this function.
+        """
diff --git a/pipenv/patched/piptools/repositories/local.py b/pipenv/patched/piptools/repositories/local.py
new file mode 100755
index 0000000000..ea3a39b98f
--- /dev/null
+++ b/pipenv/patched/piptools/repositories/local.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+from __future__ import (absolute_import, division, print_function,
+                        unicode_literals)
+
+from piptools.utils import as_tuple, key_from_req, make_install_requirement
+from .base import BaseRepository
+
+
+def ireq_satisfied_by_existing_pin(ireq, existing_pin):
+    """
+    Return True if the given InstallRequirement is satisfied by the
+    previously encountered version pin.
+    """
+    version = next(iter(existing_pin.req.specifier)).version
+    return version in ireq.req.specifier
+
+
+class LocalRequirementsRepository(BaseRepository):
+    """
+    The LocalRequirementsRepository proxies the _real_ repository by first
+    checking if a requirement can be satisfied by existing pins (i.e. the
+    result of a previous compile step).
+
+    In effect, if a requirement can be satisfied with a version pinned in the
+    requirements file, we prefer that version over the best match found in
+    PyPI.
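A sketch of the pin-reuse behaviour, assuming `pypi_repo` is an already-configured PyPIRepository and the pin came out of a previous compile run:

    from pip.req import InstallRequirement
    from piptools.repositories import LocalRequirementsRepository
    from piptools.utils import key_from_req

    pin = InstallRequirement.from_line('flask==0.10.1')
    existing_pins = {key_from_req(pin.req): pin}
    repo = LocalRequirementsRepository(existing_pins, pypi_repo)
    # For a constraint like flask>=0.10, find_best_match() now returns
    # the existing flask==0.10.1 pin instead of the newest index release.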
This keeps updates to the requirements.txt down to a minimum. + """ + def __init__(self, existing_pins, proxied_repository): + self.repository = proxied_repository + self.existing_pins = existing_pins + + @property + def finder(self): + return self.repository.finder + + @property + def session(self): + return self.repository.session + + @property + def DEFAULT_INDEX_URL(self): + return self.repository.DEFAULT_INDEX_URL + + def clear_caches(self): + self.repository.clear_caches() + + def freshen_build_caches(self): + self.repository.freshen_build_caches() + + def find_best_match(self, ireq, prereleases=None): + key = key_from_req(ireq.req) + existing_pin = self.existing_pins.get(key) + if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin): + project, version, _ = as_tuple(existing_pin) + return make_install_requirement( + project, version, ireq.extras, constraint=ireq.constraint + ) + else: + return self.repository.find_best_match(ireq, prereleases) + + def get_dependencies(self, ireq): + return self.repository.get_dependencies(ireq) + + def get_hashes(self, ireq): + return self.repository.get_hashes(ireq) diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py new file mode 100755 index 0000000000..598fc97248 --- /dev/null +++ b/pipenv/patched/piptools/repositories/pypi.py @@ -0,0 +1,232 @@ +# coding: utf-8 +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import hashlib +import os +from contextlib import contextmanager +from shutil import rmtree + +from pip.download import is_file_url, url_to_path +from pip.index import PackageFinder +from pip.req.req_set import RequirementSet +from pip.wheel import Wheel +try: + from pip.utils.hashes import FAVORITE_HASH +except ImportError: + FAVORITE_HASH = 'sha256' + +from ..cache import CACHE_DIR +from ..exceptions import NoCandidateFound +from ..utils import (fs_str, is_pinned_requirement, lookup_table, + make_install_requirement, pip_version_info) +from .base import BaseRepository + +try: + from tempfile import TemporaryDirectory # added in 3.2 +except ImportError: + from .._compat import TemporaryDirectory + + +# Monkey patch pip's Wheel class to support all platform tags. This allows +# pip-tools to generate hashes for all available distributions, not only the +# one for the current platform. + +def _wheel_supported(self, tags=None): + # Ignore current platform. Support everything. + return True + + +def _wheel_support_index_min(self, tags=None): + # All wheels are equal priority for sorting. + return 0 + + +Wheel.supported = _wheel_supported +Wheel.support_index_min = _wheel_support_index_min + + +class PyPIRepository(BaseRepository): + DEFAULT_INDEX_URL = 'https://pypi.python.org/simple' + + """ + The PyPIRepository will use the provided Finder instance to lookup + packages. Typically, it looks up packages on PyPI (the default implicit + config), but any other PyPI mirror can be used if index_urls is + changed/configured on the Finder. 
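Constructing one outside of pip-tools' own scripts takes a pip options object and a requests session. A hedged sketch, assuming the pip 9.x internals this patch targets (scripts/compile.py below uses the same pattern):

    import optparse

    import pip
    from piptools.repositories import PyPIRepository

    class PipCommand(pip.basecommand.Command):
        name = 'PipCommand'

    pip_command = PipCommand()
    index_opts = pip.cmdoptions.make_option_group(
        pip.cmdoptions.index_group, pip_command.parser)
    pip_command.parser.insert_option_group(0, index_opts)
    pip_command.parser.add_option(
        optparse.Option('--pre', action='store_true', default=False))
    pip_options, _ = pip_command.parser.parse_args([])
    session = pip_command._build_session(pip_options)
    repository = PyPIRepository(pip_options, session)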
+ """ + def __init__(self, pip_options, session): + self.session = session + + index_urls = [pip_options.index_url] + pip_options.extra_index_urls + if pip_options.no_index: + index_urls = [] + + self.finder = PackageFinder( + find_links=pip_options.find_links, + index_urls=index_urls, + trusted_hosts=pip_options.trusted_hosts, + allow_all_prereleases=pip_options.pre, + process_dependency_links=pip_options.process_dependency_links, + session=self.session, + ) + + # Caches + # stores project_name => InstallationCandidate mappings for all + # versions reported by PyPI, so we only have to ask once for each + # project + self._available_candidates_cache = {} + + # stores InstallRequirement => list(InstallRequirement) mappings + # of all secondary dependencies for the given requirement, so we + # only have to go to disk once for each requirement + self._dependencies_cache = {} + + # Setup file paths + self.freshen_build_caches() + self._download_dir = fs_str(os.path.join(CACHE_DIR, 'pkgs')) + self._wheel_download_dir = fs_str(os.path.join(CACHE_DIR, 'wheels')) + + def freshen_build_caches(self): + """ + Start with fresh build/source caches. Will remove any old build + caches from disk automatically. + """ + self._build_dir = TemporaryDirectory(fs_str('build')) + self._source_dir = TemporaryDirectory(fs_str('source')) + + @property + def build_dir(self): + return self._build_dir.name + + @property + def source_dir(self): + return self._source_dir.name + + def clear_caches(self): + rmtree(self._download_dir, ignore_errors=True) + rmtree(self._wheel_download_dir, ignore_errors=True) + + def find_all_candidates(self, req_name): + if req_name not in self._available_candidates_cache: + # pip 8 changed the internal API, making this a public method + if pip_version_info >= (8, 0): + candidates = self.finder.find_all_candidates(req_name) + else: + candidates = self.finder._find_all_versions(req_name) + self._available_candidates_cache[req_name] = candidates + return self._available_candidates_cache[req_name] + + def find_best_match(self, ireq, prereleases=None): + """ + Returns a Version object that indicates the best match for the given + InstallRequirement according to the external repository. + """ + if ireq.editable: + return ireq # return itself as the best match + + all_candidates = self.find_all_candidates(ireq.name) + candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version, unique=True) + matching_versions = ireq.specifier.filter((candidate.version for candidate in all_candidates), + prereleases=prereleases) + + # Reuses pip's internal candidate sort key to sort + matching_candidates = [candidates_by_version[ver] for ver in matching_versions] + if not matching_candidates: + raise NoCandidateFound(ireq, all_candidates) + best_candidate = max(matching_candidates, key=self.finder._candidate_sort_key) + + # Turn the candidate into a pinned InstallRequirement + return make_install_requirement( + best_candidate.project, best_candidate.version, ireq.extras, constraint=ireq.constraint + ) + + def get_dependencies(self, ireq): + """ + Given a pinned or an editable InstallRequirement, returns a set of + dependencies (also InstallRequirements, but not necessarily pinned). + They indicate the secondary dependencies for the given requirement. 
+ """ + if not (ireq.editable or is_pinned_requirement(ireq)): + raise TypeError('Expected pinned or editable InstallRequirement, got {}'.format(ireq)) + + if ireq not in self._dependencies_cache: + if ireq.link and not ireq.link.is_artifact: + # No download_dir for VCS sources. This also works around pip + # using git-checkout-index, which gets rid of the .git dir. + download_dir = None + else: + download_dir = self._download_dir + if not os.path.isdir(download_dir): + os.makedirs(download_dir) + if not os.path.isdir(self._wheel_download_dir): + os.makedirs(self._wheel_download_dir) + + reqset = RequirementSet(self.build_dir, + self.source_dir, + download_dir=download_dir, + wheel_download_dir=self._wheel_download_dir, + session=self.session) + self._dependencies_cache[ireq] = reqset._prepare_file(self.finder, ireq) + return set(self._dependencies_cache[ireq]) + + def get_hashes(self, ireq): + """ + Given a pinned InstallRequire, returns a set of hashes that represent + all of the files for a given requirement. It is not acceptable for an + editable or unpinned requirement to be passed to this function. + """ + if not is_pinned_requirement(ireq): + raise TypeError( + "Expected pinned requirement, not unpinned or editable, got {}".format(ireq)) + + # We need to get all of the candidates that match our current version + # pin, these will represent all of the files that could possibly + # satisify this constraint. + all_candidates = self.find_all_candidates(ireq.name) + candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version) + matching_versions = list( + ireq.specifier.filter((candidate.version for candidate in all_candidates))) + matching_candidates = candidates_by_version[matching_versions[0]] + + return { + self._get_file_hash(candidate.location) + for candidate in matching_candidates + } + + def _get_file_hash(self, location): + h = hashlib.new(FAVORITE_HASH) + with open_local_or_remote_file(location, self.session) as fp: + for chunk in iter(lambda: fp.read(8096), b""): + h.update(chunk) + return ":".join([FAVORITE_HASH, h.hexdigest()]) + + +@contextmanager +def open_local_or_remote_file(link, session): + """ + Open local or remote file for reading. + + :type link: pip.index.Link + :type session: requests.Session + :raises ValueError: If link points to a local directory. + :return: a context manager to the opened file-like object + """ + url = link.url_without_fragment + + if is_file_url(link): + # Local URL + local_path = url_to_path(url) + if os.path.isdir(local_path): + raise ValueError("Cannot open directory for read: {}".format(url)) + else: + with open(local_path, 'rb') as local_file: + yield local_file + else: + # Remote URL + headers = {"Accept-Encoding": "identity"} + response = session.get(url, headers=headers, stream=True) + try: + yield response.raw + finally: + response.close() diff --git a/pipenv/patched/piptools/resolver.py b/pipenv/patched/piptools/resolver.py new file mode 100755 index 0000000000..2906265ca9 --- /dev/null +++ b/pipenv/patched/piptools/resolver.py @@ -0,0 +1,296 @@ +# coding: utf-8 +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import copy +from functools import partial +from itertools import chain, count +import os + +from first import first +from pip.req import InstallRequirement + +from . 
+from .cache import DependencyCache
+from .exceptions import UnsupportedConstraint
+from .logging import log
+from .utils import (format_requirement, format_specifier, full_groupby,
+                    is_pinned_requirement, key_from_ireq, key_from_req, UNSAFE_PACKAGES)
+
+green = partial(click.style, fg='green')
+magenta = partial(click.style, fg='magenta')
+
+
+class RequirementSummary(object):
+    """
+    Summary of a requirement's properties for comparison purposes.
+    """
+    def __init__(self, ireq):
+        self.req = ireq.req
+        self.key = key_from_req(ireq.req)
+        self.extras = str(sorted(ireq.extras))
+        self.specifier = str(ireq.specifier)
+
+    def __eq__(self, other):
+        return str(self) == str(other)
+
+    def __hash__(self):
+        return hash(str(self))
+
+    def __str__(self):
+        return repr([self.key, self.specifier, self.extras])
+
+
+class Resolver(object):
+    def __init__(self, constraints, repository, cache=None, prereleases=False, clear_caches=False, allow_unsafe=False):
+        """
+        This class resolves a given set of constraints (a collection of
+        InstallRequirement objects) by consulting the given Repository and the
+        DependencyCache.
+        """
+        self.our_constraints = set(constraints)
+        self.their_constraints = set()
+        self.repository = repository
+        if cache is None:
+            cache = DependencyCache()  # pragma: no cover
+        self.dependency_cache = cache
+        self.prereleases = prereleases
+        self.clear_caches = clear_caches
+        self.allow_unsafe = allow_unsafe
+        self.unsafe_constraints = set()
+
+    @property
+    def constraints(self):
+        return set(self._group_constraints(chain(self.our_constraints,
+                                                 self.their_constraints)))
+
+    def resolve_hashes(self, ireqs):
+        """
+        Finds acceptable hashes for all of the given InstallRequirements.
+        """
+        return {ireq: self.repository.get_hashes(ireq) for ireq in ireqs}
+
+    def resolve(self, max_rounds=10):
+        """
+        Finds concrete package versions for all the given InstallRequirements
+        and their recursive dependencies. The end result is a flat list of
+        (name, version) tuples. (Or an editable package.)
+
+        Resolves constraints one round at a time, until they don't change
+        anymore. Protects against infinite loops by breaking out after a max
+        number of rounds.
+        """
+        if self.clear_caches:
+            self.dependency_cache.clear()
+            self.repository.clear_caches()
+
+        self.check_constraints(chain(self.our_constraints,
+                                     self.their_constraints))
+
+        # Ignore existing packages
+        os.environ[str('PIP_EXISTS_ACTION')] = str('i')  # NOTE: str() wrapping necessary for Python 2/3 compat
+        for current_round in count(start=1):
+            if current_round > max_rounds:
+                raise RuntimeError('No stable configuration of concrete packages '
+                                   'could be found for the given constraints after '
+                                   '%d rounds of resolving.\n'
+                                   'This is likely a bug.' % max_rounds)
+
+            log.debug('')
+            log.debug(magenta('{:^60}'.format('ROUND {}'.format(current_round))))
+            has_changed, best_matches = self._resolve_one_round()
+            log.debug('-' * 60)
+            log.debug('Result of round {}: {}'.format(current_round,
+                                                      'not stable' if has_changed else 'stable, done'))
+            if not has_changed:
+                break
+
+            # If a package version (foo==2.0) was built in a previous round,
+            # and in this round a different version of foo needs to be built
+            # (i.e. foo==1.0), the directory will exist already, which will
+            # cause a pip build failure. The trick is to start with a new
+            # build cache dir for every round, so this can never happen.
+            self.repository.freshen_build_caches()
+
+        del os.environ['PIP_EXISTS_ACTION']
+        # Only include hard requirements and not pip constraints
+        return {req for req in best_matches if not req.constraint}
+
+    @staticmethod
+    def check_constraints(constraints):
+        for constraint in constraints:
+            if constraint.link is not None and not constraint.editable:
+                msg = ('pip-compile does not support URLs as packages, unless they are editable. '
+                       'Perhaps add -e option?')
+                raise UnsupportedConstraint(msg, constraint)
+
+    def _group_constraints(self, constraints):
+        """
+        Groups constraints (remember, InstallRequirements!) by their key name,
+        combining their SpecifierSets into a single InstallRequirement per
+        package. For example, given the following constraints:
+
+            Django<1.9,>=1.4.2
+            django~=1.5
+            Flask~=0.7
+
+        This will be combined into a single entry per package:
+
+            django~=1.5,<1.9,>=1.4.2
+            flask~=0.7
+
+        """
+        for _, ireqs in full_groupby(constraints, key=key_from_ireq):
+            ireqs = list(ireqs)
+            editable_ireq = first(ireqs, key=lambda ireq: ireq.editable)
+            if editable_ireq:
+                yield editable_ireq  # ignore all the other specs: the editable one is the one that counts
+                continue
+
+            ireqs = iter(ireqs)
+            # deepcopy the accumulator so as to not modify the self.our_constraints invariant
+            combined_ireq = copy.deepcopy(next(ireqs))
+            combined_ireq.comes_from = None
+            for ireq in ireqs:
+                # NOTE we may be losing some info on dropped reqs here
+                combined_ireq.req.specifier &= ireq.req.specifier
+                combined_ireq.constraint &= ireq.constraint
+                # Return a sorted, de-duped tuple of extras
+                combined_ireq.extras = tuple(sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras))))
+            yield combined_ireq
+
+    def _resolve_one_round(self):
+        """
+        Resolves one level of the current constraints, by finding the best
+        match for each package in the repository and adding all requirements
+        for those best package versions. Some of these constraints may be new
+        or updated.
+
+        Returns whether new constraints appeared in this round. If no
+        constraints were added or changed, this indicates a stable
+        configuration.
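End to end, the round loop above is driven like this; a hedged sketch, assuming `repository` is the PyPIRepository wired up earlier (the pinned versions shown are only illustrative):

    from pip.req import InstallRequirement
    from piptools.resolver import Resolver

    constraints = [InstallRequirement.from_line('flask'),
                   InstallRequirement.from_line('jinja2<2.9')]
    resolver = Resolver(constraints, repository)
    for ireq in sorted(resolver.resolve(max_rounds=10), key=str):
        print(ireq.req)  # e.g. flask==0.12.2, itsdangerous==0.24, ...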
+ """ + # Sort this list for readability of terminal output + constraints = sorted(self.constraints, key=key_from_ireq) + unsafe_constraints = [] + original_constraints = copy.copy(constraints) + if not self.allow_unsafe: + for constraint in original_constraints: + if constraint.name in UNSAFE_PACKAGES: + constraints.remove(constraint) + constraint.req.specifier = None + unsafe_constraints.append(constraint) + + log.debug('Current constraints:') + for constraint in constraints: + log.debug(' {}'.format(constraint)) + + log.debug('') + log.debug('Finding the best candidates:') + best_matches = {self.get_best_match(ireq) for ireq in constraints} + + # Find the new set of secondary dependencies + log.debug('') + log.debug('Finding secondary dependencies:') + + safe_constraints = [] + for best_match in best_matches: + for dep in self._iter_dependencies(best_match): + if self.allow_unsafe or dep.name not in UNSAFE_PACKAGES: + safe_constraints.append(dep) + # Grouping constraints to make clean diff between rounds + theirs = set(self._group_constraints(safe_constraints)) + + # NOTE: We need to compare RequirementSummary objects, since + # InstallRequirement does not define equality + diff = {RequirementSummary(t) for t in theirs} - {RequirementSummary(t) for t in self.their_constraints} + removed = ({RequirementSummary(t) for t in self.their_constraints} - + {RequirementSummary(t) for t in theirs}) + unsafe = ({RequirementSummary(t) for t in unsafe_constraints} - + {RequirementSummary(t) for t in self.unsafe_constraints}) + + has_changed = len(diff) > 0 or len(removed) > 0 or len(unsafe) > 0 + if has_changed: + log.debug('') + log.debug('New dependencies found in this round:') + for new_dependency in sorted(diff, key=lambda req: key_from_req(req.req)): + log.debug(' adding {}'.format(new_dependency)) + log.debug('Removed dependencies in this round:') + for removed_dependency in sorted(removed, key=lambda req: key_from_req(req.req)): + log.debug(' removing {}'.format(removed_dependency)) + log.debug('Unsafe dependencies in this round:') + for unsafe_dependency in sorted(unsafe, key=lambda req: key_from_req(req.req)): + log.debug(' remembering unsafe {}'.format(unsafe_dependency)) + + # Store the last round's results in the their_constraints + self.their_constraints = theirs + # Store the last round's unsafe constraints + self.unsafe_constraints = unsafe_constraints + return has_changed, best_matches + + def get_best_match(self, ireq): + """ + Returns a (pinned or editable) InstallRequirement, indicating the best + match to use for the given InstallRequirement (in the form of an + InstallRequirement). + + Example: + Given the constraint Flask>=0.10, may return Flask==0.10.1 at + a certain moment in time. + + Pinned requirements will always return themselves, i.e. 
+
+ Flask==0.10.1 => Flask==0.10.1
+
+ """
+ if ireq.editable:
+ # NOTE: it's much quicker to immediately return instead of
+ # hitting the index server
+ best_match = ireq
+ elif is_pinned_requirement(ireq):
+ # NOTE: it's much quicker to immediately return instead of
+ # hitting the index server
+ best_match = ireq
+ else:
+ best_match = self.repository.find_best_match(ireq, prereleases=self.prereleases)
+
+ # Format the best match
+ log.debug(' found candidate {} (constraint was {})'.format(format_requirement(best_match),
+ format_specifier(ireq)))
+ return best_match
+
+ def _iter_dependencies(self, ireq):
+ """
+ Given a pinned or editable InstallRequirement, collects all of its
+ secondary dependencies, either by looking them up in a local cache,
+ or by reaching out to the repository.
+
+ Editable requirements will never be looked up, as they may have
+ changed at any time.
+ """
+ if ireq.editable:
+ for dependency in self.repository.get_dependencies(ireq):
+ yield dependency
+ return
+ elif not is_pinned_requirement(ireq):
+ raise TypeError('Expected pinned or editable requirement, got {}'.format(ireq))
+
+ # Either get the dependencies from the dependency cache (for speed),
+ # or reach out to the external repository to download and inspect the
+ # package version, collecting its dependencies from there
+ if ireq not in self.dependency_cache:
+ log.debug(' {} not in cache, need to check index'.format(format_requirement(ireq)), fg='yellow')
+ dependencies = self.repository.get_dependencies(ireq)
+ self.dependency_cache[ireq] = sorted(str(ireq.req) for ireq in dependencies)
+
+ # Example: ['Werkzeug>=0.9', 'Jinja2>=2.4']
+ dependency_strings = self.dependency_cache[ireq]
+ log.debug(' {:25} requires {}'.format(format_requirement(ireq),
+ ', '.join(sorted(dependency_strings, key=lambda s: s.lower())) or '-'))
+ for dependency_string in dependency_strings:
+ yield InstallRequirement.from_line(dependency_string, constraint=ireq.constraint)
+
+ def reverse_dependencies(self, ireqs):
+ non_editable = [ireq for ireq in ireqs if not ireq.editable]
+ return self.dependency_cache.reverse_dependencies(non_editable)
diff --git a/pipenv/patched/piptools/scripts/__init__.py b/pipenv/patched/piptools/scripts/__init__.py
new file mode 100755
index 0000000000..e69de29bb2
diff --git a/pipenv/patched/piptools/scripts/compile.py b/pipenv/patched/piptools/scripts/compile.py
new file mode 100755
index 0000000000..5004e1bc53
--- /dev/null
+++ b/pipenv/patched/piptools/scripts/compile.py
@@ -0,0 +1,258 @@
+# coding: utf-8
+from __future__ import (absolute_import, division, print_function,
+ unicode_literals)
+
+import optparse
+import os
+import sys
+import tempfile
+
+import pip
+from pip.req import InstallRequirement, parse_requirements
+
+from ..
import click +from ..exceptions import PipToolsError +from ..logging import log +from ..repositories import LocalRequirementsRepository, PyPIRepository +from ..resolver import Resolver +from ..utils import (assert_compatible_pip_version, dedup, is_pinned_requirement, + key_from_req, UNSAFE_PACKAGES) +from ..writer import OutputWriter + +# Make sure we're using a compatible version of pip +assert_compatible_pip_version() + +DEFAULT_REQUIREMENTS_FILE = 'requirements.in' + + +class PipCommand(pip.basecommand.Command): + name = 'PipCommand' + + +@click.command() +@click.version_option() +@click.option('-v', '--verbose', is_flag=True, help="Show more output") +@click.option('-n', '--dry-run', is_flag=True, help="Only show what would happen, don't change anything") +@click.option('-p', '--pre', is_flag=True, default=None, help="Allow resolving to prereleases (default is not)") +@click.option('-r', '--rebuild', is_flag=True, help="Clear any caches upfront, rebuild from scratch") +@click.option('-f', '--find-links', multiple=True, help="Look for archives in this directory or on this HTML page", envvar='PIP_FIND_LINKS') # noqa +@click.option('-i', '--index-url', help="Change index URL (defaults to PyPI)", envvar='PIP_INDEX_URL') +@click.option('--extra-index-url', multiple=True, help="Add additional index URL to search", envvar='PIP_EXTRA_INDEX_URL') # noqa +@click.option('--client-cert', help="Path to SSL client certificate, a single file containing the private key and the certificate in PEM format.") # noqa +@click.option('--trusted-host', multiple=True, envvar='PIP_TRUSTED_HOST', + help="Mark this host as trusted, even though it does not have " + "valid or any HTTPS.") +@click.option('--header/--no-header', is_flag=True, default=True, + help="Add header to generated file") +@click.option('--index/--no-index', is_flag=True, default=True, + help="Add index URL to generated file") +@click.option('--emit-trusted-host/--no-emit-trusted-host', is_flag=True, + default=True, help="Add trusted host option to generated file") +@click.option('--annotate/--no-annotate', is_flag=True, default=True, + help="Annotate results, indicating where dependencies come from") +@click.option('-U', '--upgrade', is_flag=True, default=False, + help='Try to upgrade all dependencies to their latest versions') +@click.option('-P', '--upgrade-package', 'upgrade_packages', nargs=1, multiple=True, + help="Specify particular packages to upgrade.") +@click.option('-o', '--output-file', nargs=1, type=str, default=None, + help=('Output file name. Required if more than one input file is given. 
' 'Will be derived from input file otherwise.'))
+@click.option('--allow-unsafe', is_flag=True, default=False,
+ help="Pin packages considered unsafe: {}".format(', '.join(sorted(UNSAFE_PACKAGES))))
+@click.option('--generate-hashes', is_flag=True, default=False,
+ help="Generate pip 8 style hashes in the resulting requirements file.")
+@click.option('--max-rounds', default=10,
+ help="Maximum number of rounds before resolving the requirements aborts.")
+@click.argument('src_files', nargs=-1, type=click.Path(exists=True, allow_dash=True))
+def cli(verbose, dry_run, pre, rebuild, find_links, index_url, extra_index_url,
+ client_cert, trusted_host, header, index, emit_trusted_host, annotate,
+ upgrade, upgrade_packages, output_file, allow_unsafe, generate_hashes,
+ src_files, max_rounds):
+ """Compiles requirements.txt from requirements.in specs."""
+ log.verbose = verbose
+
+ if len(src_files) == 0:
+ if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
+ src_files = (DEFAULT_REQUIREMENTS_FILE,)
+ elif os.path.exists('setup.py'):
+ src_files = ('setup.py',)
+ if not output_file:
+ output_file = 'requirements.txt'
+ else:
+ raise click.BadParameter(("If you do not specify an input file, "
+ "the default is {} or setup.py").format(DEFAULT_REQUIREMENTS_FILE))
+
+ if len(src_files) == 1 and src_files[0] == '-':
+ if not output_file:
+ raise click.BadParameter('--output-file is required if input is from stdin')
+
+ if len(src_files) > 1 and not output_file:
+ raise click.BadParameter('--output-file is required if two or more input files are given.')
+
+ if output_file:
+ dst_file = output_file
+ else:
+ base_name = src_files[0].rsplit('.', 1)[0]
+ dst_file = base_name + '.txt'
+
+ if upgrade and upgrade_packages:
+ raise click.BadParameter('Only one of --upgrade or --upgrade-package can be provided as an argument.')
+
+ ###
+ # Setup
+ ###
+
+ pip_command = get_pip_command()
+
+ pip_args = []
+ if find_links:
+ for link in find_links:
+ pip_args.extend(['-f', link])
+ if index_url:
+ pip_args.extend(['-i', index_url])
+ if extra_index_url:
+ for extra_index in extra_index_url:
+ pip_args.extend(['--extra-index-url', extra_index])
+ if client_cert:
+ pip_args.extend(['--client-cert', client_cert])
+ if pre:
+ pip_args.extend(['--pre'])
+ if trusted_host:
+ for host in trusted_host:
+ pip_args.extend(['--trusted-host', host])
+
+ pip_options, _ = pip_command.parse_args(pip_args)
+
+ session = pip_command._build_session(pip_options)
+ repository = PyPIRepository(pip_options, session)
+
+ # Proxy with a LocalRequirementsRepository if --upgrade is not specified
+ # (= default invocation)
+ if not upgrade and os.path.exists(dst_file):
+ ireqs = parse_requirements(dst_file, finder=repository.finder, session=repository.session, options=pip_options)
+ # Exclude any packages specified via --upgrade-package/-P from the
+ # existing pins, so that they can be upgraded.
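+ # (Illustration, assuming "-P django" was passed: an existing pin such
+ # as django==1.11.3 is left out of existing_pins below, so the resolver
+ # may pick a newer Django, while every other pin is kept.)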
+ upgrade_pkgs_key = {key_from_req(InstallRequirement.from_line(pkg).req) for pkg in upgrade_packages}
+ existing_pins = {key_from_req(ireq.req): ireq
+ for ireq in ireqs
+ if is_pinned_requirement(ireq) and key_from_req(ireq.req) not in upgrade_pkgs_key}
+ repository = LocalRequirementsRepository(existing_pins, repository)
+
+ log.debug('Using indexes:')
+ # remove duplicate index urls before processing
+ repository.finder.index_urls = list(dedup(repository.finder.index_urls))
+ for index_url in repository.finder.index_urls:
+ log.debug(' {}'.format(index_url))
+
+ if repository.finder.find_links:
+ log.debug('')
+ log.debug('Configuration:')
+ for find_link in repository.finder.find_links:
+ log.debug(' -f {}'.format(find_link))
+
+ ###
+ # Parsing/collecting initial requirements
+ ###
+
+ constraints = []
+ for src_file in src_files:
+ is_setup_file = os.path.basename(src_file) == 'setup.py'
+ if is_setup_file or src_file == '-':
+ # pip requires filenames and not files. Since we want to support
+ # piping from stdin, we need to briefly save the input from stdin
+ # to a temporary file and have pip read that. This is also used
+ # for reading requirements from install_requires in setup.py.
+ tmpfile = tempfile.NamedTemporaryFile(mode='wt', delete=False)
+ if is_setup_file:
+ from distutils.core import run_setup
+ dist = run_setup(src_file)
+ tmpfile.write('\n'.join(dist.install_requires))
+ else:
+ tmpfile.write(sys.stdin.read())
+ tmpfile.flush()
+ constraints.extend(parse_requirements(
+ tmpfile.name, finder=repository.finder, session=repository.session, options=pip_options))
+ else:
+ constraints.extend(parse_requirements(
+ src_file, finder=repository.finder, session=repository.session, options=pip_options))
+
+ # Check the given base set of constraints first
+ Resolver.check_constraints(constraints)
+
+ try:
+ resolver = Resolver(constraints, repository, prereleases=pre,
+ clear_caches=rebuild, allow_unsafe=allow_unsafe)
+ results = resolver.resolve(max_rounds=max_rounds)
+ if generate_hashes:
+ hashes = resolver.resolve_hashes(results)
+ else:
+ hashes = None
+ except PipToolsError as e:
+ log.error(str(e))
+ sys.exit(2)
+
+ log.debug('')
+
+ ###
+ # Output
+ ###
+
+ # Compute reverse dependency annotations statically, from the
+ # dependency cache that the resolver has populated by now.
+ #
+ # TODO (1a): reverse deps for any editable package are lost;
+ # what SHOULD happen is that they are cached in memory, just
+ # not persisted to disk!
+ #
+ # TODO (1b): perhaps it's easiest if the dependency cache has an API
+ # that could take InstallRequirements directly, like:
+ #
+ # cache.set(ireq, ...)
+ #
+ # then, when ireq is editable, it would store in
+ #
+ # editables[egg_name][link_without_fragment] = deps
+ # editables['pip-tools']['git+...ols.git@future'] = {'click>=3.0', 'six'}
+ #
+ # otherwise:
+ #
+ # self[as_name_version_tuple(ireq)] = {'click>=3.0', 'six'}
+ #
+ reverse_dependencies = None
+ if annotate:
+ reverse_dependencies = resolver.reverse_dependencies(results)
+
+ writer = OutputWriter(src_files, dst_file, dry_run=dry_run,
+ emit_header=header, emit_index=index,
+ emit_trusted_host=emit_trusted_host,
+ annotate=annotate,
+ generate_hashes=generate_hashes,
+ default_index_url=repository.DEFAULT_INDEX_URL,
+ index_urls=repository.finder.index_urls,
+ trusted_hosts=pip_options.trusted_hosts,
+ format_control=repository.finder.format_control)
+ writer.write(results=results,
+ unsafe_requirements=resolver.unsafe_constraints,
+ reverse_dependencies=reverse_dependencies,
+ primary_packages={key_from_req(ireq.req) for ireq in constraints if not ireq.constraint},
+ markers={key_from_req(ireq.req): ireq.markers
+ for ireq in constraints if ireq.markers},
+ hashes=hashes,
+ allow_unsafe=allow_unsafe)
+
+ if dry_run:
+ log.warning('Dry-run, so nothing updated.')
+
+
+def get_pip_command():
+ # Use pip's parser for pip.conf management and defaults.
+ # General options (find_links, index_url, extra_index_url, trusted_host,
+ # and pre) are deferred to pip.
+ pip_command = PipCommand()
+ index_opts = pip.cmdoptions.make_option_group(
+ pip.cmdoptions.index_group,
+ pip_command.parser,
+ )
+ pip_command.parser.insert_option_group(0, index_opts)
+ pip_command.parser.add_option(optparse.Option('--pre', action='store_true', default=False))
+
+ return pip_command
diff --git a/pipenv/patched/piptools/scripts/sync.py b/pipenv/patched/piptools/scripts/sync.py
new file mode 100755
index 0000000000..e1d7f5e534
--- /dev/null
+++ b/pipenv/patched/piptools/scripts/sync.py
@@ -0,0 +1,74 @@
+# coding: utf-8
+from __future__ import (absolute_import, division, print_function,
+ unicode_literals)
+
+import os
+import sys
+
+import pip
+
+from ..
import click, sync
+from ..exceptions import PipToolsError
+from ..logging import log
+from ..utils import assert_compatible_pip_version, flat_map
+
+# Make sure we're using a compatible version of pip
+assert_compatible_pip_version()
+
+DEFAULT_REQUIREMENTS_FILE = 'requirements.txt'
+
+
+@click.command()
+@click.version_option()
+@click.option('-n', '--dry-run', is_flag=True, help="Only show what would happen, don't change anything")
+@click.option('--force', is_flag=True, help="Proceed even if conflicts are found")
+@click.option('-f', '--find-links', multiple=True, help="Look for archives in this directory or on this HTML page", envvar='PIP_FIND_LINKS') # noqa
+@click.option('-i', '--index-url', help="Change index URL (defaults to PyPI)", envvar='PIP_INDEX_URL')
+@click.option('--extra-index-url', multiple=True, help="Add additional index URL to search", envvar='PIP_EXTRA_INDEX_URL') # noqa
+@click.option('--no-index', is_flag=True, help="Ignore package index (only looking at --find-links URLs instead)")
+@click.option('-q', '--quiet', default=False, is_flag=True, help="Give less output")
+@click.argument('src_files', required=False, type=click.Path(exists=True), nargs=-1)
+def cli(dry_run, force, find_links, index_url, extra_index_url, no_index, quiet, src_files):
+ """Synchronize virtual environment with requirements.txt."""
+ if not src_files:
+ if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
+ src_files = (DEFAULT_REQUIREMENTS_FILE,)
+ else:
+ msg = 'No requirement files given and no {} found in the current directory'
+ log.error(msg.format(DEFAULT_REQUIREMENTS_FILE))
+ sys.exit(2)
+
+ if any(src_file.endswith('.in') for src_file in src_files):
+ msg = ('Some input files have the .in extension, which is most likely an error and can '
+ 'cause weird behaviour. You probably meant to use the corresponding *.txt file.')
+ if force:
+ log.warning('WARNING: ' + msg)
+ else:
+ log.error('ERROR: ' + msg)
+ sys.exit(2)
+
+ requirements = flat_map(lambda src: pip.req.parse_requirements(src, session=True),
+ src_files)
+
+ try:
+ requirements = sync.merge(requirements, ignore_conflicts=force)
+ except PipToolsError as e:
+ log.error(str(e))
+ sys.exit(2)
+
+ installed_dists = pip.get_installed_distributions(skip=[])
+ to_install, to_uninstall = sync.diff(requirements, installed_dists)
+
+ install_flags = []
+ for link in find_links or []:
+ install_flags.extend(['-f', link])
+ if no_index:
+ install_flags.append('--no-index')
+ if index_url:
+ install_flags.extend(['-i', index_url])
+ if extra_index_url:
+ for extra_index in extra_index_url:
+ install_flags.extend(['--extra-index-url', extra_index])
+
+ sys.exit(sync.sync(to_install, to_uninstall, verbose=(not quiet), dry_run=dry_run,
+ install_flags=install_flags))
diff --git a/pipenv/patched/piptools/sync.py b/pipenv/patched/piptools/sync.py
new file mode 100755
index 0000000000..c4eadfbde7
--- /dev/null
+++ b/pipenv/patched/piptools/sync.py
@@ -0,0 +1,165 @@
+import collections
+import os
+import sys
+from subprocess import check_call
+
+from . import click
+from .exceptions import IncompatibleRequirements, UnsupportedConstraint
+from .utils import flat_map, format_requirement, key_from_ireq, key_from_req
+
+PACKAGES_TO_IGNORE = [
+ 'pip',
+ 'pip-tools',
+ 'pip-review',
+ 'pkg-resources',
+ 'setuptools',
+ 'wheel',
+]
+
+
+def dependency_tree(installed_keys, root_key):
+ """
+ Calculate the dependency tree for the package `root_key` and return
+ a collection of all its dependencies. Uses a BFS traversal algorithm
+ (the deque below is used as a FIFO queue).
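+
+ A hedged sketch of a call (assuming `django_dist` is an installed
+ distribution exposing .requires() and .version, as pkg_resources
+ distributions do; the names are illustrative):
+
+ deps = dependency_tree({'django': django_dist}, 'django')
+ # -> e.g. {'django', 'pytz'}, depending on what is installed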
+
+ `installed_keys` should be a {key: requirement} mapping, e.g.
+ {'django': from_line('django==1.8')}
+ `root_key` should be the key to return the dependency tree for.
+ """
+ dependencies = set()
+ queue = collections.deque()
+
+ if root_key in installed_keys:
+ dep = installed_keys[root_key]
+ queue.append(dep)
+
+ while queue:
+ v = queue.popleft()
+ key = key_from_req(v)
+ if key in dependencies:
+ continue
+
+ dependencies.add(key)
+
+ for dep_specifier in v.requires():
+ dep_name = key_from_req(dep_specifier)
+ if dep_name in installed_keys:
+ dep = installed_keys[dep_name]
+
+ if dep_specifier.specifier.contains(dep.version):
+ queue.append(dep)
+
+ return dependencies
+
+
+def get_dists_to_ignore(installed):
+ """
+ Returns a collection of package names to ignore when performing pip-sync,
+ based on the currently installed environment. For example, when pip-tools
+ is installed in the local environment, it should be ignored, including all
+ of its dependencies (e.g. click). When pip-tools is not installed
+ locally, click should also be installed/uninstalled depending on the given
+ requirements.
+ """
+ installed_keys = {key_from_req(r): r for r in installed}
+ return list(flat_map(lambda req: dependency_tree(installed_keys, req), PACKAGES_TO_IGNORE))
+
+
+def merge(requirements, ignore_conflicts):
+ by_key = {}
+
+ for ireq in requirements:
+ if ireq.link is not None and not ireq.editable:
+ msg = ('pip-compile does not support URLs as packages, unless they are editable. '
+ 'Perhaps add -e option?')
+ raise UnsupportedConstraint(msg, ireq)
+
+ key = ireq.link or key_from_req(ireq.req)
+
+ if not ignore_conflicts:
+ existing_ireq = by_key.get(key)
+ if existing_ireq:
+ # NOTE: We check equality here since we can assume that the
+ # requirements are all pinned
+ if ireq.specifier != existing_ireq.specifier:
+ raise IncompatibleRequirements(ireq, existing_ireq)
+
+ # TODO: Always pick the largest specifier in case of a conflict
+ by_key[key] = ireq
+
+ return by_key.values()
+
+
+def diff(compiled_requirements, installed_dists):
+ """
+ Calculate which packages should be installed or uninstalled, given a set
+ of compiled requirements and a list of currently installed modules.
+ """
+ requirements_lut = {r.link or key_from_req(r.req): r for r in compiled_requirements}
+
+ satisfied = set() # holds keys
+ to_install = set() # holds InstallRequirement objects
+ to_uninstall = set() # holds keys
+
+ pkgs_to_ignore = get_dists_to_ignore(installed_dists)
+ for dist in installed_dists:
+ key = key_from_req(dist)
+ if key not in requirements_lut:
+ to_uninstall.add(key)
+ elif requirements_lut[key].specifier.contains(dist.version):
+ satisfied.add(key)
+
+ for key, requirement in requirements_lut.items():
+ if key not in satisfied:
+ to_install.add(requirement)
+
+ # Make sure not to uninstall any packages that should be ignored
+ to_uninstall -= set(pkgs_to_ignore)
+
+ return (to_install, to_uninstall)
+
+
+def sync(to_install, to_uninstall, verbose=False, dry_run=False, pip_flags=None, install_flags=None):
+ """
+ Installs and uninstalls the given sets of modules.
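+
+ Returns 0 on success. A minimal sketch of a call, with the two sets
+ coming from diff() above:
+
+ exit_code = sync(to_install, to_uninstall, verbose=True, dry_run=True)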
+ """ + if not to_uninstall and not to_install: + click.echo("Everything up-to-date") + + if pip_flags is None: + pip_flags = [] + + if not verbose: + pip_flags += ['-q'] + + if os.environ.get('VIRTUAL_ENV'): + # find pip via PATH + pip = 'pip' + else: + # find pip in same directory as pip-sync entry-point script + pip = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), 'pip') + + if to_uninstall: + if dry_run: + click.echo("Would uninstall:") + for pkg in to_uninstall: + click.echo(" {}".format(pkg)) + else: + check_call([pip, 'uninstall', '-y'] + pip_flags + sorted(to_uninstall)) + + if to_install: + if install_flags is None: + install_flags = [] + if dry_run: + click.echo("Would install:") + for ireq in to_install: + click.echo(" {}".format(format_requirement(ireq))) + else: + package_args = [] + for ireq in sorted(to_install, key=key_from_ireq): + if ireq.editable: + package_args.extend(['-e', str(ireq.link or ireq.req)]) + else: + package_args.append(str(ireq.req)) + check_call([pip, 'install'] + pip_flags + install_flags + package_args) + return 0 diff --git a/pipenv/patched/piptools/utils.py b/pipenv/patched/piptools/utils.py new file mode 100755 index 0000000000..35317f0df5 --- /dev/null +++ b/pipenv/patched/piptools/utils.py @@ -0,0 +1,247 @@ +# coding: utf-8 +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import sys +from itertools import chain, groupby +from collections import OrderedDict + +import pip +from pip.req import InstallRequirement + +from first import first + +from .click import style + + +def safeint(s): + try: + return int(s) + except ValueError: + return 0 + + +pip_version_info = tuple(safeint(digit) for digit in pip.__version__.split('.')) + +UNSAFE_PACKAGES = {'setuptools', 'distribute', 'pip'} + + +def assert_compatible_pip_version(): + # Make sure we're using a reasonably modern version of pip + if not pip_version_info >= (8, 0): + print('pip-compile requires at least version 8.0 of pip ({} found), ' + 'perhaps run `pip install --upgrade pip`?'.format(pip.__version__)) + sys.exit(4) + + +def key_from_ireq(ireq): + """Get a standardized key for an InstallRequirement.""" + if ireq.req is None and ireq.link is not None: + return str(ireq.link) + else: + return key_from_req(ireq.req) + + +def key_from_req(req): + """Get an all-lowercase version of the requirement's name.""" + if hasattr(req, 'key'): + # pip 8.1.1 or below, using pkg_resources + key = req.key + else: + # pip 8.1.2 or above, using packaging + key = req.name + + key = key.replace('_', '-').lower() + return key + + +def comment(text): + return style(text, fg='green') + + +def make_install_requirement(name, version, extras, constraint=False): + # If no extras are specified, the extras string is blank + extras_string = "" + if extras: + # Sort extras for stability + extras_string = "[{}]".format(",".join(sorted(extras))) + + return InstallRequirement.from_line( + str('{}{}=={}'.format(name, extras_string, version)), + constraint=constraint) + + +def format_requirement(ireq, marker=None): + """ + Generic formatter for pretty printing InstallRequirements to the terminal + in a less verbose way than using its `__str__` method. + """ + if ireq.editable: + line = '-e {}'.format(ireq.link) + else: + line = str(ireq.req).lower() + + if marker: + line = '{} ; {}'.format(line, marker) + + return line + + +def format_specifier(ireq): + """ + Generic formatter for pretty printing the specifier part of + InstallRequirements to the terminal. 
+ """ + # TODO: Ideally, this is carried over to the pip library itself + specs = ireq.specifier._specs if ireq.req is not None else [] + specs = sorted(specs, key=lambda x: x._spec[1]) + return ','.join(str(s) for s in specs) or '' + + +def is_pinned_requirement(ireq): + """ + Returns whether an InstallRequirement is a "pinned" requirement. + + An InstallRequirement is considered pinned if: + + - Is not editable + - It has exactly one specifier + - That specifier is "==" + - The version does not contain a wildcard + + Examples: + django==1.8 # pinned + django>1.8 # NOT pinned + django~=1.8 # NOT pinned + django==1.* # NOT pinned + """ + if ireq.editable: + return False + + if len(ireq.specifier._specs) != 1: + return False + + op, version = first(ireq.specifier._specs)._spec + return (op == '==' or op == '===') and not version.endswith('.*') + + +def as_tuple(ireq): + """ + Pulls out the (name: str, version:str, extras:(str)) tuple from the pinned InstallRequirement. + """ + if not is_pinned_requirement(ireq): + raise TypeError('Expected a pinned InstallRequirement, got {}'.format(ireq)) + + name = key_from_req(ireq.req) + version = first(ireq.specifier._specs)._spec[1] + extras = tuple(sorted(ireq.extras)) + return name, version, extras + + +def full_groupby(iterable, key=None): + """Like groupby(), but sorts the input on the group key first.""" + return groupby(sorted(iterable, key=key), key=key) + + +def flat_map(fn, collection): + """Map a function over a collection and flatten the result by one-level""" + return chain.from_iterable(map(fn, collection)) + + +def lookup_table(values, key=None, keyval=None, unique=False, use_lists=False): + """ + Builds a dict-based lookup table (index) elegantly. + + Supports building normal and unique lookup tables. For example: + + >>> lookup_table(['foo', 'bar', 'baz', 'qux', 'quux'], + ... lambda s: s[0]) + { + 'b': {'bar', 'baz'}, + 'f': {'foo'}, + 'q': {'quux', 'qux'} + } + + For key functions that uniquely identify values, set unique=True: + + >>> lookup_table(['foo', 'bar', 'baz', 'qux', 'quux'], + ... lambda s: s[0], + ... unique=True) + { + 'b': 'baz', + 'f': 'foo', + 'q': 'quux' + } + + The values of the resulting lookup table will be values, not sets. + + For extra power, you can even change the values while building up the LUT. + To do so, use the `keyval` function instead of the `key` arg: + + >>> lookup_table(['foo', 'bar', 'baz', 'qux', 'quux'], + ... keyval=lambda s: (s[0], s[1:])) + { + 'b': {'ar', 'az'}, + 'f': {'oo'}, + 'q': {'uux', 'ux'} + } + + """ + if keyval is None: + if key is None: + keyval = (lambda v: v) + else: + keyval = (lambda v: (key(v), v)) + + if unique: + return dict(keyval(v) for v in values) + + lut = {} + for value in values: + k, v = keyval(value) + try: + s = lut[k] + except KeyError: + if use_lists: + s = lut[k] = list() + else: + s = lut[k] = set() + if use_lists: + s.append(v) + else: + s.add(v) + return dict(lut) + + +def dedup(iterable): + """Deduplicate an iterable object like iter(set(iterable)) but + order-reserved. + """ + return iter(OrderedDict.fromkeys(iterable)) + + +def fs_str(string): + """ + Convert given string to a correctly encoded filesystem string. + + On Python 2, if the input string is unicode, converts it to bytes + encoded with the filesystem encoding. + + On Python 3 returns the string as is, since Python 3 uses unicode + paths and the input string shouldn't be bytes. 
+ + >>> fs_str(u'some path component/Something') + 'some path component/Something' + >>> assert isinstance(fs_str('whatever'), str) + >>> assert isinstance(fs_str(u'whatever'), str) + + :type string: str|unicode + :rtype: str + """ + if isinstance(string, str): + return string + assert not isinstance(string, bytes) + return string.encode(_fs_encoding) + + +_fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding() diff --git a/pipenv/patched/piptools/writer.py b/pipenv/patched/piptools/writer.py new file mode 100755 index 0000000000..97b6df941b --- /dev/null +++ b/pipenv/patched/piptools/writer.py @@ -0,0 +1,151 @@ +import os +from itertools import chain + +from ._compat import ExitStack +from .click import unstyle +from .io import AtomicSaver +from .logging import log +from .utils import comment, dedup, format_requirement, key_from_req, UNSAFE_PACKAGES + + +class OutputWriter(object): + def __init__(self, src_files, dst_file, dry_run, emit_header, emit_index, + emit_trusted_host, annotate, generate_hashes, + default_index_url, index_urls, trusted_hosts, format_control): + self.src_files = src_files + self.dst_file = dst_file + self.dry_run = dry_run + self.emit_header = emit_header + self.emit_index = emit_index + self.emit_trusted_host = emit_trusted_host + self.annotate = annotate + self.generate_hashes = generate_hashes + self.default_index_url = default_index_url + self.index_urls = index_urls + self.trusted_hosts = trusted_hosts + self.format_control = format_control + + def _sort_key(self, ireq): + return (not ireq.editable, str(ireq.req).lower()) + + def write_header(self): + if self.emit_header: + yield comment('#') + yield comment('# This file is autogenerated by pip-compile') + yield comment('# To update, run:') + yield comment('#') + custom_cmd = os.environ.get('CUSTOM_COMPILE_COMMAND') + if custom_cmd: + yield comment('# {}'.format(custom_cmd)) + else: + params = [] + if not self.emit_index: + params += ['--no-index'] + if not self.emit_trusted_host: + params += ['--no-emit-trusted-host'] + if not self.annotate: + params += ['--no-annotate'] + if self.generate_hashes: + params += ["--generate-hashes"] + params += ['--output-file', self.dst_file] + params += self.src_files + yield comment('# pip-compile {}'.format(' '.join(params))) + yield comment('#') + + def write_index_options(self): + if self.emit_index: + for index, index_url in enumerate(dedup(self.index_urls)): + if index_url.rstrip('/') == self.default_index_url: + continue + flag = '--index-url' if index == 0 else '--extra-index-url' + yield '{} {}'.format(flag, index_url) + + def write_trusted_hosts(self): + if self.emit_trusted_host: + for trusted_host in dedup(self.trusted_hosts): + yield '--trusted-host {}'.format(trusted_host) + + def write_format_controls(self): + for nb in dedup(self.format_control.no_binary): + yield '--no-binary {}'.format(nb) + for ob in dedup(self.format_control.only_binary): + yield '--only-binary {}'.format(ob) + + def write_flags(self): + emitted = False + for line in chain(self.write_index_options(), + self.write_trusted_hosts(), + self.write_format_controls()): + emitted = True + yield line + if emitted: + yield '' + + def _iter_lines(self, results, unsafe_requirements, reverse_dependencies, + primary_packages, markers, hashes, allow_unsafe=False): + for line in self.write_header(): + yield line + for line in self.write_flags(): + yield line + + unsafe_requirements = {r for r in results if r.name in UNSAFE_PACKAGES} if not unsafe_requirements else unsafe_requirements # noqa 
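+ # (If the caller did not pass unsafe_requirements explicitly, the line
+ # above recovers them from the resolved results by package name.)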
+ packages = {r for r in results if r.name not in UNSAFE_PACKAGES} + + packages = sorted(packages, key=self._sort_key) + + for ireq in packages: + line = self._format_requirement( + ireq, reverse_dependencies, primary_packages, + markers.get(key_from_req(ireq.req)), hashes=hashes) + yield line + + if unsafe_requirements: + unsafe_requirements = sorted(unsafe_requirements, key=self._sort_key) + yield '' + yield comment('# The following packages are considered to be unsafe in a requirements file:') + + for ireq in unsafe_requirements: + req = self._format_requirement(ireq, + reverse_dependencies, + primary_packages, + marker=markers.get(key_from_req(ireq.req)), + hashes=hashes) + if not allow_unsafe: + yield comment('# {}'.format(req)) + else: + yield req + + def write(self, results, unsafe_requirements, reverse_dependencies, + primary_packages, markers, hashes, allow_unsafe=False): + with ExitStack() as stack: + f = None + if not self.dry_run: + f = stack.enter_context(AtomicSaver(self.dst_file)) + + for line in self._iter_lines(results, unsafe_requirements, reverse_dependencies, + primary_packages, markers, hashes, allow_unsafe=allow_unsafe): + log.info(line) + if f: + f.write(unstyle(line).encode('utf-8')) + f.write(os.linesep.encode('utf-8')) + + def _format_requirement(self, ireq, reverse_dependencies, primary_packages, marker=None, hashes=None): + line = format_requirement(ireq, marker=marker) + + ireq_hashes = (hashes if hashes is not None else {}).get(ireq) + if ireq_hashes: + for hash_ in sorted(ireq_hashes): + line += " \\\n --hash={}".format(hash_) + + if not self.annotate or key_from_req(ireq.req) in primary_packages: + return line + + # Annotate what packages this package is required by + required_by = reverse_dependencies.get(ireq.name.lower(), []) + if required_by: + annotation = ", ".join(sorted(required_by)) + line = "{:24}{}{}".format( + line, + " \\\n " if ireq_hashes else " ", + comment("# via " + annotation)) + return line diff --git a/setup.py b/setup.py index ac79760f55..1134e8ee6f 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,6 @@ 'virtualenv', 'pew>=0.1.26', 'pip>=9.0.1', - 'pip-tools>=1.9.0', 'requests>2.18.0', 'flake8', 'urllib3>=1.21.1'
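
A minimal usage sketch of the vendored scripts once this patch is applied.
Assumptions not part of the patch: pipenv is importable, a requirements.in
file exists in the working directory, and click's CliRunner test harness is
available.

    from click.testing import CliRunner

    from pipenv.patched.piptools.scripts.compile import cli

    runner = CliRunner()
    result = runner.invoke(cli, ['--dry-run', '--no-header', 'requirements.in'])
    print(result.output)

This is also why the final hunk drops the external pip-tools>=1.9.0
requirement from setup.py: the same entry points now ship under
pipenv.patched.piptools.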