From 47540c2534abfd81aaa80bd3d48810a0330a1bc4 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 19 Oct 2018 20:57:20 -0400 Subject: [PATCH] Upgrade vendored dependencies - Upgrade pythonfinder - Upgrade vistir - Upgrade requirementslib - Vendor backported version of `functools.lru_cache` for performance - Fix editable dependency installation when markers are present - Fix extraneous resource warnings - Fix filesystem output stream encoding issues - Fix pythonfinder non-standard python name issues - Provide full interaction layer to `Pipfile` and `Pipfile.lock` in requirementslib - Fixes #3017 - Fixes #3014 - Fixes #3021 - Fixes #3019 Signed-off-by: Dan Ryan --- pipenv/environments.py | 17 +- pipenv/resolver.py | 1 - .../backports/functools_lru_cache.LICENSE | 7 + .../vendor/backports/functools_lru_cache.py | 184 ++++++++++++ pipenv/vendor/cursor/LICENSE | 5 + pipenv/vendor/cursor/__init__.py | 4 + pipenv/vendor/cursor/cursor.py | 57 ++++ pipenv/vendor/pythonfinder/__init__.py | 2 +- pipenv/vendor/pythonfinder/cli.py | 18 +- pipenv/vendor/pythonfinder/environment.py | 3 + pipenv/vendor/pythonfinder/models/mixins.py | 38 ++- pipenv/vendor/pythonfinder/models/path.py | 279 ++++++++++++------ pipenv/vendor/pythonfinder/models/pyenv.py | 204 ++++++++++--- pipenv/vendor/pythonfinder/models/python.py | 77 ++++- pipenv/vendor/pythonfinder/models/windows.py | 29 +- pipenv/vendor/pythonfinder/pythonfinder.py | 34 ++- pipenv/vendor/pythonfinder/utils.py | 40 ++- pipenv/vendor/requirementslib/__init__.py | 2 +- .../vendor/requirementslib/models/lockfile.py | 160 +++++++--- .../vendor/requirementslib/models/pipfile.py | 146 ++++++--- .../vendor/requirementslib/models/project.py | 241 +++++++++++++++ .../requirementslib/models/requirements.py | 27 +- pipenv/vendor/requirementslib/models/utils.py | 12 + pipenv/vendor/vendor.txt | 18 +- pipenv/vendor/vistir/__init__.py | 10 +- pipenv/vendor/vistir/backports/tempfile.py | 2 +- pipenv/vendor/vistir/compat.py | 22 +- 
pipenv/vendor/vistir/contextmanagers.py | 70 ++++- pipenv/vendor/vistir/misc.py | 129 ++++---- pipenv/vendor/vistir/path.py | 77 +++-- pipenv/vendor/vistir/spin.py | 149 ++++++++++ pipenv/vendor/vistir/termcolors.py | 116 ++++++++ 32 files changed, 1784 insertions(+), 396 deletions(-) create mode 100644 pipenv/vendor/backports/functools_lru_cache.LICENSE create mode 100644 pipenv/vendor/backports/functools_lru_cache.py create mode 100644 pipenv/vendor/cursor/LICENSE create mode 100644 pipenv/vendor/cursor/__init__.py create mode 100644 pipenv/vendor/cursor/cursor.py create mode 100644 pipenv/vendor/requirementslib/models/project.py create mode 100644 pipenv/vendor/vistir/spin.py create mode 100644 pipenv/vendor/vistir/termcolors.py diff --git a/pipenv/environments.py b/pipenv/environments.py index ca05535f33..69b24d107b 100644 --- a/pipenv/environments.py +++ b/pipenv/environments.py @@ -8,6 +8,8 @@ # I hope I can remove this one day. os.environ["PYTHONDONTWRITEBYTECODE"] = fs_str("1") +PIPENV_IS_CI = bool("CI" in os.environ or "TF_BUILD" in os.environ) + # HACK: Prevent invalid shebangs with Homebrew-installed Python: # https://bugs.python.org/issue22490 os.environ.pop("__PYVENV_LAUNCHER__", None) @@ -68,7 +70,7 @@ Default is to show emojis. This is automatically set on Windows. """ -if os.name == "nt": +if os.name == "nt" or PIPENV_IS_CI: PIPENV_HIDE_EMOJIS = True PIPENV_IGNORE_VIRTUALENVS = bool(os.environ.get("PIPENV_IGNORE_VIRTUALENVS")) @@ -94,7 +96,7 @@ PIPENV_MAX_RETRIES = int(os.environ.get( "PIPENV_MAX_RETRIES", - "1" if "CI" in os.environ else "0", + "1" if PIPENV_IS_CI else "0", )) """Specify how many retries Pipenv should attempt for network requests. @@ -128,9 +130,18 @@ This can make the logs cleaner. Automatically set on Windows, and in CI environments. """ -if os.name == "nt" or "CI" in os.environ: +if PIPENV_IS_CI: PIPENV_NOSPIN = True +PIPENV_SPINNER = "dots" +"""Sets the default spinner type. 
+ +Spinners are identical to the node.js spinners and can be found at +https://github.com/sindresorhus/cli-spinners +""" +if os.name == "nt": + PIPENV_SPINNER = "bouncingBar" + PIPENV_PIPFILE = os.environ.get("PIPENV_PIPFILE") """If set, this specifies a custom Pipfile location. diff --git a/pipenv/resolver.py b/pipenv/resolver.py index 6526d99054..8c282b71f8 100644 --- a/pipenv/resolver.py +++ b/pipenv/resolver.py @@ -80,7 +80,6 @@ def resolve(packages, pre, project, sources, clear, system): if pypi_mirror_source else project.pipfile_sources ) - print("using sources: %s" % sources) results = resolve( packages, pre=do_pre, diff --git a/pipenv/vendor/backports/functools_lru_cache.LICENSE b/pipenv/vendor/backports/functools_lru_cache.LICENSE new file mode 100644 index 0000000000..5e795a61f3 --- /dev/null +++ b/pipenv/vendor/backports/functools_lru_cache.LICENSE @@ -0,0 +1,7 @@ +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/pipenv/vendor/backports/functools_lru_cache.py b/pipenv/vendor/backports/functools_lru_cache.py new file mode 100644 index 0000000000..707c6c766d --- /dev/null +++ b/pipenv/vendor/backports/functools_lru_cache.py @@ -0,0 +1,184 @@ +from __future__ import absolute_import + +import functools +from collections import namedtuple +from threading import RLock + +_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"]) + + +@functools.wraps(functools.update_wrapper) +def update_wrapper(wrapper, + wrapped, + assigned = functools.WRAPPER_ASSIGNMENTS, + updated = functools.WRAPPER_UPDATES): + """ + Patch two bugs in functools.update_wrapper. + """ + # workaround for http://bugs.python.org/issue3445 + assigned = tuple(attr for attr in assigned if hasattr(wrapped, attr)) + wrapper = functools.update_wrapper(wrapper, wrapped, assigned, updated) + # workaround for https://bugs.python.org/issue17482 + wrapper.__wrapped__ = wrapped + return wrapper + + +class _HashedSeq(list): + __slots__ = 'hashvalue' + + def __init__(self, tup, hash=hash): + self[:] = tup + self.hashvalue = hash(tup) + + def __hash__(self): + return self.hashvalue + + +def _make_key(args, kwds, typed, + kwd_mark=(object(),), + fasttypes=set([int, str, frozenset, type(None)]), + sorted=sorted, tuple=tuple, type=type, len=len): + 'Make a cache key from optionally typed positional and keyword arguments' + key = args + if kwds: + sorted_items = sorted(kwds.items()) + key += kwd_mark + for item in sorted_items: + key += item + if typed: + key += tuple(type(v) for v in args) + if kwds: + key += tuple(type(v) for k, v in sorted_items) + elif len(key) == 1 and type(key[0]) in fasttypes: + return key[0] + return _HashedSeq(key) + + +def lru_cache(maxsize=100, typed=False): + """Least-recently-used cache decorator. + + If *maxsize* is set to None, the LRU features are disabled and the cache + can grow without bound. 
+ + If *typed* is True, arguments of different types will be cached separately. + For example, f(3.0) and f(3) will be treated as distinct calls with + distinct results. + + Arguments to the cached function must be hashable. + + View the cache statistics named tuple (hits, misses, maxsize, currsize) with + f.cache_info(). Clear the cache and statistics with f.cache_clear(). + Access the underlying function with f.__wrapped__. + + See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used + + """ + + # Users should only access the lru_cache through its public API: + # cache_info, cache_clear, and f.__wrapped__ + # The internals of the lru_cache are encapsulated for thread safety and + # to allow the implementation to change (including a possible C version). + + def decorating_function(user_function): + + cache = dict() + stats = [0, 0] # make statistics updateable non-locally + HITS, MISSES = 0, 1 # names for the stats fields + make_key = _make_key + cache_get = cache.get # bound method to lookup key or return None + _len = len # localize the global len() function + lock = RLock() # because linkedlist updates aren't threadsafe + root = [] # root of the circular doubly linked list + root[:] = [root, root, None, None] # initialize by pointing to self + nonlocal_root = [root] # make updateable non-locally + PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields + + if maxsize == 0: + + def wrapper(*args, **kwds): + # no caching, just do a statistics update after a successful call + result = user_function(*args, **kwds) + stats[MISSES] += 1 + return result + + elif maxsize is None: + + def wrapper(*args, **kwds): + # simple caching without ordering or size limit + key = make_key(args, kwds, typed) + result = cache_get(key, root) # root used here as a unique not-found sentinel + if result is not root: + stats[HITS] += 1 + return result + result = user_function(*args, **kwds) + cache[key] = result + stats[MISSES] += 1 + return result + + else: + + 
def wrapper(*args, **kwds): + # size limited caching that tracks accesses by recency + key = make_key(args, kwds, typed) if kwds or typed else args + with lock: + link = cache_get(key) + if link is not None: + # record recent use of the key by moving it to the front of the list + root, = nonlocal_root + link_prev, link_next, key, result = link + link_prev[NEXT] = link_next + link_next[PREV] = link_prev + last = root[PREV] + last[NEXT] = root[PREV] = link + link[PREV] = last + link[NEXT] = root + stats[HITS] += 1 + return result + result = user_function(*args, **kwds) + with lock: + root, = nonlocal_root + if key in cache: + # getting here means that this same key was added to the + # cache while the lock was released. since the link + # update is already done, we need only return the + # computed result and update the count of misses. + pass + elif _len(cache) >= maxsize: + # use the old root to store the new key and result + oldroot = root + oldroot[KEY] = key + oldroot[RESULT] = result + # empty the oldest link and make it the new root + root = nonlocal_root[0] = oldroot[NEXT] + oldkey = root[KEY] + root[KEY] = root[RESULT] = None + # now update the cache dictionary for the new links + del cache[oldkey] + cache[key] = oldroot + else: + # put result in a new link at the front of the list + last = root[PREV] + link = [last, root, key, result] + last[NEXT] = root[PREV] = cache[key] = link + stats[MISSES] += 1 + return result + + def cache_info(): + """Report cache statistics""" + with lock: + return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache)) + + def cache_clear(): + """Clear the cache and cache statistics""" + with lock: + cache.clear() + root = nonlocal_root[0] + root[:] = [root, root, None, None] + stats[:] = [0, 0] + + wrapper.__wrapped__ = user_function + wrapper.cache_info = cache_info + wrapper.cache_clear = cache_clear + return update_wrapper(wrapper, user_function) + + return decorating_function diff --git a/pipenv/vendor/cursor/LICENSE 
b/pipenv/vendor/cursor/LICENSE new file mode 100644 index 0000000000..00023c8025 --- /dev/null +++ b/pipenv/vendor/cursor/LICENSE @@ -0,0 +1,5 @@ +This work is licensed under the Creative Commons +Attribution-ShareAlike 2.5 International License. To view a copy of +this license, visit http://creativecommons.org/licenses/by-sa/2.5/ or +send a letter to Creative Commons, PO Box 1866, Mountain View, +CA 94042, USA. diff --git a/pipenv/vendor/cursor/__init__.py b/pipenv/vendor/cursor/__init__.py new file mode 100644 index 0000000000..76a4f671f8 --- /dev/null +++ b/pipenv/vendor/cursor/__init__.py @@ -0,0 +1,4 @@ +from .cursor import hide, show, HiddenCursor + +__all__ = ["hide", "show", "HiddenCursor"] + diff --git a/pipenv/vendor/cursor/cursor.py b/pipenv/vendor/cursor/cursor.py new file mode 100644 index 0000000000..e4407c02c9 --- /dev/null +++ b/pipenv/vendor/cursor/cursor.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python2 +# -*- coding: utf-8 -*- + +## Author: James Spencer: http://stackoverflow.com/users/1375885/james-spencer +## Packager: Gijs TImmers: https://github.com/GijsTimmers + +## Based on James Spencer's answer on StackOverflow: +## http://stackoverflow.com/questions/5174810/how-to-turn-off-blinking-cursor-in-command-window + +## Licence: CC-BY-SA-2.5 +## http://creativecommons.org/licenses/by-sa/2.5/ + +## This work is licensed under the Creative Commons +## Attribution-ShareAlike 2.5 International License. To view a copy of +## this license, visit http://creativecommons.org/licenses/by-sa/2.5/ or +## send a letter to Creative Commons, PO Box 1866, Mountain View, +## CA 94042, USA. 
+ +import sys +import os + +if os.name == 'nt': + import ctypes + + class _CursorInfo(ctypes.Structure): + _fields_ = [("size", ctypes.c_int), + ("visible", ctypes.c_byte)] + +def hide(stream=sys.stdout): + if os.name == 'nt': + ci = _CursorInfo() + handle = ctypes.windll.kernel32.GetStdHandle(-11) + ctypes.windll.kernel32.GetConsoleCursorInfo(handle, ctypes.byref(ci)) + ci.visible = False + ctypes.windll.kernel32.SetConsoleCursorInfo(handle, ctypes.byref(ci)) + elif os.name == 'posix': + stream.write("\033[?25l") + stream.flush() + +def show(stream=sys.stdout): + if os.name == 'nt': + ci = _CursorInfo() + handle = ctypes.windll.kernel32.GetStdHandle(-11) + ctypes.windll.kernel32.GetConsoleCursorInfo(handle, ctypes.byref(ci)) + ci.visible = True + ctypes.windll.kernel32.SetConsoleCursorInfo(handle, ctypes.byref(ci)) + elif os.name == 'posix': + stream.write("\033[?25h") + stream.flush() + +class HiddenCursor(object): + def __init__(self, stream=sys.stdout): + self._stream = stream + def __enter__(self): + hide(stream=self._stream) + def __exit__(self, type, value, traceback): + show(stream=self._stream) \ No newline at end of file diff --git a/pipenv/vendor/pythonfinder/__init__.py b/pipenv/vendor/pythonfinder/__init__.py index 672724b4bf..9ac6031ca4 100644 --- a/pipenv/vendor/pythonfinder/__init__.py +++ b/pipenv/vendor/pythonfinder/__init__.py @@ -1,6 +1,6 @@ from __future__ import print_function, absolute_import -__version__ = '1.1.2' +__version__ = '1.1.3' # Add NullHandler to "pythonfinder" logger, because Python2's default root # logger has no handler and warnings like this would be reported: diff --git a/pipenv/vendor/pythonfinder/cli.py b/pipenv/vendor/pythonfinder/cli.py index b5aa7da363..1757c081ad 100644 --- a/pipenv/vendor/pythonfinder/cli.py +++ b/pipenv/vendor/pythonfinder/cli.py @@ -17,7 +17,7 @@ @click.option( "--version", is_flag=True, default=False, help="Display PythonFinder version." 
) -@click.option("--ignore-unsupported/--no-unsupported", is_flag=True, default=True, help="Ignore unsupported python versions.") +@click.option("--ignore-unsupported/--no-unsupported", is_flag=True, default=True, envvar="PYTHONFINDER_IGNORE_UNSUPPORTED", help="Ignore unsupported python versions.") @click.version_option(prog_name='pyfinder', version=__version__) @click.pass_context def cli(ctx, find=False, which=False, findall=False, version=False, ignore_unsupported=True): @@ -36,7 +36,7 @@ def cli(ctx, find=False, which=False, findall=False, version=False, ignore_unsup for v in versions: py = v.py_version click.secho( - "Python: {py.version!s} ({py.architecture!s}) @ {py.comes_from.path!s}".format( + "{py.name!s}: {py.version!s} ({py.architecture!s}) @ {py.comes_from.path!s}".format( py=py ), fg="yellow", @@ -47,23 +47,21 @@ def cli(ctx, find=False, which=False, findall=False, version=False, ignore_unsup fg="red", ) if find: - if any([find.startswith("{0}".format(n)) for n in range(10)]): - found = finder.find_python_version(find.strip()) - else: - found = finder.system_path.python_executables + click.secho("Searching for python: {0!s}".format(find.strip()), fg="yellow") + found = finder.find_python_version(find.strip()) if found: - click.echo("Found Python Version: {0}".format(found), color="white") + click.secho("Found python at the following locations:", fg="green") sys.exit(0) else: - click.echo("Failed to find matching executable...") + click.secho("Failed to find matching executable...", fg="yellow") sys.exit(1) elif which: found = finder.system_path.which(which.strip()) if found: - click.echo("Found Executable: {0}".format(found), color="white") + click.secho("Found Executable: {0}".format(found), fg="white") sys.exit(0) else: - click.echo("Failed to find matching executable...") + click.secho("Failed to find matching executable...", fg="yellow") sys.exit(1) else: click.echo("Please provide a command", color="red") diff --git 
a/pipenv/vendor/pythonfinder/environment.py b/pipenv/vendor/pythonfinder/environment.py index 2cdb5fd959..7c69b9fc37 100644 --- a/pipenv/vendor/pythonfinder/environment.py +++ b/pipenv/vendor/pythonfinder/environment.py @@ -15,3 +15,6 @@ IS_64BIT_OS = platform.machine() == "AMD64" else: IS_64BIT_OS = False + + +IGNORE_UNSUPPORTED = bool(os.environ.get("PYTHONFINDER_IGNORE_UNSUPPORTED", False)) diff --git a/pipenv/vendor/pythonfinder/models/mixins.py b/pipenv/vendor/pythonfinder/models/mixins.py index 8cbd45dfeb..7d4065484c 100644 --- a/pipenv/vendor/pythonfinder/models/mixins.py +++ b/pipenv/vendor/pythonfinder/models/mixins.py @@ -2,12 +2,14 @@ from __future__ import absolute_import, unicode_literals import abc +import attr import operator import six -from ..utils import KNOWN_EXTS, unnest +from ..utils import ensure_path, KNOWN_EXTS, unnest +@attr.s class BasePath(object): def which(self, name): """Search in this path for an executable. @@ -33,7 +35,14 @@ def which(self, name): return found def find_all_python_versions( - self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None + self, + major=None, + minor=None, + patch=None, + pre=None, + dev=None, + arch=None, + name=None, ): """Search for a specific python version on the path. Return all copies @@ -44,6 +53,7 @@ def find_all_python_versions( :param bool pre: Search for prereleases (default None) - prioritize releases if None :param bool dev: Search for devreleases (default None) - prioritize releases if None :param str arch: Architecture to include, e.g. '64bit', defaults to None + :param str name: The name of a python version, e.g. ``anaconda3-5.3.0`` :return: A list of :class:`~pythonfinder.models.PathEntry` instances matching the version requested. 
:rtype: List[:class:`~pythonfinder.models.PathEntry`] """ @@ -52,7 +62,14 @@ def find_all_python_versions( "find_all_python_versions" if self.is_dir else "find_python_version" ) sub_finder = operator.methodcaller( - call_method, major, minor=minor, patch=patch, pre=pre, dev=dev, arch=arch + call_method, + major=major, + minor=minor, + patch=patch, + pre=pre, + dev=dev, + arch=arch, + name=name, ) if not self.is_dir: return sub_finder(self) @@ -61,7 +78,14 @@ def find_all_python_versions( return [c for c in sorted(path_filter, key=version_sort, reverse=True)] def find_python_version( - self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None + self, + major=None, + minor=None, + patch=None, + pre=None, + dev=None, + arch=None, + name=None, ): """Search or self for the specified Python version and return the first match. @@ -72,6 +96,7 @@ def find_python_version( :param bool pre: Search for prereleases (default None) - prioritize releases if None :param bool dev: Search for devreleases (default None) - prioritize releases if None :param str arch: Architecture to include, e.g. '64bit', defaults to None + :param str name: The name of a python version, e.g. ``anaconda3-5.3.0`` :returns: A :class:`~pythonfinder.models.PathEntry` instance matching the version requested. 
""" @@ -83,12 +108,13 @@ def find_python_version( pre=pre, dev=dev, arch=arch, + name=name, ) is_py = operator.attrgetter("is_python") py_version = operator.attrgetter("as_python") if not self.is_dir: - if self.is_python and self.as_python and version_matcher(self.as_python): - return self + if self.is_python and self.as_python and version_matcher(self.py_version): + return attr.evolve(self) return finder = ( (child, child.as_python) diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index c90d9be37c..d54393b6b3 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -10,6 +10,7 @@ from itertools import chain import attr +import six from cached_property import cached_property @@ -19,8 +20,12 @@ from ..environment import PYENV_INSTALLED, PYENV_ROOT from ..exceptions import InvalidPythonVersion from ..utils import ( - ensure_path, filter_pythons, looks_like_python, optional_instance_of, - path_is_known_executable, unnest + ensure_path, + filter_pythons, + looks_like_python, + optional_instance_of, + path_is_known_executable, + unnest, ) from .python import PythonVersion @@ -75,7 +80,7 @@ def version_dict(self): if entry not in self._version_dict[version]: self._version_dict[version].append(entry) continue - if isinstance(entry, VersionPath): + if type(entry).__name__ == "VersionPath": for path in entry.paths.values(): if path not in self._version_dict[version] and path.is_python: self._version_dict[version].append(path) @@ -130,17 +135,16 @@ def _setup_pyenv(self): pyenv_index = self.path_order.index(last_pyenv) except ValueError: return - self.pyenv_finder = PyenvFinder.create(root=PYENV_ROOT, ignore_unsupported=self.ignore_unsupported) - # paths = (v.paths.values() for v in self.pyenv_finder.versions.values()) - root_paths = ( - p for path in self.pyenv_finder.expanded_paths for p in path if p.is_root + self.pyenv_finder = PyenvFinder.create( + root=PYENV_ROOT, 
ignore_unsupported=self.ignore_unsupported ) + root_paths = [p for p in self.pyenv_finder.roots] before_path = self.path_order[: pyenv_index + 1] after_path = self.path_order[pyenv_index + 2 :] self.path_order = ( - before_path + [p.path.as_posix() for p in root_paths] + after_path + before_path + [p.as_posix() for p in root_paths] + after_path ) - self.paths.update({p.path: p for p in root_paths}) + self.paths.update(self.pyenv_finder.roots) self._register_finder("pyenv", self.pyenv_finder) def _setup_windows(self): @@ -155,7 +159,9 @@ def _setup_windows(self): def get_path(self, path): path = ensure_path(path) - _path = self.paths.get(path.as_posix()) + _path = self.paths.get(path) + if not _path: + _path = self.paths.get(path.as_posix()) if not _path and path.as_posix() in self.path_order: _path = PathEntry.create( path=path.absolute(), is_root=True, only_python=self.only_python @@ -163,6 +169,14 @@ def get_path(self, path): self.paths[path.as_posix()] = _path return _path + def _get_paths(self): + return (self.get_path(k) for k in self.path_order) + + @cached_property + def path_entries(self): + paths = self._get_paths() + return paths + def find_all(self, executable): """Search the path for an executable. Return all copies. @@ -171,8 +185,8 @@ def find_all(self, executable): :returns: List[PathEntry] """ sub_which = operator.methodcaller("which", name=executable) - filtered = filter(None, (sub_which(self.get_path(k)) for k in self.path_order)) - return [f for f in filtered] + filtered = (sub_which(self.get_path(k)) for k in self.path_order) + return list(filtered) def which(self, executable): """Search for an executable on the path. @@ -182,11 +196,39 @@ def which(self, executable): :returns: :class:`~pythonfinder.models.PathEntry` object. 
""" sub_which = operator.methodcaller("which", name=executable) - filtered = filter(None, (sub_which(self.get_path(k)) for k in self.path_order)) - return next((f for f in filtered), None) + filtered = (sub_which(self.get_path(k)) for k in self.path_order) + return next(iter(f for f in filtered if f is not None), None) + + def _filter_paths(self, finder): + return ( + pth for pth in unnest(finder(p) for p in self.path_entries if p is not None) + if pth is not None + ) + + def _get_all_pythons(self, finder): + paths = {p.path.as_posix(): p for p in self._filter_paths(finder)} + paths.update(self.python_executables) + return (p for p in paths.values() if p is not None) + + def get_pythons(self, finder): + sort_key = operator.attrgetter("as_python.version_sort") + return ( + k for k in sorted( + (p for p in self._filter_paths(finder) if p.is_python), + key=sort_key, + reverse=True + ) if k is not None + ) def find_all_python_versions( - self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None + self, + major=None, + minor=None, + patch=None, + pre=None, + dev=None, + arch=None, + name=None, ): """Search for a specific python version on the path. Return all copies @@ -197,32 +239,46 @@ def find_all_python_versions( :param bool pre: Search for prereleases (default None) - prioritize releases if None :param bool dev: Search for devreleases (default None) - prioritize releases if None :param str arch: Architecture to include, e.g. '64bit', defaults to None + :param str name: The name of a python version, e.g. ``anaconda3-5.3.0`` :return: A list of :class:`~pythonfinder.models.PathEntry` instances matching the version requested. 
:rtype: List[:class:`~pythonfinder.models.PathEntry`] """ sub_finder = operator.methodcaller( "find_all_python_versions", - major, + major=major, minor=minor, patch=patch, pre=pre, dev=dev, arch=arch, + name=name, ) + alternate_sub_finder = None + if major and not (minor or patch or pre or dev or arch or name): + alternate_sub_finder = operator.methodcaller( + "find_all_python_versions", + major=None, + name=major + ) if os.name == "nt" and self.windows_finder: windows_finder_version = sub_finder(self.windows_finder) if windows_finder_version: return windows_finder_version - paths = (self.get_path(k) for k in self.path_order) - path_filter = filter( - None, unnest((sub_finder(p) for p in paths if p is not None)) - ) - version_sort = operator.attrgetter("as_python.version_sort") - return [c for c in sorted(path_filter, key=version_sort, reverse=True)] + values = list(self.get_pythons(sub_finder)) + if not values and alternate_sub_finder is not None: + values = list(self.get_pythons(alternate_sub_finder)) + return values def find_python_version( - self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None + self, + major=None, + minor=None, + patch=None, + pre=None, + dev=None, + arch=None, + name=None, ): """Search for a specific python version on the path. @@ -233,10 +289,24 @@ def find_python_version( :param bool pre: Search for prereleases (default None) - prioritize releases if None :param bool dev: Search for devreleases (default None) - prioritize releases if None :param str arch: Architecture to include, e.g. '64bit', defaults to None + :param str name: The name of a python version, e.g. ``anaconda3-5.3.0`` :return: A :class:`~pythonfinder.models.PathEntry` instance matching the version requested. 
:rtype: :class:`~pythonfinder.models.PathEntry` """ + if isinstance(major, six.string_types) and not minor and not patch: + # Only proceed if this is in the format "x.y.z" or similar + if major.count(".") > 0 and major[0].isdigit(): + version = major.split(".", 2) + if len(version) > 3: + major, minor, patch, rest = version + elif len(version) == 3: + major, minor, patch = version + else: + major, minor = version + else: + name = "{0!s}".format(major) + major = None sub_finder = operator.methodcaller( "find_python_version", major, @@ -245,7 +315,15 @@ def find_python_version( pre=pre, dev=dev, arch=arch, + name=name, ) + alternate_sub_finder = None + if major and not (minor or patch or pre or dev or arch or name): + alternate_sub_finder = operator.methodcaller( + "find_all_python_versions", + major=None, + name=major + ) if major and minor and patch: _tuple_pre = pre if pre is not None else False _tuple_dev = dev if dev is not None else False @@ -255,37 +333,41 @@ def find_python_version( windows_finder_version = sub_finder(self.windows_finder) if windows_finder_version: return windows_finder_version - paths = (self.get_path(k) for k in self.path_order) - path_filter = filter(None, (sub_finder(p) for p in paths if p is not None)) - version_sort = operator.attrgetter("as_python.version_sort") - ver = next( - (c for c in sorted(path_filter, key=version_sort, reverse=True)), None - ) + ver = next(iter(self.get_pythons(sub_finder)), None) + if not ver and alternate_sub_finder is not None: + ver = next(iter(self.get_pythons(alternate_sub_finder)), None) if ver: if ver.as_python.version_tuple[:5] in self.python_version_dict: self.python_version_dict[ver.as_python.version_tuple[:5]].append(ver) else: self.python_version_dict[ver.as_python.version_tuple[:5]] = [ver] + print(ver) return ver @classmethod - def create(cls, path=None, system=False, only_python=False, global_search=True, ignore_unsupported=False): + def create( + cls, + path=None, + system=False, + 
only_python=False, + global_search=True, + ignore_unsupported=True, + ): """Create a new :class:`pythonfinder.models.SystemPath` instance. :param path: Search path to prepend when searching, defaults to None :param path: str, optional - :param system: Whether to use the running python by default instead of searching, defaults to False - :param system: bool, optional - :param only_python: Whether to search only for python executables, defaults to False - :param only_python: bool, optional - :param ignore_unsupported: Whether to ignore unsupported python versions, if False, an error is raised, defaults to True - :param ignore_unsupported: bool, optional + :param bool system: Whether to use the running python by default instead of searching, defaults to False + :param bool only_python: Whether to search only for python executables, defaults to False + :param bool ignore_unsupported: Whether to ignore unsupported python versions, if False, an error is raised, defaults to True :return: A new :class:`pythonfinder.models.SystemPath` instance. 
:rtype: :class:`pythonfinder.models.SystemPath` """ path_entries = defaultdict(PathEntry) paths = [] + if ignore_unsupported: + os.environ["PYTHONFINDER_IGNORE_UNSUPPORTED"] = fs_str("1") if global_search: paths = os.environ.get("PATH").split(os.pathsep) if path: @@ -316,7 +398,8 @@ class PathEntry(BasePath): _children = attr.ib(default=attr.Factory(dict)) is_root = attr.ib(default=True) only_python = attr.ib(default=False) - py_version = attr.ib(default=None) + name = attr.ib() + py_version = attr.ib() pythons = attr.ib() def __str__(self): @@ -329,17 +412,46 @@ def _filter_children(self): children = self.path.iterdir() return children + def _gen_children(self): + pass_name = self.name != self.path.name + pass_args = {"is_root": False, "only_python": self.only_python} + if pass_name: + pass_args["name"] = self.name + + if not self.is_dir: + yield (self.path.as_posix(), copy.deepcopy(self)) + elif self.is_root: + for child in self._filter_children(): + yield (child.as_posix(), PathEntry.create(path=child, **pass_args)) + return + @cached_property def children(self): - if not self._children and self.is_dir and self.is_root: - self._children = { - child.as_posix(): PathEntry.create(path=child, is_root=False) - for child in self._filter_children() - } - elif not self.is_dir: - self._children = {self.path.as_posix(): self} + if not self._children: + children = {} + for child_key, child_val in self._gen_children(): + children[child_key] = child_val + self._children = children return self._children + @name.default + def get_name(self): + return self.path.name + + @py_version.default + def get_py_version(self): + from ..environment import IGNORE_UNSUPPORTED + if self.is_dir: + return None + if self.is_python: + from .python import PythonVersion + try: + py_version = PythonVersion.from_path(path=self, name=self.name) + except InvalidPythonVersion: + py_version = None + return py_version + return + @pythons.default def get_pythons(self): pythons = defaultdict() @@ -351,56 
+463,62 @@ def get_pythons(self): else: if self.is_python: _path = ensure_path(self.path) - pythons[_path.as_posix()] = copy.deepcopy(self) + pythons[_path.as_posix()] = self return pythons @cached_property def as_python(self): + py_version = None + if self.py_version: + return self.py_version if not self.is_dir and self.is_python: - if not self.py_version: - try: - from .python import PythonVersion - - self.py_version = PythonVersion.from_path(self.path) - except (ValueError, InvalidPythonVersion): - self.py_version = None - return self.py_version + try: + from .python import PythonVersion + py_version = PythonVersion.from_path(path=attr.evolve(self), name=self.name) + except (ValueError, InvalidPythonVersion): + py_version = None + return py_version @classmethod - def create(cls, path, is_root=False, only_python=False, pythons=None): + def create(cls, path, is_root=False, only_python=False, pythons=None, name=None): """Helper method for creating new :class:`pythonfinder.models.PathEntry` instances. - :param path: Path to the specified location. - :type path: str - :param is_root: Whether this is a root from the environment PATH variable, defaults to False - :param is_root: bool, optional - :param only_python: Whether to search only for python executables, defaults to False - :param only_python: bool, optional - :param pythons: A dictionary of existing python objects (usually from a finder), defaults to None - :param pythons: dict, optional + :param str path: Path to the specified location. + :param bool is_root: Whether this is a root from the environment PATH variable, defaults to False + :param bool only_python: Whether to search only for python executables, defaults to False + :param dict pythons: A dictionary of existing python objects (usually from a finder), defaults to None + :param str name: Name of the python version, e.g. ``anaconda3-5.3.0`` :return: A new instance of the class. 
:rtype: :class:`pythonfinder.models.PathEntry` """ target = ensure_path(path) - creation_args = {"path": target, "is_root": is_root, "only_python": only_python} + guessed_name = False + if not name: + guessed_name = True + name = target.name + creation_args = {"path": target, "is_root": is_root, "only_python": only_python, "name": name} if pythons: creation_args["pythons"] = pythons _new = cls(**creation_args) if pythons and only_python: children = {} + child_creation_args = { + "is_root": False, + "py_version": python, + "only_python": only_python + } + if not guessed_name: + child_creation_args["name"] = name for pth, python in pythons.items(): pth = ensure_path(pth) children[pth.as_posix()] = PathEntry( - path=pth, is_root=False, only_python=only_python, py_version=python + path=pth, + **child_creation_args ) _new._children = children return _new - @cached_property - def name(self): - return self.path.name - @cached_property def is_dir(self): try: @@ -416,28 +534,5 @@ def is_executable(self): @cached_property def is_python(self): return self.is_executable and ( - self.py_version or looks_like_python(self.path.name) - ) - - -@attr.s -class VersionPath(SystemPath): - base = attr.ib(default=None, validator=optional_instance_of(Path)) - - @classmethod - def create(cls, path, only_python=True, pythons=None): - """Accepts a path to a base python version directory. 
- - Generates the pyenv version listings for it""" - path = ensure_path(path) - path_entries = defaultdict(PathEntry) - if not path.name.lower() in ["scripts", "bin"]: - bin_name = "Scripts" if os.name == "nt" else "bin" - bin_dir = path / bin_name - else: - bin_dir = path - current_entry = PathEntry.create( - bin_dir, is_root=True, only_python=True, pythons=pythons + looks_like_python(self.path.name) ) - path_entries[bin_dir.as_posix()] = current_entry - return cls(base=bin_dir, paths=path_entries) diff --git a/pipenv/vendor/pythonfinder/models/pyenv.py b/pipenv/vendor/pythonfinder/models/pyenv.py index 527c5f0af7..1595a963a7 100644 --- a/pipenv/vendor/pythonfinder/models/pyenv.py +++ b/pipenv/vendor/pythonfinder/models/pyenv.py @@ -2,6 +2,7 @@ from __future__ import absolute_import, print_function import logging +import operator from collections import defaultdict @@ -10,9 +11,15 @@ from vistir.compat import Path -from ..utils import ensure_path, optional_instance_of, get_python_version, filter_pythons -from .mixins import BaseFinder -from .path import VersionPath +from ..utils import ( + ensure_path, + optional_instance_of, + get_python_version, + filter_pythons, + unnest, +) +from .mixins import BaseFinder, BasePath +from .path import SystemPath, PathEntry from .python import PythonVersion @@ -20,51 +27,66 @@ @attr.s -class PyenvFinder(BaseFinder): +class PyenvFinder(BaseFinder, BasePath): root = attr.ib(default=None, validator=optional_instance_of(Path)) - # ignore_unsupported should come before versions, because its value is used - # in versions's default initializer. - ignore_unsupported = attr.ib(default=False) + #: ignore_unsupported should come before versions, because its value is used + #: in versions's default initializer. 
+ ignore_unsupported = attr.ib(default=True) + paths = attr.ib(default=attr.Factory(list)) + roots = attr.ib(default=attr.Factory(defaultdict)) versions = attr.ib() pythons = attr.ib() + @property + def expanded_paths(self): + return ( + path for path in unnest(p for p in self.versions.values()) + if path is not None + ) + @classmethod - def version_from_bin_dir(cls, base_dir): - pythons = [py for py in filter_pythons(base_dir)] + def version_from_bin_dir(cls, base_dir, name=None): py_version = None - for py in pythons: - version = get_python_version(py.as_posix()) - try: - py_version = PythonVersion.parse(version) - except Exception: - continue - if py_version: - return py_version - return + version_path = PathEntry.create( + path=base_dir.absolute().as_posix(), + only_python=True, + name=base_dir.parent.name, + ) + py_version = next(iter(version_path.find_all_python_versions()), None) + return py_version @versions.default def get_versions(self): - versions = defaultdict(VersionPath) + versions = defaultdict() bin_ = sysconfig._INSTALL_SCHEMES[sysconfig._get_default_scheme()]["scripts"] for p in self.root.glob("versions/*"): - if p.parent.name == "envs": + if p.parent.name == "envs" or p.name == "envs": continue + bin_dir = Path(bin_.format(base=p.as_posix())) + version_path = None + if bin_dir.exists(): + version_path = PathEntry.create( + path=bin_dir.absolute().as_posix(), + only_python=False, + name=p.name, + is_root=True, + ) + version = None try: version = PythonVersion.parse(p.name) except ValueError: - bin_dir = Path(bin_.format(base=p.as_posix())) - if bin_dir.exists(): - version = self.version_from_bin_dir(bin_dir) - if not version: - if not self.ignore_unsupported: - raise - continue + entry = next(iter(version_path.find_all_python_versions()), None) + if not entry: + if self.ignore_unsupported: + continue + raise + else: + version = entry.py_version.as_dict() except Exception: if not self.ignore_unsupported: raise logger.warning( - 'Unsupported Python 
version %r, ignoring...', - p.name, exc_info=True + "Unsupported Python version %r, ignoring...", p.name, exc_info=True ) continue if not version: @@ -75,24 +97,128 @@ def get_versions(self): version.get("patch"), version.get("is_prerelease"), version.get("is_devrelease"), - version.get("is_debug") - ) - versions[version_tuple] = VersionPath.create( - path=p.resolve(), only_python=True + version.get("is_debug"), ) + self.roots[p] = version_path + versions[version_tuple] = version_path + self.paths.append(version_path) return versions @pythons.default def get_pythons(self): pythons = defaultdict() - for v in self.versions.values(): - for p in v.paths.values(): - _path = ensure_path(p.path) - if p.is_python: - pythons[_path] = p + for p in self.paths: + pythons.update(p.pythons) return pythons @classmethod - def create(cls, root, ignore_unsupported=False): + def create(cls, root, ignore_unsupported=True): root = ensure_path(root) return cls(root=root, ignore_unsupported=ignore_unsupported) + + def find_all_python_versions( + self, + major=None, + minor=None, + patch=None, + pre=None, + dev=None, + arch=None, + name=None, + ): + """Search for a specific python version on the path. Return all copies + + :param major: Major python version to search for. + :type major: int + :param int minor: Minor python version to search for, defaults to None + :param int patch: Patch python version to search for, defaults to None + :param bool pre: Search for prereleases (default None) - prioritize releases if None + :param bool dev: Search for devreleases (default None) - prioritize releases if None + :param str arch: Architecture to include, e.g. '64bit', defaults to None + :param str name: The name of a python version, e.g. ``anaconda3-5.3.0`` + :return: A list of :class:`~pythonfinder.models.PathEntry` instances matching the version requested. 
+ :rtype: List[:class:`~pythonfinder.models.PathEntry`] + """ + + version_matcher = operator.methodcaller( + "matches", + major=major, + minor=minor, + patch=patch, + pre=pre, + dev=dev, + arch=arch, + name=name, + ) + py = operator.attrgetter("as_python") + pythons = ( + py_ver for py_ver in (py(p) for p in self.pythons.values() if p is not None) + if py_ver is not None + ) + # pythons = filter(None, [p.as_python for p in self.pythons.values()]) + matching_versions = filter(lambda py: version_matcher(py), pythons) + version_sort = operator.attrgetter("version_sort") + return sorted(matching_versions, key=version_sort, reverse=True) + + def find_python_version( + self, + major=None, + minor=None, + patch=None, + pre=None, + dev=None, + arch=None, + name=None, + ): + """Search or self for the specified Python version and return the first match. + + :param major: Major version number. + :type major: int + :param int minor: Minor python version to search for, defaults to None + :param int patch: Patch python version to search for, defaults to None + :param bool pre: Search for prereleases (default None) - prioritize releases if None + :param bool dev: Search for devreleases (default None) - prioritize releases if None + :param str arch: Architecture to include, e.g. '64bit', defaults to None + :param str name: The name of a python version, e.g. ``anaconda3-5.3.0`` + :returns: A :class:`~pythonfinder.models.PathEntry` instance matching the version requested. 
@attr.s
class VersionPath(SystemPath):
    base = attr.ib(default=None, validator=optional_instance_of(Path))
    name = attr.ib(default=None)

    @classmethod
    def create(cls, path, only_python=True, pythons=None, name=None):
        """Accepts a path to a base python version directory.

        Generates the pyenv version listings for it"""
        version_dir = ensure_path(path)
        entries = defaultdict(PathEntry)
        scripts_template = sysconfig._INSTALL_SCHEMES[sysconfig._get_default_scheme()]["scripts"]
        # If we were handed the scripts/bin directory itself, step up to the
        # version directory so the template can be re-applied below.
        if version_dir.as_posix().endswith(Path(scripts_template).name):
            version_dir = version_dir.parent
        bin_dir = ensure_path(scripts_template.format(base=version_dir.as_posix()))
        version_name = name if name else version_dir.name
        root_entry = PathEntry.create(
            bin_dir, is_root=True, only_python=True, pythons=pythons, name=version_name
        )
        entries[bin_dir.as_posix()] = root_entry
        return cls(name=version_name, base=bin_dir, paths=entries)
version_sort(self): @@ -65,22 +70,37 @@ def version_tuple(self): self.patch, self.is_prerelease, self.is_devrelease, - self.is_debug + self.is_debug, ) def matches( - self, major=None, minor=None, patch=None, pre=False, dev=False, arch=None, debug=False + self, + major=None, + minor=None, + patch=None, + pre=False, + dev=False, + arch=None, + debug=False, + name=None, ): - if arch and arch.isdigit(): - arch = "{0}bit".format(arch) + if arch: + own_arch = self.get_architecture() + if arch.isdigit(): + arch = "{0}bit".format(arch) return ( (major is None or self.major == major) and (minor is None or self.minor == minor) and (patch is None or self.patch == patch) and (pre is None or self.is_prerelease == pre) and (dev is None or self.is_devrelease == dev) - and (arch is None or self.architecture == arch) + and (arch is None or own_arch == arch) and (debug is None or self.is_debug == debug) + and ( + name is None + or (name and self.name) + and (self.name == name or self.name.startswith(name)) + ) ) def as_major(self): @@ -93,6 +113,18 @@ def as_minor(self): self_dict.update({"patch": None}) return self.create(**self_dict) + def as_dict(self): + return { + "major": self.major, + "minor": self.minor, + "patch": self.patch, + "is_prerelease": self.is_prerelease, + "is_postrelease": self.is_postrelease, + "is_devrelease": self.is_devrelease, + "is_debug": self.is_debug, + "version": self.version, + } + @classmethod def parse(cls, version): """Parse a valid version string into a dictionary @@ -138,8 +170,15 @@ def parse(cls, version): "version": version, } + def get_architecture(self): + if self.architecture: + return self.architecture + arch, _ = platform.architecture(path.path.as_posix()) + self.architecture = arch + return self.architecture + @classmethod - def from_path(cls, path): + def from_path(cls, path, name=None): """Parses a python version from a system path. 
Raises: @@ -147,29 +186,33 @@ def from_path(cls, path): :param path: A string or :class:`~pythonfinder.models.path.PathEntry` :type path: str or :class:`~pythonfinder.models.path.PathEntry` instance - :param launcher_entry: A python launcher environment object. + :param str name: Name of the python distribution in question :return: An instance of a PythonVersion. :rtype: :class:`~pythonfinder.models.python.PythonVersion` """ from .path import PathEntry + from ..environment import IGNORE_UNSUPPORTED if not isinstance(path, PathEntry): - path = PathEntry.create(path, is_root=False, only_python=True) - if not path.is_python: + path = PathEntry.create(path, is_root=False, only_python=True, name=name) + if not path.is_python and not IGNORE_UNSUPPORTED: raise ValueError("Not a valid python path: %s" % path.path) return - py_version = get_python_version(str(path.path)) + py_version = get_python_version(path.path.as_posix()) instance_dict = cls.parse(py_version) - if not isinstance(instance_dict.get("version"), Version): + if not isinstance(instance_dict.get("version"), Version) and not IGNORE_UNSUPPORTED: raise ValueError("Not a valid python path: %s" % path.path) return - architecture, _ = platform.architecture(path.path.as_posix()) - instance_dict.update({"comes_from": path, "architecture": architecture}) + if not name: + name = path.name + instance_dict.update( + {"comes_from": path, "name": name} + ) return cls(**instance_dict) @classmethod - def from_windows_launcher(cls, launcher_entry): + def from_windows_launcher(cls, launcher_entry, name=None): """Create a new PythonVersion instance from a Windows Launcher Entry :param launcher_entry: A python launcher environment object. 
@@ -193,12 +236,14 @@ def from_windows_launcher(cls, launcher_entry): launcher_entry.info, "sys_architecture", SYSTEM_ARCH ), "executable": exe_path, + "name": name } ) py_version = cls.create(**creation_dict) - comes_from = PathEntry.create(exe_path, only_python=True) + comes_from = PathEntry.create(exe_path, only_python=True, name=name) comes_from.py_version = copy.deepcopy(py_version) py_version.comes_from = comes_from + py_version.name = comes_from.name return py_version @classmethod diff --git a/pipenv/vendor/pythonfinder/models/windows.py b/pipenv/vendor/pythonfinder/models/windows.py index fcb4d42a53..e47bcc2c0b 100644 --- a/pipenv/vendor/pythonfinder/models/windows.py +++ b/pipenv/vendor/pythonfinder/models/windows.py @@ -22,7 +22,14 @@ class WindowsFinder(BaseFinder): pythons = attr.ib() def find_all_python_versions( - self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None + self, + major=None, + minor=None, + patch=None, + pre=None, + dev=None, + arch=None, + name=None, ): version_matcher = operator.methodcaller( "matches", @@ -32,6 +39,7 @@ def find_all_python_versions( pre=pre, dev=dev, arch=arch, + name=name, ) py_filter = filter( None, filter(lambda c: version_matcher(c), self.version_list) @@ -40,13 +48,26 @@ def find_all_python_versions( return [c.comes_from for c in sorted(py_filter, key=version_sort, reverse=True)] def find_python_version( - self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None + self, + major=None, + minor=None, + patch=None, + pre=None, + dev=None, + arch=None, + name=None, ): return next( ( v for v in self.find_all_python_versions( - major=major, minor=minor, patch=patch, pre=pre, dev=dev, arch=arch + major=major, + minor=minor, + patch=patch, + pre=pre, + dev=dev, + arch=arch, + name=None, ) ), None, @@ -60,7 +81,7 @@ def get_versions(self): env_versions = pep514env.findall() path = None for version_object in env_versions: - install_path = getattr(version_object.info, 'install_path', None) + 
install_path = getattr(version_object.info, "install_path", None) if install_path is None: continue path = ensure_path(install_path.__getattr__("")) diff --git a/pipenv/vendor/pythonfinder/pythonfinder.py b/pipenv/vendor/pythonfinder/pythonfinder.py index e965bb511b..19a52e0a3b 100644 --- a/pipenv/vendor/pythonfinder/pythonfinder.py +++ b/pipenv/vendor/pythonfinder/pythonfinder.py @@ -7,13 +7,13 @@ class Finder(object): - def __init__(self, path=None, system=False, global_search=True, ignore_unsupported=False): + def __init__(self, path=None, system=False, global_search=True, ignore_unsupported=True): """Finder A cross-platform Finder for locating python and other executables. Searches for python and other specified binaries starting in `path`, if supplied, but searching the bin path of `sys.executable` if `system=True`, and then searching in the `os.environ['PATH']` if `global_search=True`. When `global_search` - is `False`, this search operation is restricted to the allowed locations of + is `False`, this search operation is restricted to the allowed locations of `path` and `system`. 
:param path: A bin-directory search location, defaults to None @@ -57,7 +57,7 @@ def which(self, exe): return self.system_path.which(exe) def find_python_version( - self, major, minor=None, patch=None, pre=None, dev=None, arch=None + self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None, name=None ): from .models import PythonVersion @@ -69,12 +69,24 @@ def find_python_version( and patch is None ): if arch is None and "-" in major: - major, arch = major.rsplit("-", 1) - if not arch.isdigit(): - major = "{0}-{1}".format(major, arch) + orig_string = "{0!s}".format(major) + major, _, arch = major.rpartition("-") + if arch.startswith("x"): + arch = arch.lstrip("x") + if arch.lower().endswith("bit"): + arch = arch.lower().replace("bit", "") + if not (arch.isdigit() and (int(arch) & int(arch) - 1) == 0): + major = orig_string + arch = None else: arch = "{0}bit".format(arch) - version_dict = PythonVersion.parse(major) + try: + version_dict = PythonVersion.parse(major) + except ValueError: + if name is None: + name = "{0!s}".format(major) + major = None + version_dict = {} major = version_dict.get("major", major) minor = version_dict.get("minor", minor) patch = version_dict.get("patch", patch) @@ -83,16 +95,16 @@ def find_python_version( arch = version_dict.get("architecture", arch) if arch is None else arch if os.name == "nt": match = self.windows_finder.find_python_version( - major, minor=minor, patch=patch, pre=pre, dev=dev, arch=arch + major=major, minor=minor, patch=patch, pre=pre, dev=dev, arch=arch, name=name ) if match: return match return self.system_path.find_python_version( - major=major, minor=minor, patch=patch, pre=pre, dev=dev, arch=arch + major=major, minor=minor, patch=patch, pre=pre, dev=dev, arch=arch, name=name ) def find_all_python_versions( - self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None + self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None, name=None ): version_sort = 
operator.attrgetter("as_python.version_sort") python_version_dict = getattr(self.system_path, "python_version_dict") @@ -109,7 +121,7 @@ def find_all_python_versions( paths = sorted(paths, key=version_sort, reverse=True) return paths versions = self.system_path.find_all_python_versions( - major=major, minor=minor, patch=patch, pre=pre, dev=dev, arch=arch + major=major, minor=minor, patch=patch, pre=pre, dev=dev, arch=arch, name=name ) if not isinstance(versions, list): versions = [versions] diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index dced9eabcb..2f5a860da1 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -16,10 +16,15 @@ from .exceptions import InvalidPythonVersion +try: + from functools import lru_cache +except ImportError: + from backports.functools_lru_cache import lru_cache + PYTHON_IMPLEMENTATIONS = ( "python", "ironpython", "jython", "pypy", "anaconda", "miniconda", - "stackless", "activepython" + "stackless", "activepython", "micropython" ) RULES_BASE = ["*{0}", "*{0}?", "*{0}?.?", "*{0}?.?m"] RULES = [rule.format(impl) for impl in PYTHON_IMPLEMENTATIONS for rule in RULES_BASE] @@ -29,7 +34,17 @@ filter(None, os.environ.get("PATHEXT", "").split(os.pathsep)) ) +MATCH_RULES = [] +for rule in RULES: + MATCH_RULES.extend( + [ + "{0}.{1}".format(rule, ext) if ext else "{0}".format(rule) + for ext in KNOWN_EXTS + ] + ) + +@lru_cache(maxsize=128) def get_python_version(path): """Get python version string using subprocess from a given path.""" version_cmd = [path, "-c", "import sys; print(sys.version.split()[0])"] @@ -54,6 +69,7 @@ def path_is_executable(path): return os.access(str(path), os.X_OK) +@lru_cache(maxsize=1024) def path_is_known_executable(path): return ( path_is_executable(path) @@ -62,24 +78,19 @@ def path_is_known_executable(path): ) +@lru_cache(maxsize=1024) def looks_like_python(name): - match_rules = [] - for rule in RULES: - match_rules.extend( - [ - 
@lru_cache(maxsize=1024)
def ensure_path(path):
    """Coerce *path* to an absolute :class:`~pathlib.Path`.

    Environment variables in string inputs are expanded; Path instances are
    returned unchanged (already-constructed paths are trusted as-is).

    :param path: A string or a :class:`~pathlib.Path` object.
    :type path: str or :class:`~pathlib.Path`
    :return: An absolute Path object.
    :rtype: :class:`~pathlib.Path`
    """
    if isinstance(path, vistir.compat.Path):
        return path
    expanded = os.path.expandvars(path)
    return vistir.compat.Path(expanded).absolute()
FileNotFoundError +from .project import ProjectFile from .requirements import Requirement +from .utils import optional_instance_of DEFAULT_NEWLINES = u"\n" @@ -22,47 +24,125 @@ def preferred_newlines(f): return DEFAULT_NEWLINES -class Lockfile(plette.lockfiles.Lockfile): - def __init__(self, *args, **kwargs): - path = kwargs.pop("path", None) - self._requirements = kwargs.pop("requirements", []) - self._dev_requirements = kwargs.pop("dev_requirements", []) - self.path = Path(path) if path else None - self.newlines = u"\n" - super(Lockfile, self).__init__(*args, **kwargs) +is_lockfile = optional_instance_of(plette.lockfiles.Lockfile) +is_projectfile = optional_instance_of(ProjectFile) + + +@attr.s(slots=True) +class Lockfile(object): + path = attr.ib(validator=optional_instance_of(Path), type=Path) + _requirements = attr.ib(default=attr.Factory(list), type=list) + _dev_requirements = attr.ib(default=attr.Factory(list), type=list) + projectfile = attr.ib(validator=is_projectfile, type=ProjectFile) + _lockfile = attr.ib(validator=is_lockfile, type=plette.lockfiles.Lockfile) + newlines = attr.ib(default=DEFAULT_NEWLINES, type=six.text_type) + + @path.default + def _get_path(self): + return Path(os.curdir).absolute() + + @projectfile.default + def _get_projectfile(self): + return self.load_projectfile(self.path) + + @_lockfile.default + def _get_lockfile(self): + return self.projectfile.lockfile + + def __getattr__(self, k, *args, **kwargs): + retval = None + lockfile = super(Lockfile, self).__getattribute__("_lockfile") + try: + return super(Lockfile, self).__getattribute__(k) + except AttributeError: + retval = getattr(lockfile, k, None) + if not retval: + retval = super(Lockfile, self).__getattribute__(k, *args, **kwargs) + return retval + + @classmethod + def read_projectfile(cls, path): + """Read the specified project file and provide an interface for writing/updating. + + :param str path: Path to the target file. 
+ :return: A project file with the model and location for interaction + :rtype: :class:`~requirementslib.models.project.ProjectFile` + """ + + pf = ProjectFile.read( + path, + plette.lockfiles.Lockfile, + invalid_ok=True + ) + return pf @classmethod - def load(cls, path): + def load_projectfile(cls, path, create=True): + """Given a path, load or create the necessary lockfile. + + :param str path: Path to the project root or lockfile + :param bool create: Whether to create the lockfile if not found, defaults to True + :raises OSError: Thrown if the project root directory doesn't exist + :raises FileNotFoundError: Thrown if the lockfile doesn't exist and ``create=False`` + :return: A project file instance for the supplied project + :rtype: :class:`~requirementslib.models.project.ProjectFile` + """ + if not path: path = os.curdir path = Path(path).absolute() - if path.is_dir(): - path = path / "Pipfile.lock" - elif path.name == "Pipfile": - path = path.parent / "Pipfile.lock" - if not path.exists(): - raise OSError("Path does not exist: %s" % path) - return cls.create(path.parent, lockfile_name=path.name) + project_path = path if path.is_dir() else path.parent + lockfile_path = project_path / "Pipfile.lock" + if not project_path.exists(): + raise OSError("Project does not exist: %s" % project_path.as_posix()) + elif not lockfile_path.exists() and not create: + raise FileNotFoundError("Lockfile does not exist: %s" % lockfile_path.as_posix()) + projectfile = cls.read_projectfile(lockfile_path.as_posix()) + return projectfile @classmethod - def create(cls, project_path, lockfile_name="Pipfile.lock"): - """Create a new lockfile instance + def load(cls, path, create=True): + """Create a new lockfile instance. 
:param project_path: Path to project root - :type project_path: str or :class:`~pathlib.Path` - :returns: List[:class:`~requirementslib.Requirement`] objects + :type project_path: str or :class:`pathlib.Path` + :param str lockfile_name: Name of the lockfile in the project root directory + :param pipfile_path: Path to the project pipfile + :type pipfile_path: :class:`pathlib.Path` + :returns: A new lockfile representing the supplied project paths + :rtype: :class:`~requirementslib.models.lockfile.Lockfile` """ - if not isinstance(project_path, Path): - project_path = Path(project_path) - lockfile_path = project_path / lockfile_name - with lockfile_path.open(encoding="utf-8") as f: - lockfile = super(Lockfile, cls).load(f) - lockfile.newlines = preferred_newlines(f) - lockfile.path = lockfile_path - return lockfile + projectfile = cls.load_projectfile(path, create=create) + lockfile_path = Path(projectfile.location) + creation_args = { + "projectfile": projectfile, + "lockfile": projectfile.model, + "newlines": projectfile.line_ending, + "path": lockfile_path + } + return cls(**creation_args) + + @classmethod + def create(cls, path, create=True): + return cls.load(path, create=create) + + @property + def develop(self): + return self._lockfile.develop + + @property + def default(self): + return self._lockfile.default def get_requirements(self, dev=False): + """Produces a generator which generates requirements from the desired section. 
+ + :param bool dev: Indicates whether to use dev requirements, defaults to False + :return: Requirements from the relevant the relevant pipfile + :rtype: :class:`~requirementslib.models.requirements.Requirement` + """ + section = self.develop if dev else self.default for k in section.keys(): yield Requirement.from_pipfile(k, section[k]._data) @@ -81,24 +161,26 @@ def requirements(self): @property def dev_requirements_list(self): - return [{name: entry._data} for name, entry in self.develop.items()] + return [{name: entry._data} for name, entry in self._lockfile.develop.items()] @property def requirements_list(self): - return [{name: entry._data} for name, entry in self.develop.items()] + return [{name: entry._data} for name, entry in self._lockfile.default.items()] def write(self): - open_kwargs = {"newline": self.newlines} - with atomic_open_for_write(self.path.as_posix(), **open_kwargs) as f: - super(Lockfile, self).dump(f, encoding="utf-8") + self.projectfile.model = copy.deepcopy(self._lockfile) + self.projectfile.write() def as_requirements(self, include_hashes=False, dev=False): """Returns a list of requirements in pip-style format""" lines = [] section = self.dev_requirements if dev else self.requirements for req in section: - r = req.as_line() - if not include_hashes: - r = r.split("--hash", 1)[0] + kwargs = { + "include_hashes": include_hashes, + } + if req.editable: + kwargs["include_markers"] = False + r = req.as_line(**kwargs) lines.append(r.strip()) return lines diff --git a/pipenv/vendor/requirementslib/models/pipfile.py b/pipenv/vendor/requirementslib/models/pipfile.py index 3a6f5b1ee8..94e9a2a1f9 100644 --- a/pipenv/vendor/requirementslib/models/pipfile.py +++ b/pipenv/vendor/requirementslib/models/pipfile.py @@ -1,64 +1,144 @@ # -*- coding: utf-8 -*- -from vistir.compat import Path + +from __future__ import absolute_import, unicode_literals, print_function + +import attr +import copy +import os + +from vistir.compat import Path, FileNotFoundError 
def __getattr__(self, k, *args, **kwargs):
    """Delegate unknown attribute lookups to the wrapped plette ``Pipfile``.

    Attributes defined on this wrapper win; anything else is looked up on
    the underlying ``plette.pipfiles.Pipfile`` stored in ``_pipfile``.
    """
    retval = None
    # Fetch the wrapped model via object.__getattribute__ so this hook is
    # not re-entered recursively.
    pipfile = super(Pipfile, self).__getattribute__("_pipfile")
    try:
        return super(Pipfile, self).__getattribute__(k)
    except AttributeError:
        retval = getattr(pipfile, k, None)
        if not retval:
            # NOTE(review): falsy-but-present values (0, "", []) on the
            # wrapped model reach this retry, and __getattribute__ accepts
            # no extra positional arguments -- confirm *args/**kwargs are
            # always empty when this path is taken.
            retval = super(Pipfile, self).__getattribute__(k, *args, **kwargs)
    return retval
@classmethod
def load_projectfile(cls, path, create=False):
    """Given a path, load or create the necessary pipfile.

    :param str path: Path to the project root or pipfile
    :param bool create: Whether to create the pipfile if not found, defaults to False
    :raises RuntimeError: Thrown if no path is supplied
    :raises FileNotFoundError: Thrown if the project root directory doesn't exist
    :raises RequirementError: Thrown if the pipfile doesn't exist and ``create=False``
    :return: A project file instance for the supplied project
    :rtype: :class:`~requirementslib.models.project.ProjectFile`
    """
    if not path:
        raise RuntimeError("Must pass a path to classmethod 'Pipfile.load'")
    if not isinstance(path, Path):
        path = Path(path)
    # Accept either the project directory or the Pipfile itself.
    pipfile_path = path if path.name == "Pipfile" else path.joinpath("Pipfile")
    project_path = pipfile_path.parent
    if not project_path.exists():
        raise FileNotFoundError("%s is not a valid project path!" % path)
    elif not pipfile_path.exists() or not pipfile_path.is_file():
        if not create:
            raise RequirementError("%s is not a valid Pipfile" % pipfile_path)
    return cls.read_projectfile(pipfile_path.as_posix())
@classmethod
def load(cls, path, create=False):
    """Given a path, load or create the necessary pipfile.

    :param str path: Path to the project root or pipfile
    :param bool create: Whether to create the pipfile if not found, defaults to False
    :raises RuntimeError: Thrown if no path is supplied
    :raises FileNotFoundError: Thrown if the project root directory doesn't exist
    :raises RequirementError: Thrown if the pipfile doesn't exist and ``create=False``
    :return: A pipfile instance pointing at the supplied project
    :rtype: :class:`~requirementslib.models.pipfile.Pipfile`
    """

    projectfile = cls.load_projectfile(path, create=create)
    pipfile = projectfile.model
    # Entries are plette models; ``_data`` exposes the raw dict form that
    # Requirement.from_pipfile expects.
    dev_requirements = [
        Requirement.from_pipfile(k, v._data) for k, v in pipfile.get("dev-packages", {}).items()
    ]
    requirements = [
        Requirement.from_pipfile(k, v._data) for k, v in pipfile.get("packages", {}).items()
    ]
    creation_args = {
        "projectfile": projectfile,
        "pipfile": pipfile,
        "dev_requirements": dev_requirements,
        "requirements": requirements,
        "path": Path(projectfile.location)
    }
    return cls(**creation_args)
self.get('dev-packages', {}) + return self._pipfile.get('dev-packages', {}) @property def packages(self, as_requirements=True): if as_requirements: return self.requirements - return self.get('packages', {}) + return self._pipfile.get('packages', {}) diff --git a/pipenv/vendor/requirementslib/models/project.py b/pipenv/vendor/requirementslib/models/project.py new file mode 100644 index 0000000000..f6e037d651 --- /dev/null +++ b/pipenv/vendor/requirementslib/models/project.py @@ -0,0 +1,241 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, unicode_literals + +import collections +import io +import os + +import attr +import packaging.markers +import packaging.utils +import plette +import plette.models +import six +import tomlkit + + +SectionDifference = collections.namedtuple("SectionDifference", [ + "inthis", "inthat", +]) +FileDifference = collections.namedtuple("FileDifference", [ + "default", "develop", +]) + + +def _are_pipfile_entries_equal(a, b): + a = {k: v for k, v in a.items() if k not in ("markers", "hashes", "hash")} + b = {k: v for k, v in b.items() if k not in ("markers", "hashes", "hash")} + if a != b: + return False + try: + marker_eval_a = packaging.markers.Marker(a["markers"]).evaluate() + except (AttributeError, KeyError, TypeError, ValueError): + marker_eval_a = True + try: + marker_eval_b = packaging.markers.Marker(b["markers"]).evaluate() + except (AttributeError, KeyError, TypeError, ValueError): + marker_eval_b = True + return marker_eval_a == marker_eval_b + + +DEFAULT_NEWLINES = "\n" + + +def preferred_newlines(f): + if isinstance(f.newlines, six.text_type): + return f.newlines + return DEFAULT_NEWLINES + + +@attr.s +class ProjectFile(object): + """A file in the Pipfile project. 
+ """ + location = attr.ib() + line_ending = attr.ib() + model = attr.ib() + + @classmethod + def read(cls, location, model_cls, invalid_ok=False): + try: + with io.open(location, encoding="utf-8") as f: + model = model_cls.load(f) + line_ending = preferred_newlines(f) + except Exception: + if not invalid_ok: + raise + model = None + line_ending = DEFAULT_NEWLINES + return cls(location=location, line_ending=line_ending, model=model) + + def write(self): + kwargs = {"encoding": "utf-8", "newline": self.line_ending} + with io.open(self.location, "w", **kwargs) as f: + self.model.dump(f) + + def dumps(self): + strio = six.StringIO() + self.model.dump(strio) + return strio.getvalue() + + +@attr.s +class Project(object): + + root = attr.ib() + _p = attr.ib(init=False) + _l = attr.ib(init=False) + + def __attrs_post_init__(self): + self.root = root = os.path.abspath(self.root) + self._p = ProjectFile.read( + os.path.join(root, "Pipfile"), + plette.Pipfile, + ) + self._l = ProjectFile.read( + os.path.join(root, "Pipfile.lock"), + plette.Lockfile, + invalid_ok=True, + ) + + @property + def pipfile(self): + return self._p.model + + @property + def pipfile_location(self): + return self._p.location + + @property + def lockfile(self): + return self._l.model + + @property + def lockfile_location(self): + return self._l.location + + @lockfile.setter + def lockfile(self, new): + self._l.model = new + + def is_synced(self): + return self.lockfile and self.lockfile.is_up_to_date(self.pipfile) + + def _get_pipfile_section(self, develop, insert=True): + name = "dev-packages" if develop else "packages" + try: + section = self.pipfile[name] + except KeyError: + section = plette.models.PackageCollection(tomlkit.table()) + if insert: + self.pipfile[name] = section + return section + + def contains_key_in_pipfile(self, key): + sections = [ + self._get_pipfile_section(develop=False, insert=False), + self._get_pipfile_section(develop=True, insert=False), + ] + return any( + 
def remove_keys_from_pipfile(self, keys, default, develop):
    """Drop the given package names from the selected Pipfile sections.

    :param keys: Iterable of package names; matched after canonicalization.
    :param bool default: Also remove from the ``packages`` section.
    :param bool develop: Also remove from the ``dev-packages`` section.
    """
    canonical = {packaging.utils.canonicalize_name(key) for key in keys}
    targets = [
        self._get_pipfile_section(develop=use_dev, insert=False)
        for use_dev, selected in ((False, default), (True, develop))
        if selected
    ]
    for section in targets:
        doomed = [
            name for name in section
            if packaging.utils.canonicalize_name(name) in canonical
        ]
        # Bypass the plette wrapper and mutate the raw backing dict directly.
        for name in doomed:
            del section._data[name]
def difference_lockfile(self, lockfile):
    """Generate a difference between the current and given lockfiles.

    Returns a 2-tuple containing differences in the default and develop
    sections.

    Each element is a 2-tuple of dicts. The first, `inthis`, contains
    entries only present in the current lockfile; the second, `inthat`,
    contains entries only present in the given one.

    If a key exists in both this and that, but the values differ, the key
    is present in both dicts, pointing to values from each file.
    """
    diff_data = {
        "default": SectionDifference({}, {}),
        "develop": SectionDifference({}, {}),
    }
    for section_name, section_diff in diff_data.items():
        # Either lockfile may lack a section (KeyError) or be absent
        # entirely (TypeError from indexing None).
        try:
            this = self.lockfile[section_name]._data
        except (KeyError, TypeError):
            this = {}
        try:
            that = lockfile[section_name]._data
        except (KeyError, TypeError):
            that = {}
        for key, this_value in this.items():
            try:
                that_value = that[key]
            except KeyError:
                # Only present here.
                section_diff.inthis[key] = this_value
                continue
            # Present in both but different: record both values.
            if not _are_pipfile_entries_equal(this_value, that_value):
                section_diff.inthis[key] = this_value
                section_diff.inthat[key] = that_value
        for key, that_value in that.items():
            if key not in this:
                # Only present in the other lockfile.
                section_diff.inthat[key] = that_value
    return FileDifference(**diff_data)
@property
def line_part(self):
    """The package-name portion of this requirement's requirements.txt line."""
    # FIXME: This should actually be canonicalized but for now we have to
    # simply lowercase it and replace underscores, since full canonicalization
    # also replaces dots and that doesn't actually work when querying the index
    return "{0}".format(normalize_name(self.name))
def normalize_name(pkg):
    """Given a package name, return its normalized, non-canonicalized form.

    Underscores are replaced with hyphens and the name is lowercased, but
    dots are preserved (full PEP 503 canonicalization would also replace
    dots, which breaks index queries).

    :param str pkg: The name of a package
    :return: A normalized package name
    :rtype: str
    :raises TypeError: If ``pkg`` is not a string.
    """

    # Validate explicitly instead of with ``assert`` so the check still runs
    # under ``python -O`` (asserts are stripped in optimized mode).
    if not isinstance(pkg, six.string_types):
        raise TypeError("Expected a string package name, got %r" % (pkg,))
    return pkg.replace("_", "-").lower()
a/pipenv/vendor/vistir/backports/tempfile.py +++ b/pipenv/vendor/vistir/backports/tempfile.py @@ -13,7 +13,7 @@ try: from weakref import finalize except ImportError: - from pipenv.vendor.backports.weakref import finalize + from backports.weakref import finalize __all__ = ["finalize", "NamedTemporaryFile"] diff --git a/pipenv/vendor/vistir/compat.py b/pipenv/vendor/vistir/compat.py index 0c865fe668..88e5f14b21 100644 --- a/pipenv/vendor/vistir/compat.py +++ b/pipenv/vendor/vistir/compat.py @@ -1,6 +1,7 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import, unicode_literals +import errno import os import sys import warnings @@ -16,21 +17,24 @@ "finalize", "partialmethod", "JSONDecodeError", + "FileNotFoundError", "ResourceWarning", "FileNotFoundError", "fs_str", + "lru_cache", "TemporaryDirectory", "NamedTemporaryFile", ] if sys.version_info >= (3, 5): from pathlib import Path - + from functools import lru_cache else: from pathlib2 import Path + from backports.functools_lru_cache import lru_cache if sys.version_info < (3, 3): - from pipenv.vendor.backports.shutil_get_terminal_size import get_terminal_size + from backports.shutil_get_terminal_size import get_terminal_size from .backports.tempfile import NamedTemporaryFile else: from tempfile import NamedTemporaryFile @@ -39,7 +43,7 @@ try: from weakref import finalize except ImportError: - from pipenv.vendor.backports.weakref import finalize + from backports.weakref import finalize try: from functools import partialmethod @@ -57,16 +61,18 @@ class ResourceWarning(Warning): pass class FileNotFoundError(IOError): - pass + """No such file or directory""" + + def __init__(self, *args, **kwargs): + self.errno = errno.ENOENT + super(FileNotFoundError, self).__init__(*args, **kwargs) else: from builtins import ResourceWarning, FileNotFoundError - class ResourceWarning(ResourceWarning): - pass - class FileNotFoundError(FileNotFoundError): - pass +if not sys.warnoptions: + warnings.simplefilter("default", 
@contextmanager
def dummy_spinner(spin_type, text, **kwargs):
    """Yield a no-op stand-in for a spinner.

    The stand-in mirrors the spinner interface (``ok``/``fail``/``write``)
    but simply prints to stdout; ``spin_type`` and extra keyword arguments
    are accepted for signature compatibility and ignored.
    """

    class _NullSpinner(object):
        def __init__(self, text=""):
            self.text = text

        def fail(self, exitcode=1, text=None):
            # Mirror a real spinner's fail(): optionally echo, then abort.
            if text:
                print(text)
            raise SystemExit(exitcode, text)

        def ok(self, text):
            print(text)
            return 0

        def write(self, text):
            print(text)

    yield _NullSpinner(text)
"dots" or "bouncingBar" (default: {"bouncingBar"}) + :param str start_text: Text to start off the spinner with (default: {None}) + :param dict handler_map: Handler map for signals to be handled gracefully (default: {None}) + :param bool nospin: If true, use the dummy spinner (default: {False}) + :return: A spinner object which can be manipulated while alive + :rtype: :class:`~vistir.spin.VistirSpinner` + + Raises: + RuntimeError -- Raised if the spinner extra is not installed + """ + + from .spin import create_spinner + spinner_func = create_spinner + if nospin is False: + try: + import yaspin + except ImportError: + raise RuntimeError( + "Failed to import spinner! Reinstall vistir with command:" + " pip install --upgrade vistir[spinner]" + ) + else: + spinner_name = None + if not start_text: + start_text = "Running..." + with spinner_func( + spinner_name=spinner_name, + text=start_text, + handler_map=handler_map, + nospin=nospin, + ) as _spinner: + yield _spinner + + @contextmanager def atomic_open_for_write(target, binary=False, newline=None, encoding=None): """Atomically open `target` for writing. 
@@ -192,8 +255,11 @@ def open_file(link, session=None): if os.path.isdir(local_path): raise ValueError("Cannot open directory for read: {}".format(link)) else: - with open(local_path, "rb") as local_file: + try: + local_file = io.open(local_path, "rb") yield local_file + finally: + local_file.close() else: # Remote URL headers = {"Accept-Encoding": "identity"} diff --git a/pipenv/vendor/vistir/misc.py b/pipenv/vendor/vistir/misc.py index 44607a9825..f42b4ad165 100644 --- a/pipenv/vendor/vistir/misc.py +++ b/pipenv/vendor/vistir/misc.py @@ -2,19 +2,24 @@ from __future__ import absolute_import, unicode_literals import json +import logging import locale import os import subprocess import sys from collections import OrderedDict -from contextlib import contextmanager from functools import partial import six from .cmdparse import Script from .compat import Path, fs_str, partialmethod +from .contextmanagers import spinner as spinner + +if os.name != "nt": + class WindowsError(OSError): + pass __all__ = [ @@ -30,6 +35,22 @@ ] +def _get_logger(name=None, level="ERROR"): + if not name: + name = __name__ + if isinstance(level, six.string_types): + level = getattr(logging, level.upper()) + logger = logging.getLogger(name) + logger.setLevel(level) + formatter = logging.Formatter( + "%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s" + ) + handler = logging.StreamHandler() + handler.setFormatter(formatter) + logger.addHandler(handler) + return logger + + def shell_escape(cmd): """Escape strings for use in :func:`~subprocess.Popen` and :func:`run`. 
@@ -75,9 +96,11 @@ def dedup(iterable): return iter(OrderedDict.fromkeys(iterable)) -def _spawn_subprocess(script, env={}, block=True, cwd=None, combine_stderr=True): +def _spawn_subprocess(script, env=None, block=True, cwd=None, combine_stderr=True): from distutils.spawn import find_executable + if not env: + env = {} command = find_executable(script.command) options = { "env": env, @@ -102,7 +125,7 @@ def _spawn_subprocess(script, env={}, block=True, cwd=None, combine_stderr=True) try: return subprocess.Popen(cmd, **options) except WindowsError as e: - if e.winerror != 193: + if getattr(e, "winerror", 9999) != 193: raise options["shell"] = True # Try shell mode to use Windows's file association for file launch. @@ -111,7 +134,7 @@ def _spawn_subprocess(script, env={}, block=True, cwd=None, combine_stderr=True) def _create_subprocess( cmd, - env={}, + env=None, block=True, return_object=False, cwd=os.curdir, @@ -120,6 +143,8 @@ def _create_subprocess( combine_stderr=False, display_limit=200 ): + if not env: + env = {} try: c = _spawn_subprocess(cmd, env=env, block=block, cwd=cwd, combine_stderr=combine_stderr) @@ -128,11 +153,13 @@ def _create_subprocess( raise if not block: c.stdin.close() + log_level = "DEBUG" if verbose else "WARN" + logger = _get_logger(cmd._parts[0], level=log_level) output = [] err = [] spinner_orig_text = "" if spinner: - spinner_orig_text = spinner.text + spinner_orig_text = getattr(spinner, "text", "") streams = { "stdout": c.stdout, "stderr": c.stderr @@ -147,7 +174,7 @@ def _create_subprocess( line = to_text(stream.readline()) if not line: continue - line = line.rstrip() + line = to_text("{0}".format(line.rstrip())) if outstream == "stderr": stderr_line = line else: @@ -155,15 +182,24 @@ def _create_subprocess( if not (stdout_line or stderr_line): break if stderr_line: - err.append(line) + err.append(stderr_line) + if verbose: + if spinner: + spinner.write_err(fs_str(stderr_line)) + else: + logger.error(stderr_line) if stdout_line: 
output.append(stdout_line) display_line = stdout_line if len(stdout_line) > display_limit: display_line = "{0}...".format(stdout_line[:display_limit]) if verbose: - spinner.write(display_line) - spinner.text = "{0} {1}".format(spinner_orig_text, display_line) + if spinner: + spinner.write(fs_str(display_line)) + else: + logger.debug(display_line) + if spinner: + spinner.text = fs_str("{0} {1}".format(spinner_orig_text, display_line)) continue try: c.wait() @@ -175,18 +211,22 @@ def _create_subprocess( if spinner: if c.returncode > 0: spinner.fail("Failed...cleaning up...") - spinner.text = "Complete!" - spinner.ok("✔") + else: + spinner.text = "Complete!" + if not os.name == "nt": + spinner.ok("✔") + else: + spinner.ok() c.out = "\n".join(output) c.err = "\n".join(err) if err else "" else: c.out, c.err = c.communicate() + if not block: + c.wait() + c.out = fs_str("{0}".format(c.out)) if c.out else fs_str("") + c.err = fs_str("{0}".format(c.err)) if c.err else fs_str("") if not return_object: - if not block: - c.wait() - out = c.out if c.out else "" - err = c.err if c.err else "" - return out.strip(), err.strip() + return c.out.strip(), c.err.strip() return c @@ -198,7 +238,7 @@ def run( cwd=None, verbose=False, nospin=False, - spinner=None, + spinner_name=None, combine_stderr=True, display_limit=200 ): @@ -211,7 +251,7 @@ def run( :param str cwd: Current working directory contect to use for spawning the subprocess. :param bool verbose: Whether to print stdout in real time when non-blocking. :param bool nospin: Whether to disable the cli spinner. - :param str spinner: The name of the spinner to use if enabled, defaults to bouncingBar + :param str spinner_name: The name of the spinner to use if enabled, defaults to bouncingBar :param bool combine_stderr: Optionally merge stdout and stderr in the subprocess, false if nonblocking. :param int dispay_limit: The max width of output lines to display when using a spinner. 
:returns: A 2-tuple of (output, error) or a :class:`subprocess.Popen` object. @@ -221,8 +261,10 @@ def run( this functionality. """ - if not env: - env = os.environ.copy() + _env = os.environ.copy() + if env: + _env.update(env) + env = _env if six.PY2: fs_encode = partial(to_bytes, encoding=locale_encoding) _env = {fs_encode(k): fs_encode(v) for k, v in os.environ.items()} @@ -230,8 +272,8 @@ def run( _env[fs_encode(key)] = fs_encode(val) else: _env = {k: fs_str(v) for k, v in os.environ.items()} - if not spinner: - spinner = "bouncingBar" + if not spinner_name: + spinner_name = "bouncingBar" if six.PY2: if isinstance(cmd, six.string_types): cmd = cmd.encode("utf-8") @@ -241,48 +283,7 @@ def run( cmd = Script.parse(cmd) if block or not return_object: combine_stderr = False - sigmap = {} - if nospin is False: - try: - import signal - from yaspin import yaspin - from yaspin import spinners - from yaspin.signal_handlers import fancy_handler - except ImportError: - raise RuntimeError( - "Failed to import spinner! 
Reinstall vistir with command:" - " pip install --upgrade vistir[spinner]" - ) - else: - animation = getattr(spinners.Spinners, spinner) - sigmap = { - signal.SIGINT: fancy_handler - } - if os.name == "nt": - sigmap.update({ - signal.CTRL_C_EVENT: fancy_handler, - signal.CTRL_BREAK_EVENT: fancy_handler - }) - spinner_func = yaspin - else: - - @contextmanager - def spinner_func(spin_type, text, **kwargs): - class FakeClass(object): - def __init__(self, text=""): - self.text = text - - def ok(self, text): - return - - def write(self, text): - print(text) - - myobj = FakeClass(text) - yield myobj - - animation = None - with spinner_func(animation, sigmap=sigmap, text="Running...") as sp: + with spinner(spinner_name=spinner_name, start_text="Running...", nospin=nospin) as sp: return _create_subprocess( cmd, env=_env, diff --git a/pipenv/vendor/vistir/path.py b/pipenv/vendor/vistir/path.py index 166282e864..ce7ecee055 100644 --- a/pipenv/vendor/vistir/path.py +++ b/pipenv/vendor/vistir/path.py @@ -15,8 +15,7 @@ from six.moves import urllib_parse from six.moves.urllib import request as urllib_request -from .compat import Path, _fs_encoding, TemporaryDirectory -from .misc import locale_encoding, to_bytes, to_text +from .compat import Path, _fs_encoding, TemporaryDirectory, ResourceWarning __all__ = [ @@ -74,6 +73,7 @@ def normalize_drive(path): identified with either upper or lower cased drive names. The case is always converted to uppercase because it seems to be preferred. 
""" + from .misc import to_text if os.name != "nt" or not isinstance(path, six.string_types): return path @@ -95,6 +95,7 @@ def path_to_url(path): >>> path_to_url("/home/user/code/myrepo/myfile.zip") 'file:///home/user/code/myrepo/myfile.zip' """ + from .misc import to_text, to_bytes if not path: return path @@ -108,6 +109,7 @@ def url_to_path(url): Follows logic taken from pip's equivalent function """ + from .misc import to_bytes assert is_file_url(url), "Only file: urls can be converted to local paths" _, netloc, path, _, _ = urllib_parse.urlsplit(url) # Netlocs are UNC paths @@ -120,14 +122,16 @@ def url_to_path(url): def is_valid_url(url): """Checks if a given string is an url""" + from .misc import to_text if not url: return url - pieces = urllib_parse.urlparse(url) + pieces = urllib_parse.urlparse(to_text(url)) return all([pieces.scheme, pieces.netloc]) def is_file_url(url): """Returns true if the given url is a file url""" + from .misc import to_text if not url: return False if not isinstance(url, six.string_types): @@ -144,6 +148,7 @@ def is_readonly_path(fn): Permissions check is `bool(path.stat & stat.S_IREAD)` or `not os.access(path, os.W_OK)` """ + from .misc import to_bytes fn = to_bytes(fn, encoding="utf-8") if os.path.exists(fn): return bool(os.stat(fn).st_mode & stat.S_IREAD) and not os.access(fn, os.W_OK) @@ -158,7 +163,8 @@ def mkdir_p(newdir, mode=0o777): :raises: OSError if a file is encountered along the way """ # http://code.activestate.com/recipes/82465-a-friendly-mkdir/ - newdir = abspathu(to_bytes(newdir, "utf-8")) + from .misc import to_bytes, to_text + newdir = to_bytes(newdir, "utf-8") if os.path.exists(newdir): if not os.path.isdir(newdir): raise OSError( @@ -166,17 +172,17 @@ def mkdir_p(newdir, mode=0o777): newdir ) ) - pass else: - head, tail = os.path.split(newdir) + head, tail = os.path.split(to_bytes(newdir, encoding="utf-8")) # Make sure the tail doesn't point to the asame place as the head - tail_and_head_match = 
os.path.relpath(tail, start=os.path.basename(head)) == "." + curdir = to_bytes(".", encoding="utf-8") + tail_and_head_match = os.path.relpath(tail, start=os.path.basename(head)) == curdir if tail and not tail_and_head_match and not os.path.isdir(newdir): target = os.path.join(head, tail) if os.path.exists(target) and os.path.isfile(target): raise OSError( "A file with the same name as the desired dir, '{0}', already exists.".format( - newdir + to_text(newdir, encoding="utf-8") ) ) os.makedirs(os.path.join(head, tail), mode) @@ -210,9 +216,11 @@ def create_tracked_tempdir(*args, **kwargs): The return value is the path to the created directory. """ + tempdir = TemporaryDirectory(*args, **kwargs) TRACKED_TEMPORARY_DIRECTORIES.append(tempdir) atexit.register(tempdir.cleanup) + warnings.simplefilter("default", ResourceWarning) return tempdir.name @@ -223,6 +231,7 @@ def set_write_bit(fn): :param str fn: The target filename or path """ + from .misc import to_bytes, locale_encoding fn = to_bytes(fn, encoding=locale_encoding) if not os.path.exists(fn): return @@ -243,10 +252,17 @@ def rmtree(directory, ignore_errors=False): Setting `ignore_errors=True` may cause this to silently fail to delete the path """ + from .misc import locale_encoding, to_bytes directory = to_bytes(directory, encoding=locale_encoding) - shutil.rmtree( - directory, ignore_errors=ignore_errors, onerror=handle_remove_readonly - ) + try: + shutil.rmtree( + directory, ignore_errors=ignore_errors, onerror=handle_remove_readonly + ) + except (IOError, OSError) as exc: + # Ignore removal failures where the file doesn't exist + if exc.errno == errno.ENOENT: + pass + raise def handle_remove_readonly(func, path, exc): @@ -263,35 +279,41 @@ def handle_remove_readonly(func, path, exc): :func:`set_write_bit` on the target path and try again. 
""" # Check for read-only attribute - from .compat import ResourceWarning + if six.PY2: + from .compat import ResourceWarning + from .misc import to_bytes + PERM_ERRORS = (errno.EACCES, errno.EPERM) default_warning_message = ( "Unable to remove file due to permissions restriction: {!r}" ) # split the initial exception out into its type, exception, and traceback exc_type, exc_exception, exc_tb = exc - path = to_bytes(path) + path = to_bytes(path, encoding="utf-8") if is_readonly_path(path): # Apply write permission and call original function set_write_bit(path) try: func(path) except (OSError, IOError) as e: - if e.errno in [errno.EACCES, errno.EPERM]: - warnings.warn( - default_warning_message.format( - to_text(path, encoding=locale_encoding) - ), ResourceWarning - ) + if e.errno in PERM_ERRORS: + warnings.warn(default_warning_message.format(path), ResourceWarning) return - if exc_exception.errno in [errno.EACCES, errno.EPERM]: - warnings.warn( - default_warning_message.format(to_text(path)), - ResourceWarning - ) - return - - raise + if exc_exception.errno in PERM_ERRORS: + set_write_bit(path) + try: + func(path) + except (OSError, IOError) as e: + if e.errno in PERM_ERRORS: + warnings.warn(default_warning_message.format(path), ResourceWarning) + elif e.errno == errno.ENOENT: # File already gone + return + else: + raise + return + else: + raise + raise exc def walk_up(bottom): @@ -356,6 +378,7 @@ def get_converted_relative_path(path, relative_to=None): >>> vistir.path.get_converted_relative_path('/home/user/code/myrepo/myfolder') '.' 
""" + from .misc import to_text, to_bytes # noqa if not relative_to: relative_to = os.getcwdu() if six.PY2 else os.getcwd() diff --git a/pipenv/vendor/vistir/spin.py b/pipenv/vendor/vistir/spin.py new file mode 100644 index 0000000000..d2cddd7947 --- /dev/null +++ b/pipenv/vendor/vistir/spin.py @@ -0,0 +1,149 @@ +# -*- coding=utf-8 -*- +import os +import signal +import sys + +from .termcolors import colored +from .compat import fs_str + +import cursor +import functools +try: + import yaspin +except ImportError: + yaspin = None + Spinners = None +else: + from yaspin.spinners import Spinners + +handler = None +if yaspin and os.name == "nt": + handler = yaspin.signal_handlers.default_handler +elif yaspin and os.name != "nt": + handler = yaspin.signal_handlers.fancy_handler + +CLEAR_LINE = chr(27) + "[K" + + +class DummySpinner(object): + def __init__(self, text="", **kwargs): + self.text = text + + def __enter__(self): + if self.text: + self.write(self.text) + return self + + def __exit__(self, exc_type, exc_val, traceback): + if not exc_type: + self.ok() + else: + self.write_err(traceback) + return False + + def fail(self, exitcode=1, text=None): + if text: + self.write_err(text) + raise SystemExit(exitcode, text) + + def ok(self, text=None): + if text: + self.write(self.text) + return 0 + + def write(self, text=None): + if text: + line = fs_str("{0}\n".format(text)) + sys.stdout.write(line) + + def write_err(self, text=None): + if text: + line = fs_str("{0}\n".format(text)) + sys.stderr.write(line) + + +base_obj = yaspin.core.Yaspin if yaspin is not None else DummySpinner + + +class VistirSpinner(base_obj): + def __init__(self, *args, **kwargs): + """Get a spinner object or a dummy spinner to wrap a context. + + Keyword Arguments: + :param str spinner_name: A spinner type e.g. 
"dots" or "bouncingBar" (default: {"bouncingBar"}) + :param str start_text: Text to start off the spinner with (default: {None}) + :param dict handler_map: Handler map for signals to be handled gracefully (default: {None}) + :param bool nospin: If true, use the dummy spinner (default: {False}) + """ + + self.handler = handler + sigmap = {} + if handler: + sigmap.update({ + signal.SIGINT: handler, + signal.SIGTERM: handler + }) + handler_map = kwargs.pop("handler_map", {}) + if os.name == "nt": + sigmap[signal.SIGBREAK] = handler + else: + sigmap[signal.SIGALRM] = handler + if handler_map: + sigmap.update(handler_map) + spinner_name = kwargs.pop("spinner_name", "bouncingBar") + text = kwargs.pop("start_text", "") + " " + kwargs.pop("text", "") + if not text: + text = "Running..." + kwargs["sigmap"] = sigmap + kwargs["spinner"] = getattr(Spinners, spinner_name, Spinners.bouncingBar) + super(VistirSpinner, self).__init__(*args, **kwargs) + self.is_dummy = bool(yaspin is None) + + def fail(self, exitcode=1, *args, **kwargs): + super(VistirSpinner, self).fail(**kwargs) + + def ok(self, *args, **kwargs): + super(VistirSpinner, self).ok(*args, **kwargs) + + def write(self, *args, **kwargs): + super(VistirSpinner, self).write(*args, **kwargs) + + def write_err(self, text): + """Write error text in the terminal without breaking the spinner.""" + + sys.stderr.write("\r") + self._clear_err() + text = fs_str("{0}\n".format(text)) + sys.stderr.write(text) + + def _compose_color_func(self): + fn = functools.partial( + colored, + color=self._color, + on_color=self._on_color, + attrs=list(self._attrs), + ) + return fn + + @staticmethod + def _hide_cursor(): + cursor.hide() + + @staticmethod + def _show_cursor(): + cursor.show() + + @staticmethod + def _clear_err(): + sys.stderr.write(CLEAR_LINE) + + @staticmethod + def _clear_line(): + sys.stdout.write(CLEAR_LINE) + + +def create_spinner(*args, **kwargs): + nospin = kwargs.pop("nospin", False) + if nospin: + return 
DummySpinner(*args, **kwargs) + return VistirSpinner(*args, **kwargs) diff --git a/pipenv/vendor/vistir/termcolors.py b/pipenv/vendor/vistir/termcolors.py new file mode 100644 index 0000000000..6f3ad32cf7 --- /dev/null +++ b/pipenv/vendor/vistir/termcolors.py @@ -0,0 +1,116 @@ +# -*- coding=utf-8 -*- +from __future__ import absolute_import, print_function, unicode_literals +import colorama +import os + + +ATTRIBUTES = dict( + list(zip([ + 'bold', + 'dark', + '', + 'underline', + 'blink', + '', + 'reverse', + 'concealed' + ], + list(range(1, 9)) + )) + ) +del ATTRIBUTES[''] + + +HIGHLIGHTS = dict( + list(zip([ + 'on_grey', + 'on_red', + 'on_green', + 'on_yellow', + 'on_blue', + 'on_magenta', + 'on_cyan', + 'on_white' + ], + list(range(40, 48)) + )) + ) + + +COLORS = dict( + list(zip([ + 'grey', + 'red', + 'green', + 'yellow', + 'blue', + 'magenta', + 'cyan', + 'white', + ], + list(range(30, 38)) + )) + ) + + +RESET = colorama.Style.RESET_ALL + + +def colored(text, color=None, on_color=None, attrs=None): + """Colorize text using a reimplementation of the colorizer from + https://github.com/pavdmyt/yaspin so that it works on windows. + + Available text colors: + red, green, yellow, blue, magenta, cyan, white. + + Available text highlights: + on_red, on_green, on_yellow, on_blue, on_magenta, on_cyan, on_white. + + Available attributes: + bold, dark, underline, blink, reverse, concealed. 
+ + Example: + colored('Hello, World!', 'red', 'on_grey', ['blue', 'blink']) + colored('Hello, World!', 'green') + """ + if os.getenv('ANSI_COLORS_DISABLED') is None: + style = "NORMAL" + if 'bold' in attrs: + style = "BRIGHT" + attrs.remove('bold') + if color is not None: + text = text = "%s%s%s%s%s" % ( + getattr(colorama.Fore, color), + getattr(colorama.Style, style), + text, + colorama.Fore.RESET, + colorama.Style.NORMAL, + ) + + if on_color is not None: + text = "%s%s%s%s" % ( + getattr(colorama.Back, color), + text, + colorama.Back.RESET, + colorama.Style.NORMAL, + ) + + if attrs is not None: + fmt_str = "%s[%%dm%%s%s[9m" % ( + chr(27), + chr(27) + ) + for attr in attrs: + text = fmt_str % (ATTRIBUTES[attr], text) + + text += RESET + return text + + +def cprint(text, color=None, on_color=None, attrs=None, **kwargs): + """Print colorize text. + + It accepts arguments of print function. + """ + + print((colored(text, color, on_color, attrs)), **kwargs)