diff --git a/.style.yapf b/.style.yapf new file mode 120000 index 00000000000..ef4a6ab8e8a --- /dev/null +++ b/.style.yapf @@ -0,0 +1 @@ +readthedocs-common/style.yapf \ No newline at end of file diff --git a/docs/yaml-config.rst b/docs/yaml-config.rst index 6fce83495db..9a89c5b2a0e 100644 --- a/docs/yaml-config.rst +++ b/docs/yaml-config.rst @@ -119,11 +119,11 @@ This is the version of Python to use when building your documentation. If you specify only the major version of Python, the highest supported minor version will be selected. -.. warning:: +.. warning:: The supported Python versions depends on the version of the build image your project is using. The default build image that is used to build documentation - contains support for Python ``2.7`` and ``3.5``. + contains support for Python ``2.7`` and ``3.5``. See the :ref:`yaml_build_image` for more information on supported Python versions. .. code-block:: yaml diff --git a/readthedocs-common b/readthedocs-common index 9d766e1c076..b0dd462252a 160000 --- a/readthedocs-common +++ b/readthedocs-common @@ -1 +1 @@ -Subproject commit 9d766e1c076140368baf1c706ef1335ba51c7a17 +Subproject commit b0dd462252acd151f29ce490adfbe50a731360f0 diff --git a/readthedocs/builds/models.py b/readthedocs/builds/models.py index ba439a66707..926e5b4b5d7 100644 --- a/readthedocs/builds/models.py +++ b/readthedocs/builds/models.py @@ -7,15 +7,15 @@ import logging import os.path import re -from builtins import object from shutil import rmtree +from builtins import object from django.conf import settings from django.core.urlresolvers import reverse from django.db import models from django.utils.encoding import python_2_unicode_compatible -from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext +from django.utils.translation import ugettext_lazy as _ from guardian.shortcuts import assign from taggit.managers import TaggableManager @@ -35,7 +35,10 @@ from .version_slug import VersionSlugField DEFAULT_VERSION_PRIVACY_LEVEL = getattr( - settings, 'DEFAULT_VERSION_PRIVACY_LEVEL', 'public') + settings, + 'DEFAULT_VERSION_PRIVACY_LEVEL', + 'public', +) log = logging.getLogger(__name__) @@ -75,7 +78,10 @@ class Version(models.Model): #: filesystem to determine how the paths for this version are called. It #: must not be used for any other identifying purposes. slug = VersionSlugField( - _('Slug'), max_length=255, populate_from='verbose_name') + _('Slug'), + max_length=255, + populate_from='verbose_name', + ) supported = models.BooleanField(_('Supported'), default=True) active = models.BooleanField(_('Active'), default=False) @@ -99,7 +105,8 @@ class Meta(object): permissions = ( # Translators: Permission around whether a user can view the # version - ('view_version', _('View Version')),) + ('view_version', _('View Version')), + ) def __str__(self): return ugettext( @@ -107,7 +114,8 @@ def __str__(self): version=self.verbose_name, project=self.project, pk=self.pk, - )) + ), + ) @property def commit_name(self): @@ -148,7 +156,9 @@ def commit_name(self): # If we came that far it's not a special version nor a branch or tag. # Therefore just return the identifier to make a safe guess. - log.debug('TODO: Raise an exception here. Testing what cases it happens') + log.debug( + 'TODO: Raise an exception here. 
Testing what cases it happens', + ) return self.identifier def get_absolute_url(self): @@ -162,7 +172,9 @@ def get_absolute_url(self): ) private = self.privacy_level == PRIVATE return self.project.get_docs_url( - version_slug=self.slug, private=private) + version_slug=self.slug, + private=private, + ) def save(self, *args, **kwargs): # pylint: disable=arguments-differ """Add permissions to the Version for all owners on save.""" @@ -175,7 +187,10 @@ def save(self, *args, **kwargs): # pylint: disable=arguments-differ except Exception: log.exception('failed to sync supported versions') broadcast( - type='app', task=tasks.symlink_project, args=[self.project.pk]) + type='app', + task=tasks.symlink_project, + args=[self.project.pk], + ) return obj def delete(self, *args, **kwargs): # pylint: disable=arguments-differ @@ -183,7 +198,10 @@ def delete(self, *args, **kwargs): # pylint: disable=arguments-differ log.info('Removing files for version %s', self.slug) broadcast(type='app', task=tasks.clear_artifacts, args=[self.pk]) broadcast( - type='app', task=tasks.symlink_project, args=[self.project.pk]) + type='app', + task=tasks.symlink_project, + args=[self.project.pk], + ) super(Version, self).delete(*args, **kwargs) @property @@ -209,19 +227,27 @@ def get_downloads(self, pretty=False): data['PDF'] = project.get_production_media_url('pdf', self.slug) if project.has_htmlzip(self.slug): data['HTML'] = project.get_production_media_url( - 'htmlzip', self.slug) + 'htmlzip', + self.slug, + ) if project.has_epub(self.slug): data['Epub'] = project.get_production_media_url( - 'epub', self.slug) + 'epub', + self.slug, + ) else: if project.has_pdf(self.slug): data['pdf'] = project.get_production_media_url('pdf', self.slug) if project.has_htmlzip(self.slug): data['htmlzip'] = project.get_production_media_url( - 'htmlzip', self.slug) + 'htmlzip', + self.slug, + ) if project.has_epub(self.slug): data['epub'] = project.get_production_media_url( - 'epub', self.slug) + 'epub', + self.slug, + ) return data def get_conf_py_path(self): @@ -247,13 +273,18 @@ def clean_build_path(self): try: path = self.get_build_path() if path is not None: - log.debug('Removing build path {0} for {1}'.format(path, self)) + log.debug('Removing build path %s for %s', path, self) rmtree(path) except OSError: log.exception('Build path cleanup failed') def get_github_url( - self, docroot, filename, source_suffix='.rst', action='view'): + self, + docroot, + filename, + source_suffix='.rst', + action='view', + ): """ Return a GitHub URL for a given filename. 
@@ -295,7 +326,12 @@ def get_github_url( ) def get_gitlab_url( - self, docroot, filename, source_suffix='.rst', action='view'): + self, + docroot, + filename, + source_suffix='.rst', + action='view', + ): repo_url = self.project.repo if 'gitlab' not in repo_url: return '' @@ -391,10 +427,17 @@ class VersionAlias(models.Model): """Alias for a ``Version``.""" project = models.ForeignKey( - Project, verbose_name=_('Project'), related_name='aliases') + Project, + verbose_name=_('Project'), + related_name='aliases', + ) from_slug = models.CharField(_('From slug'), max_length=255, default='') to_slug = models.CharField( - _('To slug'), max_length=255, default='', blank=True) + _('To slug'), + max_length=255, + default='', + blank=True, + ) largest = models.BooleanField(_('Largest'), default=False) def __str__(self): @@ -403,7 +446,8 @@ def __str__(self): project=self.project, _from=self.from_slug, to=self.to_slug, - )) + ), + ) @python_2_unicode_compatible @@ -412,13 +456,28 @@ class Build(models.Model): """Build data.""" project = models.ForeignKey( - Project, verbose_name=_('Project'), related_name='builds') + Project, + verbose_name=_('Project'), + related_name='builds', + ) version = models.ForeignKey( - Version, verbose_name=_('Version'), null=True, related_name='builds') + Version, + verbose_name=_('Version'), + null=True, + related_name='builds', + ) type = models.CharField( - _('Type'), max_length=55, choices=BUILD_TYPES, default='html') + _('Type'), + max_length=55, + choices=BUILD_TYPES, + default='html', + ) state = models.CharField( - _('State'), max_length=55, choices=BUILD_STATE, default='finished') + _('State'), + max_length=55, + choices=BUILD_STATE, + default='finished', + ) date = models.DateTimeField(_('Date'), auto_now_add=True) success = models.BooleanField(_('Success'), default=True) @@ -428,15 +487,25 @@ class Build(models.Model): error = models.TextField(_('Error'), default='', blank=True) exit_code = models.IntegerField(_('Exit code'), null=True, blank=True) commit = models.CharField( - _('Commit'), max_length=255, null=True, blank=True) + _('Commit'), + max_length=255, + null=True, + blank=True, + ) length = models.IntegerField(_('Build Length'), null=True, blank=True) builder = models.CharField( - _('Builder'), max_length=255, null=True, blank=True) + _('Builder'), + max_length=255, + null=True, + blank=True, + ) cold_storage = models.NullBooleanField( - _('Cold Storage'), help_text='Build steps stored outside the database.') + _('Cold Storage'), + help_text='Build steps stored outside the database.', + ) # Manager @@ -455,7 +524,8 @@ def __str__(self): self.project.users.all().values_list('username', flat=True), ), pk=self.pk, - )) + ), + ) @models.permalink def get_absolute_url(self): @@ -497,7 +567,10 @@ class BuildCommandResult(BuildCommandResultMixin, models.Model): """Build command for a ``Build``.""" build = models.ForeignKey( - Build, verbose_name=_('Build'), related_name='commands') + Build, + verbose_name=_('Build'), + related_name='commands', + ) command = models.TextField(_('Command')) description = models.TextField(_('Description'), blank=True) @@ -516,7 +589,8 @@ class Meta(object): def __str__(self): return ( ugettext('Build command {pk} for build {build}') - .format(pk=self.pk, build=self.build)) + .format(pk=self.pk, build=self.build) + ) @property def run_time(self): diff --git a/readthedocs/builds/static/builds/js/detail.js b/readthedocs/builds/static/builds/js/detail.js index d4ba4d71e91..ee95682b4ae 100644 --- 
a/readthedocs/builds/static/builds/js/detail.js +++ b/readthedocs/builds/static/builds/js/detail.js @@ -1 +1 @@
[minified browserify bundle hunk omitted: regenerated build artifact, garbled by tag-stripping during extraction. The next diff header was swallowed with it; the surviving hunk below appears to be from readthedocs/core/management/commands/clean_builds.py, its opening context line reconstructed as "if latest_build.date > max_date:"]
if latest_build.date > max_date: - log.warning('{0} is newer than {1}'.format( - latest_build, max_date)) + log.warning( + '%s is newer than %s', + latest_build, + max_date, + ) path = version.get_build_path() if path is not None: log.info( - ('Found stale build path for {0} ' - 'at {1}, last used on {2}').format( - version, path, latest_build.date)) + 'Found stale build path for %s at %s, last used on %s', + version, + path, + latest_build.date, + ) if not options['dryrun']: version.clean_build_path() except Version.DoesNotExist:
diff --git a/readthedocs/core/management/commands/reindex_elasticsearch.py index a2bce6df840..3d8dcfa5a03 100644 --- a/readthedocs/core/management/commands/reindex_elasticsearch.py +++ b/readthedocs/core/management/commands/reindex_elasticsearch.py @@ -1,12 +1,14 @@ -"""Reindex Elastic Search indexes""" +# -*- coding: utf-8 -*- +"""Reindex Elastic Search indexes.""" + +from __future__ import ( + absolute_import, division, print_function, unicode_literals) -from __future__ import absolute_import import logging from optparse import make_option -from django.core.management.base import BaseCommand -from django.core.management.base import CommandError from django.conf import settings +from django.core.management.base import BaseCommand, CommandError from readthedocs.builds.constants import LATEST from readthedocs.builds.models import Version @@ -19,14 +21,11 @@ class Command(BaseCommand): help = __doc__ option_list = BaseCommand.option_list + ( - make_option('-p', - dest='project', - default='', - help='Project to index'), + make_option('-p', dest='project', default='', help='Project to index'), ) def handle(self, *args, **options): - """Build/index all versions or a single project's version""" + """Build/index all versions or a single project's version.""" project = options['project'] queryset = Version.objects.all() @@ -35,13 +34,14 @@ def handle(self, *args, **options): queryset = queryset.filter(project__slug=project) if not queryset.exists(): raise CommandError( - 'No project with slug: {slug}'.format(slug=project)) - log.info("Building all versions for %s", project) + 'No project with slug: {slug}'.format(slug=project), + ) + log.info('Building all versions for %s', project) elif getattr(settings, 'INDEX_ONLY_LATEST', True): queryset = queryset.filter(slug=LATEST) for version in queryset: - log.info("Reindexing %s", version) + log.info('Reindexing %s', version) try: commit = version.project.vcs_repo(version.slug).commit except: # pylint: disable=bare-except @@ -50,7 +50,6 @@ commit = None try: - update_search(version.pk, commit, - delete_non_commit_files=False) + update_search(version.pk, commit, delete_non_commit_files=False) except Exception: - log.exception('Reindex failed for {}'.format(version)) + log.exception('Reindex failed for %s', version) diff --git a/readthedocs/core/signals.py index aa2a30936c0..4c9aebfbc48 100644 
--- a/readthedocs/core/signals.py +++ b/readthedocs/core/signals.py @@ -1,18 +1,19 @@ +# -*- coding: utf-8 -*- """Signal handling for core app.""" -from __future__ import absolute_import +from __future__ import ( + absolute_import, division, print_function, unicode_literals) import logging from corsheaders import signals from django.conf import settings +from django.db.models import Count, Q from django.db.models.signals import pre_delete -from django.dispatch import Signal -from django.db.models import Q, Count -from django.dispatch import receiver +from django.dispatch import Signal, receiver from future.backports.urllib.parse import urlparse -from readthedocs.projects.models import Project, Domain +from readthedocs.projects.models import Domain, Project log = logging.getLogger(__name__) @@ -23,7 +24,6 @@ '/api/v2/sustainability', ] - webhook_github = Signal(providing_args=['project', 'data', 'event']) webhook_gitlab = Signal(providing_args=['project', 'data', 'event']) webhook_bitbucket = Signal(providing_args=['project', 'data', 'event']) @@ -57,16 +57,15 @@ def decide_if_cors(sender, request, **kwargs): # pylint: disable=unused-argumen project = Project.objects.get(slug=project_slug) except Project.DoesNotExist: log.warning( - 'Invalid project passed to domain. [{project}:{domain}'.format( - project=project_slug, - domain=host, - ) + 'Invalid project passed to domain. [%s:%s]', + project_slug, + host, ) return False domain = Domain.objects.filter( Q(domain__icontains=host), - Q(project=project) | Q(project__subprojects__child=project) + Q(project=project) | Q(project__subprojects__child=project), ) if domain.exists(): return True @@ -78,12 +77,16 @@ def decide_if_cors(sender, request, **kwargs): # pylint: disable=unused-argumen def delete_projects_and_organizations(sender, instance, *args, **kwargs): # Here we count the owner list from the projects that the user own # Then exclude the projects where there are more than one owner - projects = instance.projects.all().annotate(num_users=Count('users')).exclude(num_users__gt=1) + projects = instance.projects.all().annotate( + num_users=Count('users'), + ).exclude(num_users__gt=1) # Here we count the users list from the organization that the user belong # Then exclude the organizations where there are more than one user - oauth_organizations = (instance.oauth_organizations.annotate(num_users=Count('users')) - .exclude(num_users__gt=1)) + oauth_organizations = ( + instance.oauth_organizations.annotate(num_users=Count('users')) + .exclude(num_users__gt=1) + ) projects.delete() oauth_organizations.delete() diff --git a/readthedocs/core/static/core/font/fontawesome-webfont.svg b/readthedocs/core/static/core/font/fontawesome-webfont.svg index 855c845e538..52c0773359b 100644 --- a/readthedocs/core/static/core/font/fontawesome-webfont.svg +++ b/readthedocs/core/static/core/font/fontawesome-webfont.svg @@ -8,7 +8,7 @@ Copyright Dave Gandy 2016. All rights reserved. 
[fontawesome-webfont.svg glyph hunk: the SVG path data was lost in extraction, leaving only bare diff markers]
diff --git a/readthedocs/core/static/core/js/autocomplete.js index 3acb82f5128..c97ed99cad7 100644 --- a/readthedocs/core/static/core/js/autocomplete.js +++ b/readthedocs/core/static/core/js/autocomplete.js @@ -1 +1 @@
[minified browserify bundle hunks omitted: regenerated build artifacts, garbled by tag-stripping during extraction]
diff --git a/readthedocs/core/static/core/js/site.js index 7e01634bf36..bd347acc0d7 100644 --- a/readthedocs/core/static/core/js/site.js +++ b/readthedocs/core/static/core/js/site.js @@ -1 +1 @@
[minified bundle hunk omitted: the old and new bundles are otherwise identical; the visible change adds a trailing newline to the file]
diff --git a/readthedocs/core/templatetags/core_tags.py index 7a91df87608..6a23be8f512 100644 --- a/readthedocs/core/templatetags/core_tags.py +++ b/readthedocs/core/templatetags/core_tags.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- """Template tags for core app.""" -from __future__ import absolute_import +from __future__ import ( + absolute_import, division, print_function, unicode_literals) import hashlib @@ -15,30 +17,31 @@ from readthedocs.core.resolver import resolve from readthedocs.projects.models import Project - register = template.Library() @register.filter def gravatar(email, size=48): """ - Hacked from djangosnippets.org, but basically given an email address + Hacked from djangosnippets.org, but basically given an email address. 
render an img tag with the hashed up bits needed for leetness omgwtfstillreading """ - url = "http://www.gravatar.com/avatar.php?%s" % urlencode({ + url = 'http://www.gravatar.com/avatar.php?%s' % urlencode({ 'gravatar_id': hashlib.md5(email).hexdigest(), - 'size': str(size) + 'size': str(size), }) - return ('gravatar' % (url, size, size)) + return ( + 'gravatar' % (url, size, size) + ) -@register.simple_tag(name="doc_url") +@register.simple_tag(name='doc_url') def make_document_url(project, version=None, page=''): if not project: - return "" + return '' return resolve(project=project, version_slug=version, filename=page) @@ -50,7 +53,7 @@ def restructuredtext(value, short=False): if settings.DEBUG: raise template.TemplateSyntaxError( "Error in 'restructuredtext' filter: " - "The Python docutils library isn't installed." + "The Python docutils library isn't installed.", ) return force_text(value) else: @@ -58,13 +61,18 @@ def restructuredtext(value, short=False): 'raw_enabled': False, 'file_insertion_enabled': False, } - docutils_settings.update(getattr(settings, 'RESTRUCTUREDTEXT_FILTER_SETTINGS', {})) - parts = publish_parts(source=force_bytes(value), writer_name="html4css1", - settings_overrides=docutils_settings) - out = force_text(parts["fragment"]) + docutils_settings.update( + getattr(settings, 'RESTRUCTUREDTEXT_FILTER_SETTINGS', {}), + ) + parts = publish_parts( + source=force_bytes(value), + writer_name='html4css1', + settings_overrides=docutils_settings, + ) + out = force_text(parts['fragment']) try: if short: - out = out.split("\n")[0] + out = out.split('\n')[0] except IndexError: pass return mark_safe(out) diff --git a/readthedocs/core/views/hooks.py b/readthedocs/core/views/hooks.py index 4564f7394d6..599b2262a94 100644 --- a/readthedocs/core/views/hooks.py +++ b/readthedocs/core/views/hooks.py @@ -1,21 +1,23 @@ +# -*- coding: utf-8 -*- """Views pertaining to builds.""" -from __future__ import absolute_import +from __future__ import ( + absolute_import, division, print_function, unicode_literals) + import json +import logging import re from django.http import HttpResponse, HttpResponseNotFound from django.shortcuts import redirect from django.views.decorators.csrf import csrf_exempt -from readthedocs.core.utils import trigger_build from readthedocs.builds.constants import LATEST +from readthedocs.core.utils import trigger_build from readthedocs.projects import constants -from readthedocs.projects.models import Project, Feature +from readthedocs.projects.models import Feature, Project from readthedocs.projects.tasks import SyncRepositoryTask -import logging - log = logging.getLogger(__name__) @@ -43,26 +45,31 @@ def _build_version(project, slug, already_built=()): # active latest_version = project.versions.get(slug=LATEST) trigger_build(project=project, version=latest_version, force=True) - log.info("(Version build) Building %s:%s", - project.slug, latest_version.slug) + log.info( + '(Version build) Building %s:%s', + project.slug, + latest_version.slug, + ) if project.versions.exclude(active=False).filter(slug=slug).exists(): # Handle the case where we want to build the custom branch too slug_version = project.versions.get(slug=slug) trigger_build(project=project, version=slug_version, force=True) - log.info("(Version build) Building %s:%s", - project.slug, slug_version.slug) + log.info( + '(Version build) Building %s:%s', + project.slug, + slug_version.slug, + ) return LATEST elif project.versions.exclude(active=True).filter(slug=slug).exists(): - log.info("(Version build) 
Not Building %s", slug) + log.info('(Version build) Not Building %s', slug) return None elif slug not in already_built: version = project.versions.get(slug=slug) trigger_build(project=project, version=version, force=True) - log.info("(Version build) Building %s:%s", - project.slug, version.slug) + log.info('(Version build) Building %s:%s', project.slug, version.slug) return slug else: - log.info("(Version build) Not Building %s", slug) + log.info('(Version build) Not Building %s', slug) return None @@ -79,8 +86,11 @@ def build_branches(project, branch_list): for branch in branch_list: versions = project.versions_from_branch_name(branch) for version in versions: - log.info("(Branch Build) Processing %s:%s", - project.slug, version.slug) + log.info( + '(Branch Build) Processing %s:%s', + project.slug, + version.slug, + ) ret = _build_version(project, version.slug, already_built=to_build) if ret: to_build.add(ret) @@ -92,15 +102,15 @@ def build_branches(project, branch_list): def get_project_from_url(url): projects = ( Project.objects.filter(repo__iendswith=url) | - Project.objects.filter(repo__iendswith=url + '.git')) + Project.objects.filter(repo__iendswith=url + '.git') + ) return projects def log_info(project, msg): - log.info(constants.LOG_TEMPLATE - .format(project=project, - version='', - msg=msg)) + log.info( + constants.LOG_TEMPLATE.format(project=project, version='', msg=msg), + ) def _build_url(url, projects, branches): @@ -110,7 +120,7 @@ def _build_url(url, projects, branches): Check each of the ``branches`` to see if they are active and should be built. """ - ret = "" + ret = '' all_built = {} all_not_building = {} @@ -126,9 +136,7 @@ def _build_url(url, projects, branches): # Call SyncRepositoryTask to update tag/branch info version = project.versions.get(slug=LATEST) sync_repository = SyncRepositoryTask() - sync_repository.apply_async( - args=(version.pk,), - ) + sync_repository.apply_async(args=(version.pk,),) msg = '(URL Build) Syncing versions for %s' % project.slug log.info(msg) all_built[project.slug] = built @@ -136,15 +144,16 @@ def _build_url(url, projects, branches): for project_slug, built in list(all_built.items()): if built: - msg = '(URL Build) Build Started: %s [%s]' % ( - url, ' '.join(built)) + msg = '(URL Build) Build Started: %s [%s]' % (url, ' '.join(built)) log_info(project_slug, msg=msg) ret += msg for project_slug, not_building in list(all_not_building.items()): if not_building: msg = '(URL Build) Not Building: %s [%s]' % ( - url, ' '.join(not_building)) + url, + ' '.join(not_building), + ) log_info(project_slug, msg=msg) ret += msg @@ -173,12 +182,16 @@ def github_build(request): # noqa: D205 """ if request.method == 'POST': try: - if request.META['CONTENT_TYPE'] == 'application/x-www-form-urlencoded': + if request.META['CONTENT_TYPE' + ] == 'application/x-www-form-urlencoded': data = json.loads(request.POST.get('payload')) else: data = json.loads(request.body) http_url = data['repository']['url'] - http_search_url = http_url.replace('http://', '').replace('https://', '') + http_search_url = http_url.replace( + 'http://', + '', + ).replace('https://', '') ssh_url = data['repository']['ssh_url'] ssh_search_url = ssh_url.replace('git@', '').replace('.git', '') branches = [data['ref'].replace('refs/heads/', '')] @@ -191,14 +204,14 @@ def github_build(request): # noqa: D205 log.info( 'GitHub webhook search: url=%s branches=%s', http_search_url, - branches + branches, ) ssh_projects = get_project_from_url(ssh_search_url) if ssh_projects: log.info( 'GitHub 
webhook search: url=%s branches=%s', ssh_search_url, - branches + branches, ) projects = repo_projects | ssh_projects return _build_url(http_search_url, projects, branches) @@ -233,12 +246,13 @@ def gitlab_build(request): # noqa: D205 log.info( 'GitLab webhook search: url=%s branches=%s', search_url, - branches + branches, ) projects = get_project_from_url(search_url) if projects: return _build_url(search_url, projects, branches) - log.error('Project match not found: url=%s', search_url) + + log.info('Project match not found: url=%s', search_url) return HttpResponseNotFound('Project match not found') return HttpResponse('Method not allowed, POST is required', status=405) @@ -267,25 +281,28 @@ def bitbucket_build(request): """ if request.method == 'POST': try: - if request.META['CONTENT_TYPE'] == 'application/x-www-form-urlencoded': + if request.META['CONTENT_TYPE' + ] == 'application/x-www-form-urlencoded': data = json.loads(request.POST.get('payload')) else: data = json.loads(request.body) - version = 2 if request.META.get('HTTP_USER_AGENT') == 'Bitbucket-Webhooks/2.0' else 1 + version = 2 if request.META.get( + 'HTTP_USER_AGENT', + ) == 'Bitbucket-Webhooks/2.0' else 1 if version == 1: - branches = [commit.get('branch', '') - for commit in data['commits']] + branches = [ + commit.get('branch', '') for commit in data['commits'] + ] repository = data['repository'] search_url = 'bitbucket.org{0}'.format( - repository['absolute_url'].rstrip('/') + repository['absolute_url'].rstrip('/'), ) elif version == 2: changes = data['push']['changes'] - branches = [change['new']['name'] - for change in changes] + branches = [change['new']['name'] for change in changes] search_url = 'bitbucket.org/{0}'.format( - data['repository']['full_name'] + data['repository']['full_name'], ) except (TypeError, ValueError, KeyError): log.exception('Invalid Bitbucket webhook payload') @@ -294,7 +311,7 @@ def bitbucket_build(request): log.info( 'Bitbucket webhook search: url=%s branches=%s', search_url, - branches + branches, ) log.debug('Bitbucket webhook payload:\n\n%s\n\n', data) projects = get_project_from_url(search_url) @@ -304,10 +321,10 @@ def bitbucket_build(request): log.error( 'Commit/branch not found url=%s branches=%s', search_url, - branches + branches, ) return HttpResponseNotFound('Commit/branch not found') - log.error('Project match not found: url=%s', search_url) + log.info('Project match not found: url=%s', search_url) return HttpResponseNotFound('Project match not found') return HttpResponse('Method not allowed, POST is required', status=405) @@ -330,10 +347,12 @@ def generic_build(request, project_id_or_slug=None): project = Project.objects.get(slug=project_id_or_slug) except (Project.DoesNotExist, ValueError): log.exception( - "(Incoming Generic Build) Repo not found: %s", - project_id_or_slug) + '(Incoming Generic Build) Repo not found: %s', + project_id_or_slug, + ) return HttpResponseNotFound( - 'Repo not found: %s' % project_id_or_slug) + 'Repo not found: %s' % project_id_or_slug, + ) # This endpoint doesn't require authorization, we shouldn't allow builds to # be triggered from this any longer. Deprecation plan is to selectively # allow access to this endpoint for now. 
@@ -341,9 +360,8 @@ def generic_build(request, project_id_or_slug=None): return HttpResponse('This API endpoint is deprecated', status=403) if request.method == 'POST': slug = request.POST.get('version_slug', project.default_version) - log.info( - "(Incoming Generic Build) %s [%s]", project.slug, slug) + log.info('(Incoming Generic Build) %s [%s]', project.slug, slug) _build_version(project, slug) else: - return HttpResponse("You must POST to this resource.") + return HttpResponse('You must POST to this resource.') return redirect('builds_project_list', project.slug) diff --git a/readthedocs/core/views/serve.py b/readthedocs/core/views/serve.py index b4afe757454..b29898a5cac 100644 --- a/readthedocs/core/views/serve.py +++ b/readthedocs/core/views/serve.py @@ -34,9 +34,8 @@ from functools import wraps from django.conf import settings -from django.http import HttpResponse, HttpResponseRedirect, Http404 -from django.shortcuts import get_object_or_404 -from django.shortcuts import render +from django.http import Http404, HttpResponse, HttpResponseRedirect +from django.shortcuts import get_object_or_404, render from django.views.static import serve from readthedocs.builds.models import Version @@ -57,8 +56,11 @@ def map_subproject_slug(view_func): .. warning:: Does not take into account any kind of privacy settings. """ + @wraps(view_func) - def inner_view(request, subproject=None, subproject_slug=None, *args, **kwargs): # noqa + def inner_view( + request, subproject=None, subproject_slug=None, *args, **kwargs + ): # noqa if subproject is None and subproject_slug: # Try to fetch by subproject alias first, otherwise we might end up # redirected to an unrelated project. @@ -84,8 +86,11 @@ def map_project_slug(view_func): .. warning:: Does not take into account any kind of privacy settings. 
""" + @wraps(view_func) - def inner_view(request, project=None, project_slug=None, *args, **kwargs): # noqa + def inner_view( + request, project=None, project_slug=None, *args, **kwargs + ): # noqa if project is None: if not project_slug: project_slug = request.slug @@ -110,7 +115,8 @@ def redirect_project_slug(request, project, subproject): # pylint: disable=unus def redirect_page_with_filename(request, project, subproject, filename): # pylint: disable=unused-argument # noqa """Redirect /page/file.html to /en/latest/file.html.""" return HttpResponseRedirect( - resolve(subproject or project, filename=filename)) + resolve(subproject or project, filename=filename), + ) def _serve_401(request, project): @@ -128,7 +134,8 @@ def _serve_file(request, filename, basepath): else: # Serve from Nginx content_type, encoding = mimetypes.guess_type( - os.path.join(basepath, filename)) + os.path.join(basepath, filename), + ) content_type = content_type or 'application/octet-stream' response = HttpResponse(content_type=content_type) if encoding: @@ -147,9 +154,15 @@ def _serve_file(request, filename, basepath): @map_project_slug @map_subproject_slug def serve_docs( - request, project, subproject, lang_slug=None, version_slug=None, - filename=''): - """Exists to map existing proj, lang, version, filename views to the file format.""" + request, + project, + subproject, + lang_slug=None, + version_slug=None, + filename='', +): + """Exists to map existing proj, lang, version, filename views to the file + format.""" if not version_slug: version_slug = project.get_default_version() try: @@ -213,4 +226,5 @@ def _serve_symlink_docs(request, project, privacy_level, filename=''): files_tried.append(os.path.join(basepath, filename)) raise Http404( - 'File not found. Tried these files: %s' % ','.join(files_tried)) + 'File not found. Tried these files: %s' % ','.join(files_tried), + ) diff --git a/readthedocs/doc_builder/backends/mkdocs.py b/readthedocs/doc_builder/backends/mkdocs.py index dfb5cda8b82..6ea5c13597b 100644 --- a/readthedocs/doc_builder/backends/mkdocs.py +++ b/readthedocs/doc_builder/backends/mkdocs.py @@ -1,14 +1,17 @@ +# -*- coding: utf-8 -*- """ MkDocs_ backend for building docs. .. 
_MkDocs: http://www.mkdocs.org/ """ -from __future__ import absolute_import -import os -import logging +from __future__ import ( + absolute_import, division, print_function, unicode_literals) + import json -import yaml +import logging +import os +import yaml from django.conf import settings from django.template import loader as template_loader @@ -46,7 +49,8 @@ def __init__(self, *args, **kwargs): super(BaseMkdocs, self).__init__(*args, **kwargs) self.old_artifact_path = os.path.join( self.version.project.checkout_path(self.version.slug), - self.build_dir) + self.build_dir, + ) self.root_path = self.version.project.checkout_path(self.version.slug) def load_yaml_config(self): @@ -57,7 +61,7 @@ def load_yaml_config(self): """ try: return yaml.safe_load( - open(os.path.join(self.root_path, 'mkdocs.yml'), 'r') + open(os.path.join(self.root_path, 'mkdocs.yml'), 'r'), ) except IOError: return { @@ -67,11 +71,14 @@ def load_yaml_config(self): note = '' if hasattr(exc, 'problem_mark'): mark = exc.problem_mark - note = ' (line %d, column %d)' % (mark.line + 1, mark.column + 1) + note = ' (line %d, column %d)' % ( + mark.line + 1, + mark.column + 1, + ) raise BuildEnvironmentError( - "Your mkdocs.yml could not be loaded, " - "possibly due to a syntax error%s" % ( - note,)) + 'Your mkdocs.yml could not be loaded, ' + 'possibly due to a syntax error%s' % (note,), + ) def append_conf(self, **__): """Set mkdocs config values.""" @@ -89,7 +96,7 @@ def append_conf(self, **__): user_config.setdefault('extra_javascript', []).extend([ 'readthedocs-data.js', 'readthedocs-dynamic-include.js', - '%sstatic/core/js/readthedocs-doc-embed.js' % media_url + '%sstatic/core/js/readthedocs-doc-embed.js' % media_url, ]) user_config.setdefault('extra_css', []).extend([ '%scss/badge_only.css' % media_url, @@ -102,7 +109,7 @@ def append_conf(self, **__): yaml.safe_dump( user_config, - open(os.path.join(self.root_path, 'mkdocs.yml'), 'w') + open(os.path.join(self.root_path, 'mkdocs.yml'), 'w'), ) docs_path = os.path.join(self.root_path, docs_dir) @@ -113,7 +120,10 @@ def append_conf(self, **__): f.write(rtd_data) dynamic_include = self.generate_dynamic_include() - with open(os.path.join(docs_path, 'readthedocs-dynamic-include.js'), 'w') as f: + with open( + os.path.join(docs_path, 'readthedocs-dynamic-include.js'), + 'w', + ) as f: f.write(dynamic_include) def generate_rtd_data(self, docs_dir): @@ -125,11 +135,15 @@ def generate_rtd_data(self, docs_dir): 'language': self.version.project.language, 'programming_language': self.version.project.programming_language, 'page': None, - 'theme': "readthedocs", - 'builder': "mkdocs", + 'theme': 'readthedocs', + 'builder': 'mkdocs', 'docroot': docs_dir, - 'source_suffix': ".md", - 'api_host': getattr(settings, 'PUBLIC_API_URL', 'https://readthedocs.org'), + 'source_suffix': '.md', + 'api_host': getattr( + settings, + 'PUBLIC_API_URL', + 'https://readthedocs.org', + ), 'commit': self.version.project.vcs_repo(self.version.slug).commit, } data_json = json.dumps(readthedocs_data, indent=4) @@ -145,7 +159,11 @@ def generate_rtd_data(self, docs_dir): def generate_dynamic_include(self): include_ctx = { - 'global_analytics_code': getattr(settings, 'GLOBAL_ANALYTICS_CODE', 'UA-17997319-1'), + 'global_analytics_code': getattr( + settings, + 'GLOBAL_ANALYTICS_CODE', + 'UA-17997319-1', + ), 'user_analytics_code': self.version.project.analytics_code, } tmpl = template_loader.get_template('doc_builder/include.js.tmpl') @@ -158,13 +176,13 @@ def build(self): 
self.python_env.venv_bin(filename='mkdocs'), self.builder, '--clean', - '--site-dir', self.build_dir, + '--site-dir', + self.build_dir, ] if self.use_theme: build_command.extend(['--theme', 'readthedocs']) cmd_ret = self.run( - *build_command, - cwd=checkout_path, + *build_command, cwd=checkout_path, bin_path=self.python_env.venv_bin() ) return cmd_ret.successful @@ -184,12 +202,12 @@ class MkdocsJSON(BaseMkdocs): def build(self): user_config = yaml.safe_load( - open(os.path.join(self.root_path, 'mkdocs.yml'), 'r') + open(os.path.join(self.root_path, 'mkdocs.yml'), 'r'), ) if user_config['theme_dir'] == TEMPLATE_DIR: del user_config['theme_dir'] yaml.safe_dump( user_config, - open(os.path.join(self.root_path, 'mkdocs.yml'), 'w') + open(os.path.join(self.root_path, 'mkdocs.yml'), 'w'), ) return super(MkdocsJSON, self).build() diff --git a/readthedocs/doc_builder/environments.py b/readthedocs/doc_builder/environments.py index 7123cf983cf..702d0ae3a9f 100644 --- a/readthedocs/doc_builder/environments.py +++ b/readthedocs/doc_builder/environments.py @@ -1,23 +1,26 @@ # -*- coding: utf-8 -*- - """Documentation Builder Environments.""" -from __future__ import absolute_import -from builtins import str -from builtins import object +from __future__ import ( + absolute_import, division, print_function, unicode_literals) + +import logging import os import re -import sys -import logging +import socket import subprocess +import sys import traceback -import socket from datetime import datetime +import six +from builtins import object, str from django.conf import settings from django.utils.translation import ugettext_lazy as _ from docker import APIClient -from docker.errors import APIError as DockerAPIError, DockerException +from docker.errors import APIError as DockerAPIError +from docker.errors import DockerException +from requests.exceptions import ConnectionError from slumber.exceptions import HttpClientError from readthedocs.builds.constants import BUILD_STATE_FINISHED @@ -25,24 +28,24 @@ from readthedocs.core.utils import slugify from readthedocs.projects.constants import LOG_TEMPLATE from readthedocs.restapi.client import api as api_v2 -from requests.exceptions import ConnectionError -from .exceptions import (BuildEnvironmentException, BuildEnvironmentError, - BuildEnvironmentWarning, BuildEnvironmentCreationFailed) -from .constants import (DOCKER_SOCKET, DOCKER_VERSION, DOCKER_IMAGE, - DOCKER_LIMITS, DOCKER_TIMEOUT_EXIT_CODE, - DOCKER_OOM_EXIT_CODE, SPHINX_TEMPLATE_DIR, - MKDOCS_TEMPLATE_DIR, DOCKER_HOSTNAME_MAX_LEN) -import six +from .constants import ( + DOCKER_HOSTNAME_MAX_LEN, DOCKER_IMAGE, DOCKER_LIMITS, DOCKER_OOM_EXIT_CODE, + DOCKER_SOCKET, DOCKER_TIMEOUT_EXIT_CODE, DOCKER_VERSION, + MKDOCS_TEMPLATE_DIR, SPHINX_TEMPLATE_DIR) +from .exceptions import ( + BuildEnvironmentCreationFailed, BuildEnvironmentError, + BuildEnvironmentException, BuildEnvironmentWarning) log = logging.getLogger(__name__) - __all__ = ( 'api_v2', - 'BuildCommand', 'DockerBuildCommand', + 'BuildCommand', + 'DockerBuildCommand', 'LocalEnvironment', - 'LocalBuildEnvironment', 'DockerBuildEnvironment', + 'LocalBuildEnvironment', + 'DockerBuildEnvironment', ) @@ -70,9 +73,19 @@ class BuildCommand(BuildCommandResultMixin): :param description: a more grokable description of the command being run """ - def __init__(self, command, cwd=None, shell=False, environment=None, - combine_output=True, input_data=None, build_env=None, - bin_path=None, description=None, record_as_success=False): + def __init__( + self, + command, + 
cwd=None, + shell=False, + environment=None, + combine_output=True, + input_data=None, + build_env=None, + bin_path=None, + description=None, + record_as_success=False, + ): self.command = command self.shell = shell if cwd is None: @@ -177,7 +190,10 @@ def run(self): def get_command(self): """Flatten command.""" - if hasattr(self.command, '__iter__') and not isinstance(self.command, str): + if hasattr( + self.command, + '__iter__', + ) and not isinstance(self.command, str): return ' '.join(self.command) return self.command @@ -218,8 +234,12 @@ def run(self): :type cmd_input: str :param combine_output: combine STDERR into STDOUT """ - log.info("Running in container %s: '%s' [%s]", - self.build_env.container_id, self.get_command(), self.cwd) + log.info( + "Running in container %s: '%s' [%s]", + self.build_env.container_id, + self.get_command(), + self.cwd, + ) self.start_time = datetime.utcnow() client = self.build_env.get_client() @@ -228,7 +248,7 @@ def run(self): container=self.build_env.container_id, cmd=self.get_wrapped_command(), stdout=True, - stderr=True + stderr=True, ) output = client.exec_start(exec_id=exec_cmd['Id'], stream=False) @@ -244,10 +264,15 @@ def run(self): # nicer. Sometimes the kernel kills the command and Docker does # not use the specific exit code, so we check if the word `Killed` # is in the last 15 lines of the command's output - killed_in_output = 'Killed' in '\n'.join(self.output.splitlines()[-15:]) - if self.exit_code == DOCKER_OOM_EXIT_CODE or (self.exit_code == 1 and killed_in_output): - self.output = _('Command killed due to excessive memory ' - 'consumption\n') + killed_in_output = 'Killed' in '\n'.join( + self.output.splitlines()[-15:], + ) + if self.exit_code == DOCKER_OOM_EXIT_CODE or (self.exit_code == 1 + and killed_in_output): + self.output = _( + 'Command killed due to excessive memory ' + 'consumption\n', + ) except DockerAPIError: self.exit_code = -1 if self.output is None or not self.output: @@ -265,17 +290,25 @@ def get_wrapped_command(self): install requests<0.8``. This escapes a good majority of those characters. """ - bash_escape_re = re.compile(r"([\t\ \!\"\#\$\&\'\(\)\*\:\;\<\>\?\@" - r"\[\\\]\^\`\{\|\}\~])") + bash_escape_re = re.compile( + r"([\t\ \!\"\#\$\&\'\(\)\*\:\;\<\>\?\@" + r'\[\\\]\^\`\{\|\}\~])', + ) prefix = '' if self.bin_path: prefix += 'PATH={0}:$PATH '.format(self.bin_path) - return ("/bin/sh -c 'cd {cwd} && {prefix}{cmd}'" - .format( - cwd=self.cwd, - prefix=prefix, - cmd=(' '.join([bash_escape_re.sub(r'\\\1', part) - for part in self.command])))) + return ( + "/bin/sh -c 'cd {cwd} && {prefix}{cmd}'".format( + cwd=self.cwd, + prefix=prefix, + cmd=( + ' '.join([ + bash_escape_re.sub(r'\\\1', part) + for part in self.command + ]) + ), + ) + ) class BaseEnvironment(object): @@ -296,11 +329,13 @@ def record_command(self, command): pass def _log_warning(self, msg): - log.warning(LOG_TEMPLATE.format( - project=self.project.slug, - version='latest', - msg=msg, - )) + log.warning( + LOG_TEMPLATE.format( + project=self.project.slug, + version='latest', + msg=msg, + ), + ) def run(self, *cmd, **kwargs): """Shortcut to run command from environment.""" @@ -308,7 +343,8 @@ def run_command_class( self, cls, cmd, record=None, warn_only=False, - record_as_success=False, **kwargs): + record_as_success=False, **kwargs + ): """ Run command from this environment.
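get_wrapped_command() above is the piece worth seeing in isolation: every argument is escaped before the whole command is handed to /bin/sh inside the container. A sketch using the same regex, with a hypothetical module-level function standing in for the method:

import re

# Character class copied from the hunk above: the characters bash would
# otherwise interpret specially.
BASH_ESCAPE_RE = re.compile(
    r"([\t\ \!\"\#\$\&\'\(\)\*\:\;\<\>\?\@"
    r'\[\\\]\^\`\{\|\}\~])',
)

def wrap_command(command, cwd, bin_path=None):
    """Escape each part and wrap the command for docker exec via sh -c."""
    prefix = 'PATH={0}:$PATH '.format(bin_path) if bin_path else ''
    escaped = ' '.join(BASH_ESCAPE_RE.sub(r'\\\1', part) for part in command)
    return "/bin/sh -c 'cd {cwd} && {prefix}{cmd}'".format(
        cwd=cwd, prefix=prefix, cmd=escaped,
    )

For example, wrap_command(['pip', 'install', 'requests<0.8'], cwd='/tmp') produces /bin/sh -c 'cd /tmp && pip install requests\<0.8', which is how the ``<`` in ``pip install requests<0.8`` survives the shell.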
@@ -409,8 +445,16 @@ class BuildEnvironment(BaseEnvironment): successful """ - def __init__(self, project=None, version=None, build=None, config=None, - record=True, environment=None, update_on_success=True): + def __init__( + self, + project=None, + version=None, + build=None, + config=None, + record=True, + environment=None, + update_on_success=True, + ): super(BuildEnvironment, self).__init__(project, environment) self.version = version self.build = build @@ -427,10 +471,13 @@ def __enter__(self): def __exit__(self, exc_type, exc_value, tb): ret = self.handle_exception(exc_type, exc_value, tb) self.update_build(BUILD_STATE_FINISHED) - log.info(LOG_TEMPLATE - .format(project=self.project.slug, - version=self.version.slug, - msg='Build finished')) + log.info( + LOG_TEMPLATE.format( + project=self.project.slug, + version=self.version.slug, + msg='Build finished', + ), + ) return ret def handle_exception(self, exc_type, exc_value, _): @@ -445,12 +492,15 @@ def handle_exception(self, exc_type, exc_value, _): a failure and the context will be gracefully exited. """ if exc_type is not None: - log.error(LOG_TEMPLATE - .format(project=self.project.slug, - version=self.version.slug, - msg=exc_value), - exc_info=True) if not issubclass(exc_type, BuildEnvironmentWarning): + log.error( + LOG_TEMPLATE.format( + project=self.project.slug, + version=self.version.slug, + msg=exc_value, + ), + exc_info=True, + ) self.failure = exc_value return True @@ -459,11 +509,13 @@ def record_command(self, command): def _log_warning(self, msg): # :'( - log.warning(LOG_TEMPLATE.format( - project=self.project.slug, - version=self.version.slug, - msg=msg, - )) + log.warning( + LOG_TEMPLATE.format( + project=self.project.slug, + version=self.version.slug, + msg=msg, + ), + ) def run(self, *cmd, **kwargs): kwargs.update({ @@ -479,23 +531,30 @@ def run_command_class(self, *cmd, **kwargs): # pylint: disable=arguments-differ @property def successful(self): - """Is build completed, without top level failures or failing commands.""" # noqa - return (self.done and self.failure is None and - all(cmd.successful for cmd in self.commands)) + """Is build completed, without top level failures or failing + commands.""" # noqa + return ( + self.done and self.failure is None and + all(cmd.successful for cmd in self.commands) + ) @property def failed(self): """Is build completed, but has top level failure or failing commands.""" - return (self.done and ( - self.failure is not None or - any(cmd.failed for cmd in self.commands) - )) + return ( + self.done and ( + self.failure is not None or + any(cmd.failed for cmd in self.commands) + ) + ) @property def done(self): """Is build in finished state.""" - return (self.build is not None and - self.build['state'] == BUILD_STATE_FINISHED) + return ( + self.build is not None and + self.build['state'] == BUILD_STATE_FINISHED + ) def update_build(self, state=None): """ @@ -521,15 +580,18 @@ def update_build(self, state=None): # TODO drop exit_code and provide a more meaningful UX for error # reporting - if self.failure and isinstance(self.failure, - BuildEnvironmentException): + if self.failure and isinstance( + self.failure, + BuildEnvironmentException, + ): self.build['exit_code'] = self.failure.status_code elif self.commands: - self.build['exit_code'] = max([cmd.exit_code - for cmd in self.commands]) + self.build['exit_code'] = max([ + cmd.exit_code for cmd in self.commands + ],) - self.build['setup'] = self.build['setup_error'] = "" - self.build['output'] = self.build['error'] = "" + 
self.build['setup'] = self.build['setup_error'] = '' + self.build['output'] = self.build['error'] = '' if self.start_time: build_length = (datetime.utcnow() - self.start_time) @@ -538,21 +600,22 @@ def update_build(self, state=None): if self.failure is not None: # Surface a generic error if the class is not a # BuildEnvironmentError - if not isinstance(self.failure, - (BuildEnvironmentException, - BuildEnvironmentWarning)): + if not isinstance( + self.failure, + (BuildEnvironmentException, BuildEnvironmentWarning), + ): log.error( 'Build failed with unhandled exception: %s', str(self.failure), extra={ 'stack': True, 'tags': {'build': self.build['id']}, - } + }, ) self.failure = BuildEnvironmentError( BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format( build_id=self.build['id'], - ) + ), ) self.build['error'] = str(self.failure) @@ -574,13 +637,12 @@ def update_build(self, state=None): try: api_v2.build(self.build['id']).put(self.build) except HttpClientError as e: - log.error( - "Unable to update build: id=%d error=%s", + log.exception( + 'Unable to update build: id=%d', self.build['id'], - e.content, ) except Exception: - log.exception("Unknown build exception") + log.exception('Unknown build exception') class LocalBuildEnvironment(BuildEnvironment): @@ -621,7 +683,7 @@ def __init__(self, *args, **kwargs): build=self.build.get('id'), project_id=self.project.pk, project_name=self.project.slug, - )[:DOCKER_HOSTNAME_MAX_LEN] + )[:DOCKER_HOSTNAME_MAX_LEN], ) if self.config and self.config.build_image: self.container_image = self.config.build_image @@ -643,18 +705,25 @@ def __enter__(self): if state is not None: if state.get('Running') is True: exc = BuildEnvironmentError( - _('A build environment is currently ' - 'running for this version')) + _( + 'A build environment is currently ' + 'running for this version', + ), + ) self.failure = exc self.build['state'] = BUILD_STATE_FINISHED raise exc else: - log.warning(LOG_TEMPLATE - .format( - project=self.project.slug, - version=self.version.slug, - msg=("Removing stale container {0}" - .format(self.container_id)))) + log.warning( + LOG_TEMPLATE.format( + project=self.project.slug, + version=self.version.slug, + msg=( + 'Removing stale container {0}' + .format(self.container_id) + ), + ), + ) client = self.get_client() client.remove_container(self.container_id) except (DockerAPIError, ConnectionError): @@ -699,8 +768,7 @@ def __exit__(self, exc_type, exc_value, tb): # request. These errors should not surface to the user. 
except (DockerAPIError, ConnectionError): log.exception( - LOG_TEMPLATE - .format( + LOG_TEMPLATE.format( project=self.project.slug, version=self.version.slug, msg="Couldn't remove container", @@ -717,10 +785,13 @@ def __exit__(self, exc_type, exc_value, tb): ret = self.handle_exception(exc_type, exc_value, tb) self.update_build(BUILD_STATE_FINISHED) - log.info(LOG_TEMPLATE - .format(project=self.project.slug, - version=self.version.slug, - msg='Build finished')) + log.info( + LOG_TEMPLATE.format( + project=self.project.slug, + version=self.version.slug, + msg='Build finished', + ), + ) return ret def get_client(self): @@ -746,7 +817,7 @@ def get_client(self): raise BuildEnvironmentError( BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format( build_id=self.build['id'], - ) + ), ) def get_container_host_config(self): @@ -825,14 +896,17 @@ def update_build_from_container_state(self): if state is not None and state.get('Running') is False: if state.get('ExitCode') == DOCKER_TIMEOUT_EXIT_CODE: self.failure = BuildEnvironmentError( - _('Build exited due to time out')) + _('Build exited due to time out'), + ) elif state.get('OOMKilled', False): self.failure = BuildEnvironmentError( - _('Build exited due to excessive memory consumption')) + _('Build exited due to excessive memory consumption'), + ) elif state.get('Error'): - self.failure = BuildEnvironmentError( - (_('Build exited due to unknown error: {0}') - .format(state.get('Error')))) + self.failure = BuildEnvironmentError(( + _('Build exited due to unknown error: {0}') + .format(state.get('Error')) + )) def create_container(self): """Create docker container.""" @@ -844,9 +918,12 @@ def create_container(self): ) self.container = client.create_container( image=self.container_image, - command=('/bin/sh -c "sleep {time}; exit {exit}"' - .format(time=self.container_time_limit, - exit=DOCKER_TIMEOUT_EXIT_CODE)), + command=( + '/bin/sh -c "sleep {time}; exit {exit}"'.format( + time=self.container_time_limit, + exit=DOCKER_TIMEOUT_EXIT_CODE, + ) + ), name=self.container_id, hostname=self.container_id, host_config=self.get_container_host_config(), @@ -871,12 +948,11 @@ def create_container(self): raise BuildEnvironmentError( BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format( build_id=self.build['id'], - ) + ), ) except DockerAPIError as e: log.exception( - LOG_TEMPLATE - .format( + LOG_TEMPLATE.format( project=self.project.slug, version=self.version.slug, msg=e.explanation, diff --git a/readthedocs/doc_builder/templates/doc_builder/include.js.tmpl b/readthedocs/doc_builder/templates/doc_builder/include.js.tmpl index bc15165fa24..3694ba0f698 100644 --- a/readthedocs/doc_builder/templates/doc_builder/include.js.tmpl +++ b/readthedocs/doc_builder/templates/doc_builder/include.js.tmpl @@ -24,4 +24,3 @@ ga('user.send', 'pageview'); {% endif %} // end RTD Analytics Code - diff --git a/readthedocs/gold/static/gold/js/gold.js b/readthedocs/gold/static/gold/js/gold.js index 49183a61bb5..2a273189ac2 100644 --- a/readthedocs/gold/static/gold/js/gold.js +++ b/readthedocs/gold/static/gold/js/gold.js @@ -1 +1 @@
-[minified browserify bundle for gold.js: one very long generated line, further garbled by text extraction; omitted]
\ No newline at end of file
+[the same minified bundle, re-emitted with a trailing newline; no other change is visible]
diff --git a/readthedocs/oauth/services/base.py b/readthedocs/oauth/services/base.py index bc56e0057e3..528518097fc 100644 --- a/readthedocs/oauth/services/base.py +++ b/readthedocs/oauth/services/base.py @@ -112,6 +112,7 @@ def token_updater(self, token): u'expires_at': 1449218652.558185 } """ + def _updater(data): token.token = data['access_token'] token.expires_at = datetime.fromtimestamp(data['expires_at']) @@ -138,7 +139,7 @@ def
paginate(self, url, **kwargs): return results # Catch specific exception related to OAuth except InvalidClientIdError: - log.error('access_token or refresh_token failed: %s', url) + log.warning('access_token or refresh_token failed: %s', url) raise Exception('You should reconnect your account') # Catch exceptions with request or deserializing JSON except (RequestException, ValueError): @@ -149,7 +150,10 @@ def paginate(self, url, **kwargs): except ValueError: debug_data = resp.content log.debug( - 'paginate failed at %s with response: %s', url, debug_data) + 'Paginate failed at %s with response: %s', + url, + debug_data, + ) else: return [] @@ -200,4 +204,5 @@ def is_project_service(cls, project): # TODO Replace this check by keying project to remote repos return ( cls.url_pattern is not None and - cls.url_pattern.search(project.repo) is not None) + cls.url_pattern.search(project.repo) is not None + ) diff --git a/readthedocs/oauth/services/bitbucket.py b/readthedocs/oauth/services/bitbucket.py index 5f997a2b26a..ac0a15bba18 100644 --- a/readthedocs/oauth/services/bitbucket.py +++ b/readthedocs/oauth/services/bitbucket.py @@ -1,16 +1,18 @@ +# -*- coding: utf-8 -*- """OAuth utility functions.""" -from __future__ import absolute_import -from builtins import str -import logging +from __future__ import ( + absolute_import, division, print_function, unicode_literals) + import json +import logging import re +from allauth.socialaccount.providers.bitbucket_oauth2.views import ( + BitbucketOAuth2Adapter) from django.conf import settings from django.core.urlresolvers import reverse from requests.exceptions import RequestException -from allauth.socialaccount.providers.bitbucket_oauth2.views import ( - BitbucketOAuth2Adapter) from readthedocs.builds import utils as build_utils from readthedocs.integrations.models import Integration @@ -18,7 +20,6 @@ from ..models import RemoteOrganization, RemoteRepository from .base import Service - log = logging.getLogger(__name__) @@ -41,26 +42,30 @@ def sync_repositories(self): # Get user repos try: repos = self.paginate( - 'https://bitbucket.org/api/2.0/repositories/?role=member') + 'https://bitbucket.org/api/2.0/repositories/?role=member', + ) for repo in repos: self.create_repository(repo) except (TypeError, ValueError) as e: - log.error('Error syncing Bitbucket repositories: %s', - str(e), exc_info=True) - raise Exception('Could not sync your Bitbucket repositories, ' - 'try reconnecting your account') + log.exception('Error syncing Bitbucket repositories') + raise Exception( + 'Could not sync your Bitbucket repositories, ' + 'try reconnecting your account', + ) # Because privileges aren't returned with repository data, run query # again for repositories that user has admin role for, and update # existing repositories. 
try: resp = self.paginate( - 'https://bitbucket.org/api/2.0/repositories/?role=admin') + 'https://bitbucket.org/api/2.0/repositories/?role=admin', + ) repos = ( - RemoteRepository.objects - .filter(users=self.user, - full_name__in=[r['full_name'] for r in resp], - account=self.account) + RemoteRepository.objects.filter( + users=self.user, + full_name__in=[r['full_name'] for r in resp], + account=self.account, + ) ) for repo in repos: repo.admin = True @@ -72,7 +77,7 @@ def sync_teams(self): """Sync Bitbucket teams and team repositories.""" try: teams = self.paginate( - 'https://api.bitbucket.org/2.0/teams/?role=member' + 'https://api.bitbucket.org/2.0/teams/?role=member', ) for team in teams: org = self.create_organization(team) @@ -80,10 +85,11 @@ def sync_teams(self): for repo in repos: self.create_repository(repo, organization=org) except ValueError as e: - log.error('Error syncing Bitbucket organizations: %s', - str(e), exc_info=True) - raise Exception('Could not sync your Bitbucket team repositories, ' - 'try reconnecting your account') + log.exception('Error syncing Bitbucket organizations') + raise Exception( + 'Could not sync your Bitbucket team repositories, ' + 'try reconnecting your account', + ) def create_repository(self, fields, privacy=None, organization=None): """ @@ -101,17 +107,17 @@ def create_repository(self, fields, privacy=None, organization=None): :rtype: RemoteRepository """ privacy = privacy or settings.DEFAULT_PRIVACY_LEVEL - if ( - (privacy == 'private') or - (fields['is_private'] is False and privacy == 'public') - ): + if ((privacy == 'private') or + (fields['is_private'] is False and privacy == 'public')): repo, _ = RemoteRepository.objects.get_or_create( full_name=fields['full_name'], account=self.account, ) if repo.organization and repo.organization != organization: - log.debug('Not importing %s because mismatched orgs', - fields['name']) + log.debug( + 'Not importing %s because mismatched orgs', + fields['name'], + ) return None else: repo.organization = organization @@ -125,7 +131,7 @@ def create_repository(self, fields, privacy=None, organization=None): for u in fields['links']['clone']) repo.clone_url = self.https_url_pattern.sub( 'https://bitbucket.org/', - clone_urls.get('https') + clone_urls.get('https'), ) repo.ssh_url = clone_urls.get('ssh') if repo.private: @@ -144,8 +150,10 @@ def create_repository(self, fields, privacy=None, organization=None): repo.save() return repo else: - log.debug('Not importing %s because mismatched type', - fields['name']) + log.debug( + 'Not importing %s because mismatched type', + fields['name'], + ) def create_organization(self, fields): """ @@ -179,14 +187,18 @@ def get_paginated_results(self, response): def get_webhook_data(self, project, integration): """Get webhook JSON data to post to the API.""" return json.dumps({ - 'description': 'Read the Docs ({domain})'.format(domain=settings.PRODUCTION_DOMAIN), + 'description': 'Read the Docs ({domain})'.format( + domain=settings.PRODUCTION_DOMAIN, + ), 'url': 'https://{domain}{path}'.format( domain=settings.PRODUCTION_DOMAIN, path=reverse( 'api_webhook', - kwargs={'project_slug': project.slug, - 'integration_pk': integration.pk} - ) + kwargs={ + 'project_slug': project.slug, + 'integration_pk': integration.pk, + }, + ), ), 'active': True, 'events': ['repo:push'], @@ -211,28 +223,38 @@ def setup_webhook(self, project): resp = None try: resp = session.post( - ('https://api.bitbucket.org/2.0/repositories/{owner}/{repo}/hooks' - .format(owner=owner, repo=repo)), + ( + 
'https://api.bitbucket.org/2.0/repositories/{owner}/{repo}/hooks' + .format(owner=owner, repo=repo) + ), data=data, - headers={'content-type': 'application/json'} + headers={'content-type': 'application/json'}, ) if resp.status_code == 201: recv_data = resp.json() integration.provider_data = recv_data integration.save() - log.info('Bitbucket webhook creation successful for project: %s', - project) + log.info( + 'Bitbucket webhook creation successful for project: %s', + project, + ) return (True, resp) # Catch exceptions with request or deserializing JSON except (RequestException, ValueError): - log.error('Bitbucket webhook creation failed for project: %s', - project, exc_info=True) + log.exception( + 'Bitbucket webhook creation failed for project: %s', + project, + ) else: - log.error('Bitbucket webhook creation failed for project: %s', - project) + log.exception( + 'Bitbucket webhook creation failed for project: %s', + project, + ) try: - log.debug('Bitbucket webhook creation failure response: %s', - resp.json()) + log.debug( + 'Bitbucket webhook creation failure response: %s', + resp.json(), + ) except ValueError: pass return (False, resp) @@ -257,26 +279,35 @@ def update_webhook(self, project, integration): resp = session.put( url, data=data, - headers={'content-type': 'application/json'} + headers={'content-type': 'application/json'}, ) if resp.status_code == 200: recv_data = resp.json() integration.provider_data = recv_data integration.save() - log.info('Bitbucket webhook update successful for project: %s', - project) + log.info( + 'Bitbucket webhook update successful for project: %s', + project, + ) return (True, resp) # Catch exceptions with request or deserializing JSON except (KeyError, RequestException, ValueError): - log.error('Bitbucket webhook update failed for project: %s', - project, exc_info=True) + log.exception( + 'Bitbucket webhook update failed for project: %s', + project, + ) else: - log.error('Bitbucket webhook update failed for project: %s', - project) + log.exception( + 'Bitbucket webhook update failed for project: %s', + project, + ) # Response data should always be JSON, still try to log if not though try: debug_data = resp.json() except ValueError: debug_data = resp.content - log.debug('Bitbucket webhook update failure response: %s', debug_data) + log.debug( + 'Bitbucket webhook update failure response: %s', + debug_data, + ) return (False, resp) diff --git a/readthedocs/oauth/services/github.py b/readthedocs/oauth/services/github.py index 7be4a38c20b..51b0bc0ae3e 100644 --- a/readthedocs/oauth/services/github.py +++ b/readthedocs/oauth/services/github.py @@ -1,16 +1,18 @@ +# -*- coding: utf-8 -*- """OAuth utility functions.""" -from __future__ import absolute_import -from builtins import str -import logging +from __future__ import ( + absolute_import, division, print_function, unicode_literals) + import json +import logging import re +from allauth.socialaccount.models import SocialToken +from allauth.socialaccount.providers.github.views import GitHubOAuth2Adapter from django.conf import settings from django.core.urlresolvers import reverse from requests.exceptions import RequestException -from allauth.socialaccount.models import SocialToken -from allauth.socialaccount.providers.github.views import GitHubOAuth2Adapter from readthedocs.builds import utils as build_utils from readthedocs.integrations.models import Integration @@ -42,10 +44,11 @@ def sync_repositories(self): for repo in repos: self.create_repository(repo) except (TypeError, ValueError) as e: - 
log.error('Error syncing GitHub repositories: %s', - str(e), exc_info=True) - raise Exception('Could not sync your GitHub repositories, ' - 'try reconnecting your account') + log.exception('Error syncing GitHub repositories') + raise Exception( + 'Could not sync your GitHub repositories, ' + 'try reconnecting your account', + ) def sync_organizations(self): """Sync organizations from GitHub API.""" @@ -57,15 +60,16 @@ def sync_organizations(self): # Add repos # TODO ?per_page=100 org_repos = self.paginate( - '{org_url}/repos'.format(org_url=org['url']) + '{org_url}/repos'.format(org_url=org['url']), ) for repo in org_repos: self.create_repository(repo, organization=org_obj) except (TypeError, ValueError) as e: - log.error('Error syncing GitHub organizations: %s', - str(e), exc_info=True) - raise Exception('Could not sync your GitHub organizations, ' - 'try reconnecting your account') + log.exception('Error syncing GitHub organizations') + raise Exception( + 'Could not sync your GitHub organizations, ' + 'try reconnecting your account', + ) def create_repository(self, fields, privacy=None, organization=None): """ @@ -78,10 +82,8 @@ def create_repository(self, fields, privacy=None, organization=None): :rtype: RemoteRepository """ privacy = privacy or settings.DEFAULT_PRIVACY_LEVEL - if ( - (privacy == 'private') or - (fields['private'] is False and privacy == 'public') - ): + if ((privacy == 'private') or + (fields['private'] is False and privacy == 'public')): try: repo = RemoteRepository.objects.get( full_name=fields['full_name'], @@ -95,8 +97,10 @@ def create_repository(self, fields, privacy=None, organization=None): ) repo.users.add(self.user) if repo.organization and repo.organization != organization: - log.debug('Not importing %s because mismatched orgs', - fields['name']) + log.debug( + 'Not importing %s because mismatched orgs', + fields['name'], + ) return None else: repo.organization = organization @@ -119,8 +123,10 @@ def create_repository(self, fields, privacy=None, organization=None): repo.save() return repo else: - log.debug('Not importing %s because mismatched type', - fields['name']) + log.debug( + 'Not importing %s because mismatched type', + fields['name'], + ) def create_organization(self, fields): """ @@ -168,9 +174,11 @@ def get_webhook_data(self, project, integration): domain=settings.PRODUCTION_DOMAIN, path=reverse( 'api_webhook', - kwargs={'project_slug': project.slug, - 'integration_pk': integration.pk} - ) + kwargs={ + 'project_slug': project.slug, + 'integration_pk': integration.pk, + }, + ), ), 'content_type': 'json', }, @@ -196,33 +204,44 @@ def setup_webhook(self, project): resp = None try: resp = session.post( - ('https://api.github.com/repos/{owner}/{repo}/hooks' - .format(owner=owner, repo=repo)), + ( + 'https://api.github.com/repos/{owner}/{repo}/hooks' + .format(owner=owner, repo=repo) + ), data=data, - headers={'content-type': 'application/json'} + headers={'content-type': 'application/json'}, ) # GitHub will return 200 if already synced if resp.status_code in [200, 201]: recv_data = resp.json() integration.provider_data = recv_data integration.save() - log.info('GitHub webhook creation successful for project: %s', - project) + log.info( + 'GitHub webhook creation successful for project: %s', + project, + ) return (True, resp) # Catch exceptions with request or deserializing JSON except (RequestException, ValueError): - log.error('GitHub webhook creation failed for project: %s', - project, exc_info=True) + log.exception( + 'GitHub webhook creation failed 
for project: %s', + project, + ) else: - log.error('GitHub webhook creation failed for project: %s', - project) - # Response data should always be JSON, still try to log if not though + log.exception( + 'GitHub webhook creation failed for project: %s', + project, + ) + # Response data should always be JSON, still try to log if not + # though try: debug_data = resp.json() except ValueError: debug_data = resp.content - log.debug('GitHub webhook creation failure response: %s', - debug_data) + log.debug( + 'GitHub webhook creation failure response: %s', + debug_data, + ) return (False, resp) def update_webhook(self, project, integration): @@ -244,29 +263,37 @@ def update_webhook(self, project, integration): resp = session.patch( url, data=data, - headers={'content-type': 'application/json'} + headers={'content-type': 'application/json'}, ) # GitHub will return 200 if already synced if resp.status_code in [200, 201]: recv_data = resp.json() integration.provider_data = recv_data integration.save() - log.info('GitHub webhook creation successful for project: %s', - project) + log.info( + 'GitHub webhook creation successful for project: %s', + project, + ) return (True, resp) # Catch exceptions with request or deserializing JSON except (RequestException, ValueError): - log.error('GitHub webhook update failed for project: %s', - project, exc_info=True) + log.exception( + 'GitHub webhook update failed for project: %s', + project, + ) else: - log.error('GitHub webhook update failed for project: %s', - project) + log.exception( + 'GitHub webhook update failed for project: %s', + project, + ) try: debug_data = resp.json() except ValueError: debug_data = resp.content - log.debug('GitHub webhook creation failure response: %s', - debug_data) + log.debug( + 'GitHub webhook creation failure response: %s', + debug_data, + ) return (False, resp) @classmethod @@ -283,7 +310,8 @@ def get_token_for_project(cls, project, force_local=False): for user in project.users.all(): tokens = SocialToken.objects.filter( account__user=user, - app__provider=cls.adapter.provider_id) + app__provider=cls.adapter.provider_id, + ) if tokens.exists(): token = tokens[0].token except Exception: diff --git a/readthedocs/oauth/services/gitlab.py b/readthedocs/oauth/services/gitlab.py index 4919ef88a55..9307065ef5a 100644 --- a/readthedocs/oauth/services/gitlab.py +++ b/readthedocs/oauth/services/gitlab.py @@ -39,7 +39,8 @@ class GitLabService(Service): # Just use the network location to determine if it's a GitLab project # because private repos have another base url, eg. 
git@gitlab.example.com url_pattern = re.compile( - re.escape(urlparse(adapter.provider_base_url).netloc)) + re.escape(urlparse(adapter.provider_base_url).netloc), + ) def get_next_url_to_paginate(self, response): return response.links.get('next', {}).get('url') @@ -73,7 +74,8 @@ def sync_repositories(self): log.exception('Error syncing GitLab repositories') raise Exception( 'Could not sync your GitLab repositories, try reconnecting ' - 'your account') + 'your account', + ) def sync_organizations(self): orgs = self.paginate( @@ -103,7 +105,8 @@ def sync_organizations(self): log.exception('Error syncing GitLab organizations') raise Exception( 'Could not sync your GitLab organization, try reconnecting ' - 'your account') + 'your account', + ) def is_owned_by(self, owner_id): return self.account.extra_data['id'] == owner_id @@ -329,14 +332,21 @@ def update_webhook(self, project, integration): integration.provider_data = recv_data integration.save() log.info( - 'GitLab webhook update successful for project: %s', project) + 'GitLab webhook update successful for project: %s', + project, + ) return (True, resp) # Catch exceptions with request or deserializing JSON except (RequestException, ValueError): log.exception( - 'GitLab webhook update failed for project: %s', project) + 'GitLab webhook update failed for project: %s', + project, + ) else: - log.error('GitLab webhook update failed for project: %s', project) + log.exception( + 'GitLab webhook update failed for project: %s', + project, + ) try: debug_data = resp.json() except ValueError: diff --git a/readthedocs/payments/forms.py b/readthedocs/payments/forms.py index 0d655ef1584..e8f2d29e6d3 100644 --- a/readthedocs/payments/forms.py +++ b/readthedocs/payments/forms.py @@ -1,14 +1,16 @@ +# -*- coding: utf-8 -*- """Payment forms.""" -from __future__ import absolute_import -from builtins import str -from builtins import object +from __future__ import ( + absolute_import, division, print_function, unicode_literals) + import logging -from stripe.resource import Customer, Charge -from stripe.error import InvalidRequestError +from builtins import object, str from django import forms from django.utils.translation import ugettext_lazy as _ +from stripe.error import InvalidRequestError +from stripe.resource import Charge, Customer from .utils import stripe @@ -38,23 +40,29 @@ def get_customer_kwargs(self): raise NotImplementedError def get_customer(self): - return self.ensure_stripe_resource(resource=Customer, - attrs=self.get_customer_kwargs()) + return self.ensure_stripe_resource( + resource=Customer, + attrs=self.get_customer_kwargs(), + ) def get_subscription_kwargs(self): raise NotImplementedError def get_subscription(self): customer = self.get_customer() - return self.ensure_stripe_resource(resource=customer.subscriptions, - attrs=self.get_subscription_kwargs()) + return self.ensure_stripe_resource( + resource=customer.subscriptions, + attrs=self.get_subscription_kwargs(), + ) def get_charge_kwargs(self): raise NotImplementedError def get_charge(self): - return self.ensure_stripe_resource(resource=Charge, - attrs=self.get_charge_kwargs()) + return self.ensure_stripe_resource( + resource=Charge, + attrs=self.get_charge_kwargs(), + ) class StripeModelForm(forms.ModelForm): @@ -74,41 +82,58 @@ class StripeModelForm(forms.ModelForm): # Stripe token input from Stripe.js stripe_token = forms.CharField( required=False, - widget=forms.HiddenInput(attrs={ - 'data-bind': 'valueInit: stripe_token', - }) + widget=forms.HiddenInput( + attrs={ + 'data-bind': 
diff --git a/readthedocs/payments/forms.py b/readthedocs/payments/forms.py
index 0d655ef1584..e8f2d29e6d3 100644
--- a/readthedocs/payments/forms.py
+++ b/readthedocs/payments/forms.py
@@ -1,14 +1,16 @@
+# -*- coding: utf-8 -*-
 """Payment forms."""
-from __future__ import absolute_import
-from builtins import str
-from builtins import object
+from __future__ import (
+    absolute_import, division, print_function, unicode_literals)
+
 import logging
-from stripe.resource import Customer, Charge
-from stripe.error import InvalidRequestError
+from builtins import object, str
 from django import forms
 from django.utils.translation import ugettext_lazy as _
+from stripe.error import InvalidRequestError
+from stripe.resource import Charge, Customer

 from .utils import stripe
@@ -38,23 +40,29 @@ def get_customer_kwargs(self):
         raise NotImplementedError

     def get_customer(self):
-        return self.ensure_stripe_resource(resource=Customer,
-                                           attrs=self.get_customer_kwargs())
+        return self.ensure_stripe_resource(
+            resource=Customer,
+            attrs=self.get_customer_kwargs(),
+        )

     def get_subscription_kwargs(self):
         raise NotImplementedError

     def get_subscription(self):
         customer = self.get_customer()
-        return self.ensure_stripe_resource(resource=customer.subscriptions,
-                                           attrs=self.get_subscription_kwargs())
+        return self.ensure_stripe_resource(
+            resource=customer.subscriptions,
+            attrs=self.get_subscription_kwargs(),
+        )

     def get_charge_kwargs(self):
         raise NotImplementedError

     def get_charge(self):
-        return self.ensure_stripe_resource(resource=Charge,
-                                           attrs=self.get_charge_kwargs())
+        return self.ensure_stripe_resource(
+            resource=Charge,
+            attrs=self.get_charge_kwargs(),
+        )


 class StripeModelForm(forms.ModelForm):
@@ -74,41 +82,58 @@ class StripeModelForm(forms.ModelForm):
     # Stripe token input from Stripe.js
     stripe_token = forms.CharField(
         required=False,
-        widget=forms.HiddenInput(attrs={
-            'data-bind': 'valueInit: stripe_token',
-        })
+        widget=forms.HiddenInput(
+            attrs={
+                'data-bind': 'valueInit: stripe_token',
+            },
+        ),
     )
     # Fields used for fetching token with javascript, listed as form fields so
     # that data can survive validation errors
     cc_number = forms.CharField(
         label=_('Card number'),
-        widget=forms.TextInput(attrs={
-            'data-bind': ('valueInit: cc_number, '
-                          'textInput: cc_number, '
-                          '''css: {'field-error': error_cc_number() != null}''')
-        }),
+        widget=forms.TextInput(
+            attrs={
+                'data-bind': (
+                    'valueInit: cc_number, '
+                    'textInput: cc_number, '
+                    '''css: {'field-error': error_cc_number() != null}'''
+                ),
+            },
+        ),
         max_length=25,
-        required=False)
+        required=False,
+    )
     cc_expiry = forms.CharField(
         label=_('Card expiration'),
-        widget=forms.TextInput(attrs={
-            'data-bind': ('valueInit: cc_expiry, '
-                          'textInput: cc_expiry, '
-                          '''css: {'field-error': error_cc_expiry() != null}''')
-        }),
+        widget=forms.TextInput(
+            attrs={
+                'data-bind': (
+                    'valueInit: cc_expiry, '
+                    'textInput: cc_expiry, '
+                    '''css: {'field-error': error_cc_expiry() != null}'''
+                ),
+            },
+        ),
         max_length=10,
-        required=False)
+        required=False,
+    )
     cc_cvv = forms.CharField(
         label=_('Card CVV'),
-        widget=forms.TextInput(attrs={
-            'data-bind': ('valueInit: cc_cvv, '
-                          'textInput: cc_cvv, '
-                          '''css: {'field-error': error_cc_cvv() != null}'''),
-            'autocomplete': 'off',
-        }),
+        widget=forms.TextInput(
+            attrs={
+                'data-bind': (
+                    'valueInit: cc_cvv, '
+                    'textInput: cc_cvv, '
+                    '''css: {'field-error': error_cc_cvv() != null}'''
+                ),
+                'autocomplete': 'off',
+            },
+        ),
         max_length=8,
-        required=False)
+        required=False,
+    )

     def __init__(self, *args, **kwargs):
         self.customer = kwargs.pop('customer', None)
@@ -166,10 +191,10 @@ def clean(self):
                 forms.ValidationError(str(e)),
             )
         except stripe.error.StripeError as e:
-            log.error('There was a problem communicating with Stripe: %s',
-                      str(e), exc_info=True)
+            log.exception('There was a problem communicating with Stripe')
             raise forms.ValidationError(
-                _('There was a problem communicating with Stripe'))
+                _('There was a problem communicating with Stripe'),
+            )
         return cleaned_data

     def clear_card_data(self):
@@ -182,13 +207,12 @@
         try:
             self.data['stripe_token'] = None
         except AttributeError:
-            raise AttributeError('Form was passed immutable QueryDict POST data')
+            raise AttributeError(
+                'Form was passed immutable QueryDict POST data',
+            )

     def fields_with_cc_group(self):
-        group = {
-            'is_cc_group': True,
-            'fields': []
-        }
+        group = {'is_cc_group': True, 'fields': []}
         for field in self:
             if field.name in ['cc_number', 'cc_expiry', 'cc_cvv']:
                 group['fields'].append(field)
diff --git a/readthedocs/projects/search_indexes.py b/readthedocs/projects/search_indexes.py
index 5300fea5b9a..7de500ee3b1 100644
--- a/readthedocs/projects/search_indexes.py
+++ b/readthedocs/projects/search_indexes.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 """
 Project search indexes.

@@ -9,8 +8,11 @@
 maintain this code. Use at your own risk, this may go away soon.
""" -from __future__ import absolute_import +from __future__ import ( + absolute_import, division, print_function, unicode_literals) + import codecs +import logging import os from django.conf import settings @@ -21,7 +23,6 @@ from readthedocs.projects import constants from readthedocs.projects.models import ImportedFile, Project -import logging log = logging.getLogger(__name__) @@ -85,24 +86,41 @@ def prepare_text(self, obj): with codecs.open(file_path, encoding='utf-8', mode='r') as f: content = f.read() except IOError as e: - log.info('(Search Index) Unable to index file: %s, error :%s', - file_path, e) + log.info( + '(Search Index) Unable to index file: %s', + file_path, + exc_info=True, + ) return log.debug('(Search Index) Indexing %s:%s', obj.project, obj.path) - document_pyquery_path = getattr(settings, 'DOCUMENT_PYQUERY_PATH', - 'div.document') + document_pyquery_path = getattr( + settings, + 'DOCUMENT_PYQUERY_PATH', + 'div.document', + ) try: - to_index = strip_tags(PyQuery(content)( - document_pyquery_path).html()).replace(u'¶', '') + to_index = strip_tags( + PyQuery(content)(document_pyquery_path).html(), + ).replace( + u'¶', + '', + ) except ValueError: # Pyquery returns ValueError if div.document doesn't exist. return if not to_index: - log.info('(Search Index) Unable to index file: %s:%s, empty file', - obj.project, file_path) + log.info( + '(Search Index) Unable to index file: %s:%s, empty file', + obj.project, + file_path, + ) else: - log.debug('(Search Index) %s:%s length: %s', obj.project, file_path, - len(to_index)) + log.debug( + '(Search Index) %s:%s length: %s', + obj.project, + file_path, + len(to_index), + ) return to_index def get_model(self): @@ -110,5 +128,8 @@ def get_model(self): def index_queryset(self, using=None): """Used when the entire index for model is updated.""" - return (self.get_model().objects - .filter(project__privacy_level=constants.PUBLIC)) + return ( + self.get_model().objects.filter( + project__privacy_level=constants.PUBLIC, + ) + ) diff --git a/readthedocs/projects/static/projects/js/import.js b/readthedocs/projects/static/projects/js/import.js index 56721b9aafe..adea0f96f11 100644 --- a/readthedocs/projects/static/projects/js/import.js +++ b/readthedocs/projects/static/projects/js/import.js @@ -1 +1 @@ -require=function(){function e(r,t,n){function a(s,i){if(!t[s]){if(!r[s]){var u="function"==typeof require&&require;if(!i&&u)return u(s,!0);if(o)return o(s,!0);var c=new Error("Cannot find module '"+s+"'");throw c.code="MODULE_NOT_FOUND",c}var l=t[s]={exports:{}};r[s][0].call(l.exports,function(e){var t=r[s][1][e];return a(t?t:e)},l,l.exports,e,r,t,n)}return t[s].exports}for(var o="function"==typeof require&&require,s=0;s0)setTimeout(r,2e3);else{var a=e.responseJSON.detail||e.statusText;t.reject({message:a})}})}var t=o.Deferred(),n=5;return setTimeout(r,2e3),t}function a(e){var r=o.Deferred(),t=e.url,a=e.token,s={csrfmiddlewaretoken:a};return $.ajax({method:"POST",url:t,data:s,success:function(e){n(e).then(function(){r.resolve()}).fail(function(e){r.reject(e)})},error:function(e){var t=e.responseJSON.detail||e.statusText;r.reject({message:t})}}),r}var o=e("jquery");r.exports={poll_task:n,trigger_task:a}},{jquery:"jquery"}],"projects/import":[function(e,r,t){function n(e,r){var t=c("").attr("href",e).get(0);return Object.keys(r).map(function(e){t.search&&(t.search+="&"),t.search+=e+"="+r[e]}),t.href}function a(e,r){var 
t=this;t.id=u.observable(e.id),t.name=u.observable(e.name),t.slug=u.observable(e.slug),t.active=u.observable(e.active),t.avatar_url=u.observable(n(e.avatar_url,{size:32})),t.display_name=u.computed(function(){return t.name()||t.slug()}),t.filter_id=u.computed(function(){return t.id()}),t.filter_type="org",t.filtered=u.computed(function(){var e=r.filter_by();return e.id&&e.id!==t.filter_id()||e.type&&e.type!==t.filter_type})}function o(e,r){var t=this;t.id=u.observable(e.id),t.username=u.observable(e.username),t.active=u.observable(e.active),t.avatar_url=u.observable(n(e.avatar_url,{size:32})),t.provider=u.observable(e.provider),t.display_name=u.computed(function(){return t.username()}),t.filter_id=u.computed(function(){return t.provider().id}),t.filter_type="own",t.filtered=u.computed(function(){var e=r.filter_by();return e.id&&e.id!==t.filter_id()||e.type&&e.type!==t.filter_type})}function s(e,r){var t=this;t.id=u.observable(e.id),t.name=u.observable(e.name),t.full_name=u.observable(e.full_name),t.description=u.observable(e.description),t.vcs=u.observable(e.vcs),t.organization=u.observable(e.organization),t.html_url=u.observable(e.html_url),t.clone_url=u.observable(e.clone_url),t.ssh_url=u.observable(e.ssh_url),t.matches=u.observable(e.matches),t.match=u.computed(function(){var e=t.matches();if(e&&e.length>0)return e[0]}),t["private"]=u.observable(e["private"]),t.active=u.observable(e.active),t.admin=u.observable(e.admin),t.is_locked=u.computed(function(){return t["private"]()&&!t.admin()}),t.avatar_url=u.observable(n(e.avatar_url,{size:32})),t.import_repo=function(){var e={name:t.name(),repo:t.clone_url(),repo_type:t.vcs(),description:t.description(),project_url:t.html_url(),remote_repository:t.id()},n=c("
    ");n.attr("action",r.urls.projects_import).attr("method","POST").hide(),Object.keys(e).map(function(r){var t=c("").attr("type","hidden").attr("name",r).attr("value",e[r]);n.append(t)});var a=c("").attr("type","hidden").attr("name","csrfmiddlewaretoken").attr("value",r.csrf_token);n.append(a);var o=c("").attr("type","submit");n.append(o),c("body").append(n),n.submit()}}function i(e,r){var t=this;t.config=r||{},t.urls=r.urls||{},t.csrf_token=r.csrf_token||"",t.error=u.observable(null),t.is_syncing=u.observable(!1),t.is_ready=u.observable(!1),t.page_current=u.observable(null),t.page_next=u.observable(null),t.page_previous=u.observable(null),t.filter_by=u.observable({id:null,type:null}),t.accounts_raw=u.observableArray(),t.organizations_raw=u.observableArray(),t.filters=u.computed(function(){var e,r=[],n=t.accounts_raw(),s=t.organizations_raw();for(e in n){var i=new o(n[e],t);r.push(i)}for(e in s){var u=new a(s[e],t);r.push(u)}return r}),t.projects=u.observableArray(),u.computed(function(){var e=t.filter_by(),r=t.page_current()||t.urls["remoterepository-list"];t.page_current()||("org"===e.type&&(r=n(t.urls["remoterepository-list"],{org:e.id})),"own"===e.type&&(r=n(t.urls["remoterepository-list"],{own:e.id}))),t.error(null),c.getJSON(r).success(function(e){var r=[];t.page_next(e.next),t.page_previous(e.previous);var n;for(n in e.results){var a=new s(e.results[n],t);r.push(a)}t.projects(r)}).error(function(e){var r=e.responseJSON.detail||e.statusText;t.error({message:r})}).always(function(){t.is_ready(!0)})}).extend({deferred:!0}),t.get_organizations=function(){c.getJSON(t.urls["remoteorganization-list"]).success(function(e){t.organizations_raw(e.results)}).error(function(e){var r=e.responseJSON.detail||e.statusText;t.error({message:r})})},t.get_accounts=function(){c.getJSON(t.urls["remoteaccount-list"]).success(function(e){t.accounts_raw(e.results)}).error(function(e){var r=e.responseJSON.detail||e.statusText;t.error({message:r})})},t.sync_projects=function(){var e=t.urls.api_sync_remote_repositories;t.error(null),t.is_syncing(!0),l.trigger_task({url:e,token:t.csrf_token}).then(function(e){t.get_organizations(),t.get_accounts()}).fail(function(e){t.error(e)}).always(function(){t.is_syncing(!1)})},t.has_projects=u.computed(function(){return t.projects().length>0}),t.next_page=function(){t.page_current(t.page_next())},t.previous_page=function(){t.page_current(t.page_previous())},t.set_filter_by=function(e,r){var n=t.filter_by();n.id===e?(n.id=null,n.type=null):(n.id=e,n.type=r),t.filter_by(n),n.id&&t.page_current(null)}}var u=e("knockout"),c=e("jquery"),l=e("readthedocs/core/static-src/core/js/tasks");c(function(){var e=c("#id_repo"),r=c("#id_repo_type");e.blur(function(){var t,n=e.val();switch(!0){case/^hg/.test(n):t="hg";break;case/^bzr/.test(n):case/launchpad/.test(n):t="bzr";break;case/trunk/.test(n):case/^svn/.test(n):t="svn";break;default:case/github/.test(n):case/(^git|\.git$)/.test(n):t="git"}r.val(t)})}),i.init=function(e,r,t){var n=new i(r,t);return n.get_accounts(),n.get_organizations(),u.applyBindings(n,e),n},r.exports.ProjectImportView=i},{jquery:"jquery",knockout:"knockout","readthedocs/core/static-src/core/js/tasks":1}]},{},[]); \ No newline at end of file +require=function(){function e(r,t,n){function a(s,i){if(!t[s]){if(!r[s]){var u="function"==typeof require&&require;if(!i&&u)return u(s,!0);if(o)return o(s,!0);var c=new Error("Cannot find module '"+s+"'");throw c.code="MODULE_NOT_FOUND",c}var l=t[s]={exports:{}};r[s][0].call(l.exports,function(e){var t=r[s][1][e];return 
a(t?t:e)},l,l.exports,e,r,t,n)}return t[s].exports}for(var o="function"==typeof require&&require,s=0;s0)setTimeout(r,2e3);else{var a=e.responseJSON.detail||e.statusText;t.reject({message:a})}})}var t=o.Deferred(),n=5;return setTimeout(r,2e3),t}function a(e){var r=o.Deferred(),t=e.url,a=e.token,s={csrfmiddlewaretoken:a};return $.ajax({method:"POST",url:t,data:s,success:function(e){n(e).then(function(){r.resolve()}).fail(function(e){r.reject(e)})},error:function(e){var t=e.responseJSON.detail||e.statusText;r.reject({message:t})}}),r}var o=e("jquery");r.exports={poll_task:n,trigger_task:a}},{jquery:"jquery"}],"projects/import":[function(e,r,t){function n(e,r){var t=c("").attr("href",e).get(0);return Object.keys(r).map(function(e){t.search&&(t.search+="&"),t.search+=e+"="+r[e]}),t.href}function a(e,r){var t=this;t.id=u.observable(e.id),t.name=u.observable(e.name),t.slug=u.observable(e.slug),t.active=u.observable(e.active),t.avatar_url=u.observable(n(e.avatar_url,{size:32})),t.display_name=u.computed(function(){return t.name()||t.slug()}),t.filter_id=u.computed(function(){return t.id()}),t.filter_type="org",t.filtered=u.computed(function(){var e=r.filter_by();return e.id&&e.id!==t.filter_id()||e.type&&e.type!==t.filter_type})}function o(e,r){var t=this;t.id=u.observable(e.id),t.username=u.observable(e.username),t.active=u.observable(e.active),t.avatar_url=u.observable(n(e.avatar_url,{size:32})),t.provider=u.observable(e.provider),t.display_name=u.computed(function(){return t.username()}),t.filter_id=u.computed(function(){return t.provider().id}),t.filter_type="own",t.filtered=u.computed(function(){var e=r.filter_by();return e.id&&e.id!==t.filter_id()||e.type&&e.type!==t.filter_type})}function s(e,r){var t=this;t.id=u.observable(e.id),t.name=u.observable(e.name),t.full_name=u.observable(e.full_name),t.description=u.observable(e.description),t.vcs=u.observable(e.vcs),t.organization=u.observable(e.organization),t.html_url=u.observable(e.html_url),t.clone_url=u.observable(e.clone_url),t.ssh_url=u.observable(e.ssh_url),t.matches=u.observable(e.matches),t.match=u.computed(function(){var e=t.matches();if(e&&e.length>0)return e[0]}),t["private"]=u.observable(e["private"]),t.active=u.observable(e.active),t.admin=u.observable(e.admin),t.is_locked=u.computed(function(){return t["private"]()&&!t.admin()}),t.avatar_url=u.observable(n(e.avatar_url,{size:32})),t.import_repo=function(){var e={name:t.name(),repo:t.clone_url(),repo_type:t.vcs(),description:t.description(),project_url:t.html_url(),remote_repository:t.id()},n=c("");n.attr("action",r.urls.projects_import).attr("method","POST").hide(),Object.keys(e).map(function(r){var t=c("").attr("type","hidden").attr("name",r).attr("value",e[r]);n.append(t)});var a=c("").attr("type","hidden").attr("name","csrfmiddlewaretoken").attr("value",r.csrf_token);n.append(a);var o=c("").attr("type","submit");n.append(o),c("body").append(n),n.submit()}}function i(e,r){var t=this;t.config=r||{},t.urls=r.urls||{},t.csrf_token=r.csrf_token||"",t.error=u.observable(null),t.is_syncing=u.observable(!1),t.is_ready=u.observable(!1),t.page_current=u.observable(null),t.page_next=u.observable(null),t.page_previous=u.observable(null),t.filter_by=u.observable({id:null,type:null}),t.accounts_raw=u.observableArray(),t.organizations_raw=u.observableArray(),t.filters=u.computed(function(){var e,r=[],n=t.accounts_raw(),s=t.organizations_raw();for(e in n){var i=new o(n[e],t);r.push(i)}for(e in s){var u=new a(s[e],t);r.push(u)}return r}),t.projects=u.observableArray(),u.computed(function(){var 
e=t.filter_by(),r=t.page_current()||t.urls["remoterepository-list"];t.page_current()||("org"===e.type&&(r=n(t.urls["remoterepository-list"],{org:e.id})),"own"===e.type&&(r=n(t.urls["remoterepository-list"],{own:e.id}))),t.error(null),c.getJSON(r).success(function(e){var r=[];t.page_next(e.next),t.page_previous(e.previous);var n;for(n in e.results){var a=new s(e.results[n],t);r.push(a)}t.projects(r)}).error(function(e){var r=e.responseJSON.detail||e.statusText;t.error({message:r})}).always(function(){t.is_ready(!0)})}).extend({deferred:!0}),t.get_organizations=function(){c.getJSON(t.urls["remoteorganization-list"]).success(function(e){t.organizations_raw(e.results)}).error(function(e){var r=e.responseJSON.detail||e.statusText;t.error({message:r})})},t.get_accounts=function(){c.getJSON(t.urls["remoteaccount-list"]).success(function(e){t.accounts_raw(e.results)}).error(function(e){var r=e.responseJSON.detail||e.statusText;t.error({message:r})})},t.sync_projects=function(){var e=t.urls.api_sync_remote_repositories;t.error(null),t.is_syncing(!0),l.trigger_task({url:e,token:t.csrf_token}).then(function(e){t.get_organizations(),t.get_accounts()}).fail(function(e){t.error(e)}).always(function(){t.is_syncing(!1)})},t.has_projects=u.computed(function(){return t.projects().length>0}),t.next_page=function(){t.page_current(t.page_next())},t.previous_page=function(){t.page_current(t.page_previous())},t.set_filter_by=function(e,r){var n=t.filter_by();n.id===e?(n.id=null,n.type=null):(n.id=e,n.type=r),t.filter_by(n),n.id&&t.page_current(null)}}var u=e("knockout"),c=e("jquery"),l=e("readthedocs/core/static-src/core/js/tasks");c(function(){var e=c("#id_repo"),r=c("#id_repo_type");e.blur(function(){var t,n=e.val();switch(!0){case/^hg/.test(n):t="hg";break;case/^bzr/.test(n):case/launchpad/.test(n):t="bzr";break;case/trunk/.test(n):case/^svn/.test(n):t="svn";break;default:case/github/.test(n):case/(^git|\.git$)/.test(n):t="git"}r.val(t)})}),i.init=function(e,r,t){var n=new i(r,t);return n.get_accounts(),n.get_organizations(),u.applyBindings(n,e),n},r.exports.ProjectImportView=i},{jquery:"jquery",knockout:"knockout","readthedocs/core/static-src/core/js/tasks":1}]},{},[]); diff --git a/readthedocs/projects/static/projects/js/tools.js b/readthedocs/projects/static/projects/js/tools.js index f65d13e1c81..48a8978d52b 100644 --- a/readthedocs/projects/static/projects/js/tools.js +++ b/readthedocs/projects/static/projects/js/tools.js @@ -1 +1 @@ -require=function(){function e(t,n,r){function s(i,a){if(!n[i]){if(!t[i]){var l="function"==typeof require&&require;if(!a&&l)return l(i,!0);if(o)return o(i,!0);var c=new Error("Cannot find module '"+i+"'");throw c.code="MODULE_NOT_FOUND",c}var u=n[i]={exports:{}};t[i][0].call(u.exports,function(e){var n=t[i][1][e];return s(n?n:e)},u,u.exports,e,t,n,r)}return n[i].exports}for(var o="function"==typeof require&&require,i=0;i0;)e.firstChild.remove();e.appendChild(t)}var n=t.contentWindow;n.document.open(),n.document.write(this.content),n.document.close();var r=n.document.head,s=(n.document.body,null);if(r){s=n.document.createElement("base"),s.target="_parent",s.href=this.url,r.appendChild(s);for(var o=document.head.getElementsByTagName("link"),i=0;i0;)self._completeHandlers.shift()(e)}function success(resp){var type=o.type||resp&&setType(resp.getResponseHeader("Content-Type"));resp="jsonp"!==type?self.request:resp;var 
filteredResponse=globalSetupOptions.dataFilter(resp.responseText,type),r=filteredResponse;try{resp.responseText=r}catch(e){}if(r)switch(type){case"json":try{resp=win.JSON?win.JSON.parse(r):eval("("+r+")")}catch(err){return error(resp,"Could not parse JSON in response",err)}break;case"js":resp=eval(r);break;case"html":resp=r;break;case"xml":resp=resp.responseXML&&resp.responseXML.parseError&&resp.responseXML.parseError.errorCode&&resp.responseXML.parseError.reason?null:resp.responseXML}for(self._responseArgs.resp=resp,self._fulfilled=!0,fn(resp),self._successHandler(resp);self._fulfillmentHandlers.length>0;)resp=self._fulfillmentHandlers.shift()(resp);complete(resp)}function timedOut(){self._timedOut=!0,self.request.abort()}function error(e,t,n){for(e=self.request,self._responseArgs.resp=e,self._responseArgs.msg=t,self._responseArgs.t=n,self._erred=!0;self._errorHandlers.length>0;)self._errorHandlers.shift()(e,t,n);complete(e)}this.url="string"==typeof o?o:o.url,this.timeout=null,this._fulfilled=!1,this._successHandler=function(){},this._fulfillmentHandlers=[],this._errorHandlers=[],this._completeHandlers=[],this._erred=!1,this._responseArgs={};var self=this;fn=fn||function(){},o.timeout&&(this.timeout=setTimeout(function(){timedOut()},o.timeout)),o.success&&(this._successHandler=function(){o.success.apply(o,arguments)}),o.error&&this._errorHandlers.push(function(){o.error.apply(o,arguments)}),o.complete&&this._completeHandlers.push(function(){o.complete.apply(o,arguments)}),this.request=getRequest.call(this,success,error)}function reqwest(e,t){return new Reqwest(e,t)}function normalize(e){return e?e.replace(/\r?\n/g,"\r\n"):""}function serial(e,t){var n,r,s,o,i=e.name,a=e.tagName.toLowerCase(),l=function(e){e&&!e.disabled&&t(i,normalize(e.attributes.value&&e.attributes.value.specified?e.value:e.text))};if(!e.disabled&&i)switch(a){case"input":/reset|button|image|file/i.test(e.type)||(n=/checkbox/i.test(e.type),r=/radio/i.test(e.type),s=e.value,(!(n||r)||e.checked)&&t(i,normalize(n&&""===s?"on":s)));break;case"textarea":t(i,normalize(e.value));break;case"select":if("select-one"===e.type.toLowerCase())l(e.selectedIndex>=0?e.options[e.selectedIndex]:null);else for(o=0;e.length&&o'),c("body").append(t));var n=e.insertContent(t);c(n).show(),t.show(),c(document).click(function(e){c(e.target).closest("#embed-container").length||(c(n).remove(),t.remove())})}function s(e){var t=this;t.config=e||{},"undefined"==typeof t.config.api_host&&(t.config.api_host="https://readthedocs.org"),t.help=a.observable(null),t.error=a.observable(null),t.project=a.observable(t.config.project),t.file=a.observable(null),t.sections=a.observableArray(),a.computed(function(){var e=t.file();if(t.sections.removeAll(),e){t.help("Loading..."),t.error(null),t.section(null);var n=new i.Embed(t.config);n.page(t.project(),"latest",t.file(),function(e){t.sections.removeAll(),t.help(null),t.error(null);var n,r=[];for(n in e.sections){var s=e.sections[n];c.each(s,function(e,t){r.push({title:e,id:e})})}t.sections(r)},function(e){t.help(null),t.error("There was a problem retrieving data from the API")})}}),t.has_sections=a.computed(function(){return t.sections().length>0}),t.section=a.observable(null),t.has_section=a.computed(function(){return null!==t.section()&&""!==t.section()}),t.response=a.observable(null),a.computed(function(){var e=t.file(),n=t.section();if(null==e||null==n)return t.response(null);t.help("Loading..."),t.error(null),t.response(null),t.api_example(null);var r=new 
i.Embed(t.config);r.section(t.project(),"latest",t.file(),t.section(),function(e){t.help(null),t.error(null),t.api_example("var embed = Embed();\nembed.section(\n '"+t.project()+"', 'latest', '"+t.file()+"', '"+t.section()+"',\n function (section) {\n section.insertContent($('#help'));\n }\n);\n"),t.response(e)},function(e){t.help(null),t.error("There was a problem retrieving data from the API")})}),t.has_response=a.computed(function(){return null!=t.response()}),t.api_example=a.observable(null),t.show_help=function(){var e=new i.Embed(t.config);e.section("docs","latest","features/embed","Content Embedding",r)},t.show_embed=function(){new i.Embed(t.config);r(t.response())}}function o(e){var t=this;t.config=e||{},"undefined"==typeof t.config.api_host&&(t.config.api_host="https://readthedocs.org"),t.show_help=function(){var e=new i.Embed;e.section("docs","latest","business/analytics","Analytics",r)}}var i=e("./../../../../../bower_components/readthedocs-client/lib/readthedocs.js"),a=e("knockout"),l=e("jquery"),c=l;t.exports.init_embed=function(e){var t=new s(e);a.applyBindings(t,c("#tool-embed")[0])},t.exports.init_analytics=function(e){var t=new o(e);a.applyBindings(t,c("#tool-analytics")[0])}},{"./../../../../../bower_components/readthedocs-client/lib/readthedocs.js":3,jquery:"jquery",knockout:"knockout"}]},{},[]); \ No newline at end of file +require=function(){function e(t,n,r){function s(i,a){if(!n[i]){if(!t[i]){var l="function"==typeof require&&require;if(!a&&l)return l(i,!0);if(o)return o(i,!0);var c=new Error("Cannot find module '"+i+"'");throw c.code="MODULE_NOT_FOUND",c}var u=n[i]={exports:{}};t[i][0].call(u.exports,function(e){var n=t[i][1][e];return s(n?n:e)},u,u.exports,e,t,n,r)}return n[i].exports}for(var o="function"==typeof require&&require,i=0;i0;)e.firstChild.remove();e.appendChild(t)}var n=t.contentWindow;n.document.open(),n.document.write(this.content),n.document.close();var r=n.document.head,s=(n.document.body,null);if(r){s=n.document.createElement("base"),s.target="_parent",s.href=this.url,r.appendChild(s);for(var o=document.head.getElementsByTagName("link"),i=0;i0;)self._completeHandlers.shift()(e)}function success(resp){var type=o.type||resp&&setType(resp.getResponseHeader("Content-Type"));resp="jsonp"!==type?self.request:resp;var filteredResponse=globalSetupOptions.dataFilter(resp.responseText,type),r=filteredResponse;try{resp.responseText=r}catch(e){}if(r)switch(type){case"json":try{resp=win.JSON?win.JSON.parse(r):eval("("+r+")")}catch(err){return error(resp,"Could not parse JSON in response",err)}break;case"js":resp=eval(r);break;case"html":resp=r;break;case"xml":resp=resp.responseXML&&resp.responseXML.parseError&&resp.responseXML.parseError.errorCode&&resp.responseXML.parseError.reason?null:resp.responseXML}for(self._responseArgs.resp=resp,self._fulfilled=!0,fn(resp),self._successHandler(resp);self._fulfillmentHandlers.length>0;)resp=self._fulfillmentHandlers.shift()(resp);complete(resp)}function timedOut(){self._timedOut=!0,self.request.abort()}function error(e,t,n){for(e=self.request,self._responseArgs.resp=e,self._responseArgs.msg=t,self._responseArgs.t=n,self._erred=!0;self._errorHandlers.length>0;)self._errorHandlers.shift()(e,t,n);complete(e)}this.url="string"==typeof o?o:o.url,this.timeout=null,this._fulfilled=!1,this._successHandler=function(){},this._fulfillmentHandlers=[],this._errorHandlers=[],this._completeHandlers=[],this._erred=!1,this._responseArgs={};var 
self=this;fn=fn||function(){},o.timeout&&(this.timeout=setTimeout(function(){timedOut()},o.timeout)),o.success&&(this._successHandler=function(){o.success.apply(o,arguments)}),o.error&&this._errorHandlers.push(function(){o.error.apply(o,arguments)}),o.complete&&this._completeHandlers.push(function(){o.complete.apply(o,arguments)}),this.request=getRequest.call(this,success,error)}function reqwest(e,t){return new Reqwest(e,t)}function normalize(e){return e?e.replace(/\r?\n/g,"\r\n"):""}function serial(e,t){var n,r,s,o,i=e.name,a=e.tagName.toLowerCase(),l=function(e){e&&!e.disabled&&t(i,normalize(e.attributes.value&&e.attributes.value.specified?e.value:e.text))};if(!e.disabled&&i)switch(a){case"input":/reset|button|image|file/i.test(e.type)||(n=/checkbox/i.test(e.type),r=/radio/i.test(e.type),s=e.value,(!(n||r)||e.checked)&&t(i,normalize(n&&""===s?"on":s)));break;case"textarea":t(i,normalize(e.value));break;case"select":if("select-one"===e.type.toLowerCase())l(e.selectedIndex>=0?e.options[e.selectedIndex]:null);else for(o=0;e.length&&o'),c("body").append(t));var n=e.insertContent(t);c(n).show(),t.show(),c(document).click(function(e){c(e.target).closest("#embed-container").length||(c(n).remove(),t.remove())})}function s(e){var t=this;t.config=e||{},"undefined"==typeof t.config.api_host&&(t.config.api_host="https://readthedocs.org"),t.help=a.observable(null),t.error=a.observable(null),t.project=a.observable(t.config.project),t.file=a.observable(null),t.sections=a.observableArray(),a.computed(function(){var e=t.file();if(t.sections.removeAll(),e){t.help("Loading..."),t.error(null),t.section(null);var n=new i.Embed(t.config);n.page(t.project(),"latest",t.file(),function(e){t.sections.removeAll(),t.help(null),t.error(null);var n,r=[];for(n in e.sections){var s=e.sections[n];c.each(s,function(e,t){r.push({title:e,id:e})})}t.sections(r)},function(e){t.help(null),t.error("There was a problem retrieving data from the API")})}}),t.has_sections=a.computed(function(){return t.sections().length>0}),t.section=a.observable(null),t.has_section=a.computed(function(){return null!==t.section()&&""!==t.section()}),t.response=a.observable(null),a.computed(function(){var e=t.file(),n=t.section();if(null==e||null==n)return t.response(null);t.help("Loading..."),t.error(null),t.response(null),t.api_example(null);var r=new i.Embed(t.config);r.section(t.project(),"latest",t.file(),t.section(),function(e){t.help(null),t.error(null),t.api_example("var embed = Embed();\nembed.section(\n '"+t.project()+"', 'latest', '"+t.file()+"', '"+t.section()+"',\n function (section) {\n section.insertContent($('#help'));\n }\n);\n"),t.response(e)},function(e){t.help(null),t.error("There was a problem retrieving data from the API")})}),t.has_response=a.computed(function(){return null!=t.response()}),t.api_example=a.observable(null),t.show_help=function(){var e=new i.Embed(t.config);e.section("docs","latest","features/embed","Content Embedding",r)},t.show_embed=function(){new i.Embed(t.config);r(t.response())}}function o(e){var t=this;t.config=e||{},"undefined"==typeof t.config.api_host&&(t.config.api_host="https://readthedocs.org"),t.show_help=function(){var e=new i.Embed;e.section("docs","latest","business/analytics","Analytics",r)}}var i=e("./../../../../../bower_components/readthedocs-client/lib/readthedocs.js"),a=e("knockout"),l=e("jquery"),c=l;t.exports.init_embed=function(e){var t=new s(e);a.applyBindings(t,c("#tool-embed")[0])},t.exports.init_analytics=function(e){var t=new 
o(e);a.applyBindings(t,c("#tool-analytics")[0])}},{"./../../../../../bower_components/readthedocs-client/lib/readthedocs.js":3,jquery:"jquery",knockout:"knockout"}]},{},[]); diff --git a/readthedocs/projects/tasks.py b/readthedocs/projects/tasks.py index 4eec679380b..6b3cb8ae672 100644 --- a/readthedocs/projects/tasks.py +++ b/readthedocs/projects/tasks.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ Tasks related to projects. @@ -5,7 +6,8 @@ rebuilding documentation. """ -from __future__ import absolute_import +from __future__ import ( + absolute_import, division, print_function, unicode_literals) import datetime import hashlib @@ -27,29 +29,23 @@ from readthedocs_build.config import ConfigError from slumber.exceptions import HttpClientError -from .constants import LOG_TEMPLATE -from .exceptions import RepositoryError -from .models import ImportedFile, Project, Domain -from .signals import before_vcs, after_vcs, before_build, after_build -from readthedocs.builds.constants import (LATEST, - BUILD_STATE_CLONING, - BUILD_STATE_INSTALLING, - BUILD_STATE_BUILDING, - BUILD_STATE_FINISHED) -from readthedocs.builds.models import Build, Version, APIVersion +from readthedocs.builds.constants import ( + BUILD_STATE_BUILDING, BUILD_STATE_CLONING, BUILD_STATE_FINISHED, + BUILD_STATE_INSTALLING, LATEST) +from readthedocs.builds.models import APIVersion, Build, Version from readthedocs.builds.signals import build_complete from readthedocs.builds.syncers import Syncer from readthedocs.cdn.purge import purge from readthedocs.core.resolver import resolve_path -from readthedocs.core.symlink import PublicSymlink, PrivateSymlink -from readthedocs.core.utils import send_email, broadcast +from readthedocs.core.symlink import PrivateSymlink, PublicSymlink +from readthedocs.core.utils import broadcast, send_email from readthedocs.doc_builder.config import load_yaml_config from readthedocs.doc_builder.constants import DOCKER_LIMITS -from readthedocs.doc_builder.environments import (LocalBuildEnvironment, - DockerBuildEnvironment) +from readthedocs.doc_builder.environments import ( + DockerBuildEnvironment, LocalBuildEnvironment) from readthedocs.doc_builder.exceptions import BuildEnvironmentError from readthedocs.doc_builder.loader import get_builder_class -from readthedocs.doc_builder.python_environments import Virtualenv, Conda +from readthedocs.doc_builder.python_environments import Conda, Virtualenv from readthedocs.projects.models import APIProject from readthedocs.restapi.client import api as api_v2 from readthedocs.restapi.utils import index_search_request @@ -57,6 +53,10 @@ from readthedocs.vcs_support import utils as vcs_support_utils from readthedocs.worker import app +from .constants import LOG_TEMPLATE +from .exceptions import RepositoryError +from .models import Domain, ImportedFile, Project +from .signals import after_build, after_vcs, before_build, before_vcs log = logging.getLogger(__name__) @@ -83,9 +83,9 @@ def get_version(project=None, version_pk=None): if version_pk: version_data = api_v2.version(version_pk).get() else: - version_data = (api_v2 - .version(project.slug) - .get(slug=LATEST)['objects'][0]) + version_data = ( + api_v2.version(project.slug).get(slug=LATEST)['objects'][0] + ) return APIVersion(**version_data) def sync_repo(self): @@ -103,7 +103,8 @@ def sync_repo(self): with self.project.repo_nonblockinglock( version=self.version, - max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)): + max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30), + ): # Get the actual code on disk 
try:
@@ -129,23 +130,23 @@ def sync_repo(self):
             version_post_data = {'repo': version_repo.repo_url}

             if version_repo.supports_tags:
-                version_post_data['tags'] = [
-                    {'identifier': v.identifier,
-                     'verbose_name': v.verbose_name,
-                     } for v in version_repo.tags
-                ]
+                version_post_data['tags'] = [{
+                    'identifier': v.identifier,
+                    'verbose_name': v.verbose_name,
+                } for v in version_repo.tags]

             if version_repo.supports_branches:
-                version_post_data['branches'] = [
-                    {'identifier': v.identifier,
-                     'verbose_name': v.verbose_name,
-                     } for v in version_repo.branches
-                ]
+                version_post_data['branches'] = [{
+                    'identifier': v.identifier,
+                    'verbose_name': v.verbose_name,
+                } for v in version_repo.branches]

             try:
                 # Hit the API ``sync_versions`` which may trigger a new build
                 # for the stable version
-                api_v2.project(self.project.pk).sync_versions.post(version_post_data)
+                api_v2.project(
+                    self.project.pk,
+                ).sync_versions.post(version_post_data)
             except HttpClientError:
                 log.exception('Sync Versions Exception')
             except Exception:
@@ -155,10 +156,13 @@ def sync_repo(self):
     # refactored out anyways, as calling from the method removes the original
     # caller from logging.
     def _log(self, msg):
-        log.info(LOG_TEMPLATE
-                 .format(project=self.project.slug,
-                         version=self.version.slug,
-                         msg=msg))
+        log.info(
+            LOG_TEMPLATE.format(
+                project=self.project.slug,
+                version=self.version.slug,
+                msg=msg,
+            ),
+        )


 class SyncRepositoryTask(SyncRepositoryMixin, Task):
@@ -183,12 +187,15 @@ def run(self, version_pk):  # pylint: disable=arguments-differ
             self.project = self.version.project
             self.sync_repo()
             return True
-        # Catch unhandled errors when syncing
+        except RepositoryError as e:
+            # Do not log handled exceptions as ERROR
+            log.warning('There was an error with the repository: msg=%s', e.msg)
         except Exception:
+            # Catch unhandled errors when syncing
             log.exception(
                 'An unhandled exception was raised during VCS syncing',
             )
-        return False
+        return False


 class UpdateDocsTask(SyncRepositoryMixin, Task):
@@ -206,9 +213,18 @@ class UpdateDocsTask(SyncRepositoryMixin, Task):
     name = __name__ + '.update_docs'

     # TODO: the argument from the __init__ are used only in tests
-    def __init__(self, build_env=None, python_env=None, config=None,
-                 force=False, search=True, localmedia=True,
-                 build=None, project=None, version=None):
+    def __init__(
+            self,
+            build_env=None,
+            python_env=None,
+            config=None,
+            force=False,
+            search=True,
+            localmedia=True,
+            build=None,
+            project=None,
+            version=None,
+    ):
         self.build_env = build_env
         self.python_env = python_env
         self.build_force = force
@@ -227,14 +243,19 @@ def __init__(self, build_env=None, python_env=None, config=None,
         self.config = config

     def _log(self, msg):
-        log.info(LOG_TEMPLATE
-                 .format(project=self.project.slug,
-                         version=self.version.slug,
-                         msg=msg))
+        log.info(
+            LOG_TEMPLATE.format(
+                project=self.project.slug,
+                version=self.version.slug,
+                msg=msg,
+            ),
+        )

     # pylint: disable=arguments-differ
-    def run(self, pk, version_pk=None, build_pk=None, record=True,
-            docker=False, search=True, force=False, localmedia=True, **__):
+    def run(
+            self, pk, version_pk=None, build_pk=None, record=True, docker=False,
+            search=True, force=False, localmedia=True, **__
+    ):
         """
         Run a documentation sync n' build.
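The `SyncRepositoryTask.run()` hunk above draws the line this PR keeps drawing between handled and unhandled failures: a `RepositoryError` is a routine, user-facing condition and gets a WARNING, while anything unexpected keeps the full ERROR-with-traceback treatment. Stripped of diff markers, the error-handling skeleton looks like this (a condensed sketch, not the full task body; `get_version()` is the mixin helper shown earlier):

    def run(self, version_pk):  # pylint: disable=arguments-differ
        try:
            self.version = self.get_version(version_pk=version_pk)
            self.project = self.version.project
            self.sync_repo()
            return True
        except RepositoryError as e:
            # Handled failure mode (bad URL, missing branch, ...): warn,
            # but do not emit an ERROR-level traceback.
            log.warning('There was an error with the repository: msg=%s', e.msg)
        except Exception:
            # Anything else is a real bug; keep the traceback.
            log.exception('An unhandled exception was raised during VCS syncing')
        return False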
@@ -281,12 +302,12 @@ def run(self, pk, version_pk=None, build_pk=None, record=True, except Exception as e: # noqa log.exception( 'An unhandled exception was raised during build setup', - extra={'tags': {'build': build_pk}} + extra={'tags': {'build': build_pk}}, ) self.setup_env.failure = BuildEnvironmentError( BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format( build_id=build_pk, - ) + ), ) self.setup_env.update_build(BUILD_STATE_FINISHED) return False @@ -298,12 +319,12 @@ def run(self, pk, version_pk=None, build_pk=None, record=True, except Exception as e: # noqa log.exception( 'An unhandled exception was raised during project build', - extra={'tags': {'build': build_pk}} + extra={'tags': {'build': build_pk}}, ) self.build_env.failure = BuildEnvironmentError( BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format( build_id=build_pk, - ) + ), ) self.build_env.update_build(BUILD_STATE_FINISHED) return False @@ -328,30 +349,37 @@ def run_setup(self, record=True): with self.setup_env: if self.project.skip: raise BuildEnvironmentError( - _('Builds for this project are temporarily disabled')) + _('Builds for this project are temporarily disabled'), + ) try: self.setup_vcs() except vcs_support_utils.LockTimeout as e: self.retry(exc=e, throw=False) raise BuildEnvironmentError( 'Version locked, retrying in 5 minutes.', - status_code=423 + status_code=423, ) try: self.config = load_yaml_config(version=self.version) except ConfigError as e: raise BuildEnvironmentError( - 'Problem parsing YAML configuration. {0}'.format(str(e)) + 'Problem parsing YAML configuration. {0}'.format(str(e)), ) if self.setup_env.failure or self.config is None: - self._log('Failing build because of setup failure: %s' % self.setup_env.failure) + self._log( + 'Failing build because of setup failure: %s' % + self.setup_env.failure, + ) # Send notification to users only if the build didn't fail because of # LockTimeout: this exception occurs when a build is triggered before the previous # one has finished (e.g. 
two webhooks, one after the other) - if not isinstance(self.setup_env.failure, vcs_support_utils.LockTimeout): + if not isinstance( + self.setup_env.failure, + vcs_support_utils.LockTimeout, + ): self.send_notifications() return False @@ -374,8 +402,14 @@ def run_build(self, docker=False, record=True): env_cls = DockerBuildEnvironment else: env_cls = LocalBuildEnvironment - self.build_env = env_cls(project=self.project, version=self.version, config=self.config, - build=self.build, record=record, environment=env_vars) + self.build_env = env_cls( + project=self.project, + version=self.version, + config=self.config, + build=self.build, + record=record, + environment=env_vars, + ) # Environment used for building code, usually with Docker with self.build_env: @@ -387,9 +421,11 @@ def run_build(self, docker=False, record=True): if self.config.use_conda: self._log('Using conda') python_env_cls = Conda - self.python_env = python_env_cls(version=self.version, - build_env=self.build_env, - config=self.config) + self.python_env = python_env_cls( + version=self.version, + build_env=self.build_env, + config=self.config, + ) try: self.setup_python_environment() @@ -435,8 +471,8 @@ def get_build(build_pk): if build_pk: build = api_v2.build(build_pk).get() return dict((key, val) for (key, val) in list(build.items()) - if key not in ['project', 'version', 'resource_uri', - 'absolute_uri']) + if key not in + ['project', 'version', 'resource_uri', 'absolute_uri']) def setup_vcs(self): """ @@ -459,18 +495,28 @@ def get_env_vars(self): env = { 'READTHEDOCS': True, 'READTHEDOCS_VERSION': self.version.slug, - 'READTHEDOCS_PROJECT': self.project.slug + 'READTHEDOCS_PROJECT': self.project.slug, } if self.config.use_conda: env.update({ 'CONDA_ENVS_PATH': os.path.join(self.project.doc_path, 'conda'), 'CONDA_DEFAULT_ENV': self.version.slug, - 'BIN_PATH': os.path.join(self.project.doc_path, 'conda', self.version.slug, 'bin') + 'BIN_PATH': os.path.join( + self.project.doc_path, + 'conda', + self.version.slug, + 'bin', + ), }) else: env.update({ - 'BIN_PATH': os.path.join(self.project.doc_path, 'envs', self.version.slug, 'bin') + 'BIN_PATH': os.path.join( + self.project.doc_path, + 'envs', + self.version.slug, + 'bin', + ), }) return env @@ -496,8 +542,14 @@ def update_documentation_type(self): api_v2.project(self.project.pk).put(project_data) self.project.documentation_type = ret - def update_app_instances(self, html=False, localmedia=False, search=False, - pdf=False, epub=False): + def update_app_instances( + self, + html=False, + localmedia=False, + search=False, + pdf=False, + epub=False, + ): """ Update application instances with build artifacts. 
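The `run_build()` hunk above is easier to read without the diff noise: pick a build environment class (Docker or local) from the `docker` flag, pick a Python environment (Conda or Virtualenv) from the project config, and do all work inside the environment's context manager. A condensed sketch of the method's shape after this refactor, for orientation only (error handling and the documentation-type bookkeeping are omitted):

    def run_build(self, docker=False, record=True):
        env_vars = self.get_env_vars()
        env_cls = DockerBuildEnvironment if docker else LocalBuildEnvironment
        self.build_env = env_cls(
            project=self.project,
            version=self.version,
            config=self.config,
            build=self.build,
            record=record,
            environment=env_vars,
        )
        # All build work happens inside the environment's context manager,
        # so failures are captured and recorded on the Build object.
        with self.build_env:
            python_env_cls = Conda if self.config.use_conda else Virtualenv
            self.python_env = python_env_cls(
                version=self.version,
                build_env=self.build_env,
                config=self.config,
            )
            self.setup_python_environment()
            self.build_docs()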
@@ -513,7 +565,10 @@ def update_app_instances(self, html=False, localmedia=False, search=False, 'built': True, }) except HttpClientError: - log.exception('Updating version failed, skipping file sync: version=%s' % self.version) + log.exception( + 'Updating version failed, skipping file sync: version=%s', + self.version, + ) # Broadcast finalization steps to web application instances broadcast( @@ -531,7 +586,10 @@ def update_app_instances(self, html=False, localmedia=False, search=False, pdf=pdf, epub=epub, ), - callback=sync_callback.s(version_pk=self.version.pk, commit=self.build['commit']), + callback=sync_callback.s( + version_pk=self.version.pk, + commit=self.build['commit'], + ), ) def setup_python_environment(self): @@ -546,7 +604,8 @@ def setup_python_environment(self): with self.project.repo_nonblockinglock( version=self.version, - max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)): + max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30), + ): # Check if the python version/build image in the current venv is the # same to be used in this build and if it differs, wipe the venv to @@ -579,7 +638,8 @@ def build_docs(self): outcomes = defaultdict(lambda: False) with self.project.repo_nonblockinglock( version=self.version, - max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)): + max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30), + ): outcomes['html'] = self.build_docs_html() outcomes['search'] = self.build_docs_search() outcomes['localmedia'] = self.build_docs_localmedia() @@ -604,10 +664,15 @@ def build_docs_html(self): # Gracefully attempt to move files via task on web workers. try: - broadcast(type='app', task=move_files, - args=[self.version.pk, socket.gethostname()], - kwargs=dict(html=True) - ) + broadcast( + type='app', + task=move_files, + args=[ + self.version.pk, + socket.gethostname(), + ], + kwargs=dict(html=True), + ) except socket.error: log.exception('move_files task has failed on socket error.') @@ -632,7 +697,7 @@ def build_docs_localmedia(self): def build_docs_pdf(self): """Build PDF docs.""" if ('pdf' not in self.config.formats or - self.project.slug in HTML_ONLY or + self.project.slug in HTML_ONLY or not self.project.is_type_sphinx): return False return self.build_docs_class('sphinx_pdf') @@ -640,7 +705,7 @@ def build_docs_pdf(self): def build_docs_epub(self): """Build ePub docs.""" if ('epub' not in self.config.formats or - self.project.slug in HTML_ONLY or + self.project.slug in HTML_ONLY or not self.project.is_type_sphinx): return False return self.build_docs_class('sphinx_epub') @@ -653,7 +718,10 @@ def build_docs_class(self, builder_class): only raise a warning exception here. A hard error will halt the build process. """ - builder = get_builder_class(builder_class)(self.build_env, python_env=self.python_env) + builder = get_builder_class(builder_class)( + self.build_env, + python_env=self.python_env, + ) success = builder.build() builder.move() return success @@ -665,8 +733,16 @@ def send_notifications(self): # Web tasks @app.task(queue='web') -def sync_files(project_pk, version_pk, hostname=None, html=False, - localmedia=False, search=False, pdf=False, epub=False): +def sync_files( + project_pk, + version_pk, + hostname=None, + html=False, + localmedia=False, + search=False, + pdf=False, + epub=False, +): """ Sync build artifacts to application instances. 
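`update_app_instances()` above leans on `broadcast()` plus a Celery signature for the follow-up step: `sync_files` is fanned out to every web instance, and `sync_callback.s(...)` is a frozen call that fires only once the fan-out completes. A toy, plain-Celery illustration of the signature mechanics this relies on; the task bodies here are stand-ins, and RTD's `broadcast()` adds the per-instance fan-out on top:

    from celery import Celery, group

    app = Celery('demo', broker='memory://', backend='cache+memory://')


    @app.task
    def sync_files(server, version_pk):
        return '{0} synced version {1}'.format(server, version_pk)


    @app.task
    def sync_callback(results, version_pk, commit):
        # A chord callback: runs once, after every task in the group has
        # finished, with their return values collected in ``results``.
        return (version_pk, commit, results)


    # group(...) | callback.s(...) forms a chord: fan out, then callback.
    fanout = group(sync_files.s(server, 42) for server in ('web01', 'web02'))
    (fanout | sync_callback.s(version_pk=42, commit='abc1234')).delay()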
@@ -687,7 +763,7 @@ def sync_files(project_pk, version_pk, hostname=None, html=False, localmedia=localmedia, search=search, pdf=pdf, - epub=epub + epub=epub, ) # Symlink project @@ -698,8 +774,15 @@ def sync_files(project_pk, version_pk, hostname=None, html=False, @app.task(queue='web') -def move_files(version_pk, hostname, html=False, localmedia=False, search=False, - pdf=False, epub=False): +def move_files( + version_pk, + hostname, + html=False, + localmedia=False, + search=False, + pdf=False, + epub=False, +): """ Task to move built documentation to web servers. @@ -717,42 +800,69 @@ def move_files(version_pk, hostname, html=False, localmedia=False, search=False, :type epub: bool """ version = Version.objects.get(pk=version_pk) - log.debug(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, - msg='Moving files')) + log.debug( + LOG_TEMPLATE.format( + project=version.project.slug, + version=version.slug, + msg='Moving files', + ), + ) if html: from_path = version.project.artifact_path( - version=version.slug, type_=version.project.documentation_type) + version=version.slug, + type_=version.project.documentation_type, + ) target = version.project.rtd_build_path(version.slug) Syncer.copy(from_path, target, host=hostname) if 'sphinx' in version.project.documentation_type: if search: from_path = version.project.artifact_path( - version=version.slug, type_='sphinx_search') + version=version.slug, + type_='sphinx_search', + ) to_path = version.project.get_production_media_path( - type_='json', version_slug=version.slug, include_file=False) + type_='json', + version_slug=version.slug, + include_file=False, + ) Syncer.copy(from_path, to_path, host=hostname) if localmedia: from_path = version.project.artifact_path( - version=version.slug, type_='sphinx_localmedia') + version=version.slug, + type_='sphinx_localmedia', + ) to_path = version.project.get_production_media_path( - type_='htmlzip', version_slug=version.slug, include_file=False) + type_='htmlzip', + version_slug=version.slug, + include_file=False, + ) Syncer.copy(from_path, to_path, host=hostname) # Always move PDF's because the return code lies. 
if pdf: - from_path = version.project.artifact_path(version=version.slug, - type_='sphinx_pdf') + from_path = version.project.artifact_path( + version=version.slug, + type_='sphinx_pdf', + ) to_path = version.project.get_production_media_path( - type_='pdf', version_slug=version.slug, include_file=False) + type_='pdf', + version_slug=version.slug, + include_file=False, + ) Syncer.copy(from_path, to_path, host=hostname) if epub: - from_path = version.project.artifact_path(version=version.slug, - type_='sphinx_epub') + from_path = version.project.artifact_path( + version=version.slug, + type_='sphinx_epub', + ) to_path = version.project.get_production_media_path( - type_='epub', version_slug=version.slug, include_file=False) + type_='epub', + version_slug=version.slug, + include_file=False, + ) Syncer.copy(from_path, to_path, host=hostname) @@ -770,13 +880,18 @@ def update_search(version_pk, commit, delete_non_commit_files=True): if version.project.is_type_sphinx: page_list = process_all_json_files(version, build_dir=False) else: - log.debug('Unknown documentation type: %s', - version.project.documentation_type) + log.debug( + 'Unknown documentation type: %s', + version.project.documentation_type, + ) return log_msg = ' '.join([page['path'] for page in page_list]) - log.info("(Search Index) Sending Data: %s [%s]", version.project.slug, - log_msg) + log.info( + '(Search Index) Sending Data: %s [%s]', + version.project.slug, + log_msg, + ) index_search_request( version=version, page_list=page_list, @@ -820,7 +935,9 @@ def remove_orphan_symlinks(): """ for symlink in [PublicSymlink, PrivateSymlink]: for domain_path in [symlink.PROJECT_CNAME_ROOT, symlink.CNAME_ROOT]: - valid_cnames = set(Domain.objects.all().values_list('domain', flat=True)) + valid_cnames = set( + Domain.objects.all().values_list('domain', flat=True), + ) orphan_cnames = set(os.listdir(domain_path)) - valid_cnames for cname in orphan_cnames: orphan_domain_path = os.path.join(domain_path, cname) @@ -857,22 +974,36 @@ def fileify(version_pk, commit): project = version.project if not commit: - log.info(LOG_TEMPLATE - .format(project=project.slug, version=version.slug, - msg=('Imported File not being built because no commit ' - 'information'))) + log.info( + LOG_TEMPLATE.format( + project=project.slug, + version=version.slug, + msg=( + 'Imported File not being built because no commit ' + 'information' + ), + ), + ) return path = project.rtd_build_path(version.slug) if path: - log.info(LOG_TEMPLATE - .format(project=version.project.slug, version=version.slug, - msg='Creating ImportedFiles')) + log.info( + LOG_TEMPLATE.format( + project=version.project.slug, + version=version.slug, + msg='Creating ImportedFiles', + ), + ) _manage_imported_files(version, path, commit) else: - log.info(LOG_TEMPLATE - .format(project=project.slug, version=version.slug, - msg='No ImportedFile files')) + log.info( + LOG_TEMPLATE.format( + project=project.slug, + version=version.slug, + msg='No ImportedFile files', + ), + ) def _manage_imported_files(version, path, commit): @@ -886,8 +1017,10 @@ def _manage_imported_files(version, path, commit): changed_files = set() for root, __, filenames in os.walk(path): for filename in filenames: - dirpath = os.path.join(root.replace(path, '').lstrip('/'), - filename.lstrip('/')) + dirpath = os.path.join( + root.replace(path, '').lstrip('/'), + filename.lstrip('/'), + ) full_path = os.path.join(root, filename) md5 = hashlib.md5(open(full_path, 'rb').read()).hexdigest() try: @@ -898,7 +1031,7 @@ def 
_manage_imported_files(version, path, commit): name=filename, ) except ImportedFile.MultipleObjectsReturned: - log.exception('Error creating ImportedFile') + log.warning('Error creating ImportedFile') continue if obj.md5 != md5: obj.md5 = md5 @@ -907,16 +1040,21 @@ def _manage_imported_files(version, path, commit): obj.commit = commit obj.save() # Delete ImportedFiles from previous versions - ImportedFile.objects.filter(project=version.project, - version=version - ).exclude(commit=commit).delete() + ImportedFile.objects.filter( + project=version.project, + version=version, + ).exclude(commit=commit).delete() # Purge Cache cdn_ids = getattr(settings, 'CDN_IDS', None) if cdn_ids: if version.project.slug in cdn_ids: - changed_files = [resolve_path( - version.project, filename=fname, version_slug=version.slug, - ) for fname in changed_files] + changed_files = [ + resolve_path( + version.project, + filename=fname, + version_slug=version.slug, + ) for fname in changed_files + ] purge(cdn_ids[version.project.slug], changed_files) @@ -927,7 +1065,10 @@ def send_notifications(version_pk, build_pk): for hook in version.project.webhook_notifications.all(): webhook_notification(version, build, hook.url) - for email in version.project.emailhook_notifications.all().values_list('email', flat=True): + for email in version.project.emailhook_notifications.all().values_list( + 'email', + flat=True, + ): email_notification(version, build, email) @@ -939,8 +1080,13 @@ def email_notification(version, build, email): :param build: :py:class:`Build` instance that failed :param email: Email recipient address """ - log.debug(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, - msg='sending email to: %s' % email)) + log.debug( + LOG_TEMPLATE.format( + project=version.project.slug, + version=version.slug, + msg='sending email to: %s' % email, + ), + ) # We send only what we need from the Django model objects here to avoid # serialization problems in the ``readthedocs.core.tasks.send_email_task`` @@ -966,9 +1112,13 @@ def email_notification(version, build, email): } if build.commit: - title = _('Failed: {project[name]} ({commit})').format(commit=build.commit[:8], **context) + title = _('Failed: {project[name]} ({commit})').format( + commit=build.commit[:8], **context + ) else: - title = _('Failed: {project[name]} ({version[verbose_name]})').format(**context) + title = _('Failed: {project[name]} ({version[verbose_name]})').format( + **context + ) send_email( email, @@ -996,11 +1146,15 @@ def webhook_notification(version, build, hook_url): 'id': build.id, 'success': build.success, 'date': build.date.strftime('%Y-%m-%d %H:%M:%S'), - } + }, }) - log.debug(LOG_TEMPLATE - .format(project=project.slug, version='', - msg='sending notification to: %s' % hook_url)) + log.debug( + LOG_TEMPLATE.format( + project=project.slug, + version='', + msg='sending notification to: %s' % hook_url, + ), + ) try: requests.post(hook_url, data=data) except Exception: @@ -1027,11 +1181,13 @@ def update_static_metadata(project_pk, path=None): if not path: path = project.static_metadata_path() - log.info(LOG_TEMPLATE.format( - project=project.slug, - version='', - msg='Updating static metadata', - )) + log.info( + LOG_TEMPLATE.format( + project=project.slug, + version='', + msg='Updating static metadata', + ), + ) translations = [trans.language for trans in project.translations.all()] languages = set(translations) # Convert to JSON safe types @@ -1048,11 +1204,13 @@ def update_static_metadata(project_pk, path=None): 
json.dump(metadata, fh) fh.close() except (AttributeError, IOError) as e: - log.debug(LOG_TEMPLATE.format( - project=project.slug, - version='', - msg='Cannot write to metadata.json: {0}'.format(e) - )) + log.debug( + LOG_TEMPLATE.format( + project=project.slug, + version='', + msg='Cannot write to metadata.json: {0}'.format(e), + ), + ) # Random Tasks @@ -1064,7 +1222,7 @@ def remove_dir(path): This is mainly a wrapper around shutil.rmtree so that app servers can kill things on the build server. """ - log.info("Removing %s", path) + log.info('Removing %s', path) shutil.rmtree(path, ignore_errors=True) @@ -1082,24 +1240,36 @@ def clear_artifacts(version_pk): def clear_pdf_artifacts(version): if isinstance(version, int): version = Version.objects.get(pk=version) - remove_dir(version.project.get_production_media_path( - type_='pdf', version_slug=version.slug)) + remove_dir( + version.project.get_production_media_path( + type_='pdf', + version_slug=version.slug, + ), + ) @app.task() def clear_epub_artifacts(version): if isinstance(version, int): version = Version.objects.get(pk=version) - remove_dir(version.project.get_production_media_path( - type_='epub', version_slug=version.slug)) + remove_dir( + version.project.get_production_media_path( + type_='epub', + version_slug=version.slug, + ), + ) @app.task() def clear_htmlzip_artifacts(version): if isinstance(version, int): version = Version.objects.get(pk=version) - remove_dir(version.project.get_production_media_path( - type_='htmlzip', version_slug=version.slug)) + remove_dir( + version.project.get_production_media_path( + type_='htmlzip', + version_slug=version.slug, + ), + ) @app.task() @@ -1134,8 +1304,10 @@ def finish_inactive_builds(): """ time_limit = int(DOCKER_LIMITS['time'] * 1.2) delta = datetime.timedelta(seconds=time_limit) - query = (~Q(state=BUILD_STATE_FINISHED) & - Q(date__lte=datetime.datetime.now() - delta)) + query = ( + ~Q(state=BUILD_STATE_FINISHED) & + Q(date__lte=datetime.datetime.now() - delta) + ) builds_finished = 0 builds = Build.objects.filter(query)[:50] @@ -1143,7 +1315,8 @@ def finish_inactive_builds(): if build.project.container_time_limit: custom_delta = datetime.timedelta( - seconds=int(build.project.container_time_limit)) + seconds=int(build.project.container_time_limit), + ) if build.date + custom_delta > datetime.datetime.now(): # Do not mark as FINISHED builds with a custom time limit that wasn't # expired yet (they are still building the project version) @@ -1154,7 +1327,7 @@ def finish_inactive_builds(): build.error = _( 'This build was terminated due to inactivity. 
If you '
                'continue to encounter this error, file a support '
-                'request with and reference this build id ({0}).'.format(build.pk)
+                'request and reference this build id ({0}).'.format(build.pk),
            )
            build.save()
            builds_finished += 1
diff --git a/readthedocs/projects/views/base.py b/readthedocs/projects/views/base.py
index db6e1195fbe..0b733625058 100644
--- a/readthedocs/projects/views/base.py
+++ b/readthedocs/projects/views/base.py
@@ -4,9 +4,9 @@
     absolute_import, division, print_function, unicode_literals)

 import logging
-from builtins import object
 from datetime import datetime, timedelta

+from builtins import object
 from django.conf import settings
 from django.core.urlresolvers import reverse
 from django.http import HttpResponseRedirect
@@ -73,7 +73,8 @@ def get_project(self):
             return None
         return get_object_or_404(
             Project.objects.for_admin_user(user=self.request.user),
-            slug=self.kwargs[self.project_url_field])
+            slug=self.kwargs[self.project_url_field],
+        )

     def get_context_data(self, **kwargs):
         """Add project to context data."""
@@ -93,9 +94,10 @@ class ProjectSpamMixin(object):

     def post(self, request, *args, **kwargs):
         if request.user.profile.banned:
-            log.error(
+            log.info(
                 'Rejecting project POST from shadowbanned user %s',
-                request.user)
+                request.user,
+            )
             return HttpResponseRedirect(self.get_failure_url())
         try:
             return super(ProjectSpamMixin, self).post(request, *args, **kwargs)
@@ -104,11 +106,12 @@ def post(self, request, *args, **kwargs):
             if request.user.date_joined > date_maturity:
                 request.user.profile.banned = True
                 request.user.profile.save()
-                log.error(
+                log.info(
                     'Spam detected from new user, shadowbanned user %s',
-                    request.user)
+                    request.user,
+                )
             else:
-                log.error('Spam detected from user %s', request.user)
+                log.info('Spam detected from user %s', request.user)
             return HttpResponseRedirect(self.get_failure_url())

     def get_failure_url(self):
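The base-view hunk above also demotes spam and shadowban events from ERROR to INFO, since they are routine, user-triggered conditions rather than application faults. Reassembled for readability, the flow of `ProjectSpamMixin.post()` looks roughly like this; note that `ProjectSpamError` and the maturity window are assumptions here, since the hunk's context elides the `except` clause and the definition of `date_maturity`:

    def post(self, request, *args, **kwargs):
        if request.user.profile.banned:
            # Shadowbanned users get a silent redirect, logged at INFO.
            log.info(
                'Rejecting project POST from shadowbanned user %s',
                request.user,
            )
            return HttpResponseRedirect(self.get_failure_url())
        try:
            return super(ProjectSpamMixin, self).post(request, *args, **kwargs)
        except ProjectSpamError:  # assumed exception name, elided from the hunk
            date_maturity = datetime.now() - timedelta(days=7)  # assumed window
            if request.user.date_joined > date_maturity:
                # Young accounts that trip the spam check get shadowbanned.
                request.user.profile.banned = True
                request.user.profile.save()
                log.info(
                    'Spam detected from new user, shadowbanned user %s',
                    request.user,
                )
            else:
                log.info('Spam detected from user %s', request.user)
            return HttpResponseRedirect(self.get_failure_url())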
""" project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) if not project.is_imported: raise Http404 @@ -153,19 +157,27 @@ def project_versions(request, project_slug): return HttpResponseRedirect(project_dashboard) return render( - request, 'projects/project_versions.html', - {'form': form, 'project': project}) + request, + 'projects/project_versions.html', + {'form': form, 'project': project}, + ) @login_required def project_version_detail(request, project_slug, version_slug): """Project version detail page.""" project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) version = get_object_or_404( Version.objects.public( - user=request.user, project=project, only_active=False), - slug=version_slug) + user=request.user, + project=project, + only_active=False, + ), + slug=version_slug, + ) form = VersionForm(request.POST or None, instance=version) @@ -175,15 +187,20 @@ def project_version_detail(request, project_slug, version_slug): if 'active' in form.changed_data and version.active is False: log.info('Removing files for version %s', version.slug) broadcast( - type='app', task=tasks.clear_artifacts, args=[version.pk]) + type='app', + task=tasks.clear_artifacts, + args=[version.pk], + ) version.built = False version.save() url = reverse('project_version_list', args=[project.slug]) return HttpResponseRedirect(url) return render( - request, 'projects/project_version_detail.html', - {'form': form, 'project': project, 'version': version}) + request, + 'projects/project_version_detail.html', + {'form': form, 'project': project, 'version': version}, + ) @login_required @@ -195,7 +212,9 @@ def project_delete(request, project_slug): confirmation of delete. 
""" project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) if request.method == 'POST': broadcast(type='app', task=tasks.remove_dir, args=[project.doc_path]) @@ -256,7 +275,8 @@ def done(self, form_list, **kwargs): project_import.send(sender=project, request=self.request) trigger_build(project, basic=basic_only) return HttpResponseRedirect( - reverse('projects_detail', args=[project.slug])) + reverse('projects_detail', args=[project.slug]), + ) def is_advanced(self): """Determine if the user selected the `show advanced` field.""" @@ -281,10 +301,13 @@ def get(self, request, *args, **kwargs): data = self.get_form_data() project = Project.objects.for_admin_user( - request.user).filter(repo=data['repo']).first() + request.user, + ).filter(repo=data['repo']).first() if project is not None: messages.success( - request, _('The demo project is already imported!')) + request, + _('The demo project is already imported!'), + ) else: kwargs = self.get_form_kwargs() form = self.form_class(data=data, **kwargs) @@ -293,24 +316,25 @@ def get(self, request, *args, **kwargs): project.save() trigger_build(project, basic=True) messages.success( - request, _('Your demo project is currently being imported')) + request, + _('Your demo project is currently being imported'), + ) else: - for (__, msg) in list(form.errors.items()): - log.error(msg) messages.error( request, _('There was a problem adding the demo project'), ) return HttpResponseRedirect(reverse('projects_dashboard')) return HttpResponseRedirect( - reverse('projects_detail', args=[project.slug])) + reverse('projects_detail', args=[project.slug]), + ) def get_form_data(self): """Get form data to post to import form.""" return { 'name': '{0}-demo'.format(self.request.user.username), 'repo_type': 'git', - 'repo': 'https://github.com/readthedocs/template.git' + 'repo': 'https://github.com/readthedocs/template.git', } def get_form_kwargs(self): @@ -344,7 +368,8 @@ def get(self, request, *args, **kwargs): .exclude( provider__in=[ service.adapter.provider_id for service in registry - ]) + ], + ) ) # yapf: disable for account in deprecated_accounts: provider_account = account.get_provider_account() @@ -354,10 +379,12 @@ def get(self, request, *args, **kwargs): _( 'There is a problem with your {service} account, ' 'try reconnecting your account on your ' - 'connected services page.').format( - service=provider_account.get_brand()['name'], - url=reverse('socialaccount_connections')) - )) # yapf: disable + 'connected services page.', + ).format( + service=provider_account.get_brand()['name'], + url=reverse('socialaccount_connections'), + ) + )), # yapf: disable ) return super(ImportView, self).get(request, *args, **kwargs) @@ -376,7 +403,8 @@ def get_context_data(self, **kwargs): context = super(ImportView, self).get_context_data(**kwargs) context['view_csrf_token'] = get_token(self.request) context['has_connected_accounts'] = SocialAccount.objects.filter( - user=self.request.user).exists() + user=self.request.user, + ).exists() return context @@ -384,7 +412,9 @@ def get_context_data(self, **kwargs): def edit_alias(request, project_slug, alias_id=None): """Edit project alias form view.""" proj = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) if alias_id: alias = proj.aliases.get(pk=alias_id) form = AliasForm(instance=alias, 
data=request.POST or None) @@ -409,7 +439,8 @@ class AliasList(PrivateViewMixin, ListView): def get_queryset(self): self.project = get_object_or_404( Project.objects.for_admin_user(self.request.user), - slug=self.kwargs.get('project_slug')) + slug=self.kwargs.get('project_slug'), + ) return self.project.aliases.all() @@ -426,8 +457,10 @@ def get_queryset(self): def get_form(self, data=None, files=None, **kwargs): kwargs['user'] = self.request.user - return super(ProjectRelationshipMixin, - self).get_form(data, files, **kwargs) + return super( + ProjectRelationshipMixin, + self, + ).get_form(data, files, **kwargs) def form_valid(self, form): broadcast( @@ -467,7 +500,9 @@ def get(self, request, *args, **kwargs): def project_users(request, project_slug): """Project users view and form view.""" project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) form = UserForm(data=request.POST or None, project=project) @@ -490,9 +525,13 @@ def project_users_delete(request, project_slug): if request.method != 'POST': return HttpResponseNotAllowed('Only POST is allowed') project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) user = get_object_or_404( - User.objects.all(), username=request.POST.get('username')) + User.objects.all(), + username=request.POST.get('username'), + ) if user == request.user: raise Http404 project.users.remove(user) @@ -504,7 +543,9 @@ def project_users_delete(request, project_slug): def project_notifications(request, project_slug): """Project notification view and form view.""" project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) email_form = EmailHookForm(data=request.POST or None, project=project) webhook_form = WebHookForm(data=request.POST or None, project=project) @@ -539,7 +580,9 @@ def project_notifications(request, project_slug): @login_required def project_comments_settings(request, project_slug): project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) return render( request, @@ -556,14 +599,18 @@ def project_notifications_delete(request, project_slug): if request.method != 'POST': return HttpResponseNotAllowed('Only POST is allowed') project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) try: project.emailhook_notifications.get( - email=request.POST.get('email')).delete() + email=request.POST.get('email'), + ).delete() except EmailHook.DoesNotExist: try: project.webhook_notifications.get( - url=request.POST.get('email')).delete() + url=request.POST.get('email'), + ).delete() except WebHook.DoesNotExist: raise Http404 project_dashboard = reverse('projects_notifications', args=[project.slug]) @@ -574,7 +621,9 @@ def project_notifications_delete(request, project_slug): def project_translations(request, project_slug): """Project translations view and form view.""" project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) form = TranslationForm( data=request.POST or None, parent=project, @@ -621,7 +670,9 @@ 
def project_translations_delete(request, project_slug, child_slug): def project_redirects(request, project_slug): """Project redirects view and form view.""" project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) form = RedirectForm(data=request.POST or None, project=project) @@ -633,8 +684,10 @@ def project_redirects(request, project_slug): redirects = project.redirects.all() return render( - request, 'projects/project_redirects.html', - {'form': form, 'project': project, 'redirects': redirects}) + request, + 'projects/project_redirects.html', + {'form': form, 'project': project, 'redirects': redirects}, + ) @login_required @@ -643,15 +696,20 @@ def project_redirects_delete(request, project_slug): if request.method != 'POST': return HttpResponseNotAllowed('Only POST is allowed') project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) redirect = get_object_or_404( - project.redirects, pk=request.POST.get('id_pk')) + project.redirects, + pk=request.POST.get('id_pk'), + ) if redirect.project == project: redirect.delete() else: raise Http404 return HttpResponseRedirect( - reverse('projects_redirects', args=[project.slug])) + reverse('projects_redirects', args=[project.slug]), + ) @login_required @@ -662,11 +720,17 @@ def project_version_delete_html(request, project_slug, version_slug): This marks a version as not built """ project = get_object_or_404( - Project.objects.for_admin_user(request.user), slug=project_slug) + Project.objects.for_admin_user(request.user), + slug=project_slug, + ) version = get_object_or_404( Version.objects.public( - user=request.user, project=project, only_active=False), - slug=version_slug) + user=request.user, + project=project, + only_active=False, + ), + slug=version_slug, + ) if not version.active: version.built = False @@ -674,9 +738,11 @@ def project_version_delete_html(request, project_slug, version_slug): broadcast(type='app', task=tasks.clear_artifacts, args=[version.pk]) else: return HttpResponseBadRequest( - "Can't delete HTML for an active version.") + "Can't delete HTML for an active version.", + ) return HttpResponseRedirect( - reverse('project_version_list', kwargs={'project_slug': project_slug})) + reverse('project_version_list', kwargs={'project_slug': project_slug}), + ) class DomainMixin(ProjectAdminMixin, PrivateViewMixin): @@ -774,7 +840,8 @@ def get_template_names(self): suffix = self.SUFFIX_MAP.get(integration_type, integration_type) return ( 'projects/integration_{0}{1}.html' - .format(suffix, self.template_name_suffix)) + .format(suffix, self.template_name_suffix) + ) class IntegrationDelete(IntegrationMixin, DeleteView): diff --git a/readthedocs/restapi/utils.py b/readthedocs/restapi/utils.py index 43197d98d2f..7df7732133d 100644 --- a/readthedocs/restapi/utils.py +++ b/readthedocs/restapi/utils.py @@ -20,7 +20,9 @@ def sync_versions(project, versions, type): # pylint: disable=redefined-builtin """Update the database with the current versions from the repository.""" old_versions = {} old_version_values = project.versions.filter(type=type).values( - 'identifier', 'verbose_name') + 'identifier', + 'verbose_name', + ) for version in old_version_values: old_versions[version['verbose_name']] = version['identifier'] @@ -36,11 +38,13 @@ def sync_versions(project, versions, type): # pylint: disable=redefined-builtin else: # 
Update slug with new identifier Version.objects.filter( - project=project, verbose_name=version_name).update( - identifier=version_id, - type=type, - machine=False, - ) # noqa + project=project, + verbose_name=version_name, + ).update( + identifier=version_id, + type=type, + machine=False, + ) # noqa log.info( '(Sync Versions) Updated Version: [%s=%s] ', @@ -85,8 +89,14 @@ def delete_versions(project, version_data): def index_search_request( - version, page_list, commit, project_scale, page_scale, section=True, - delete=True): + version, + page_list, + commit, + project_scale, + page_scale, + section=True, + delete=True, +): """ Update search indexes with build output JSON. @@ -116,7 +126,8 @@ def index_search_request( 'url': project.get_absolute_url(), 'tags': None, 'weight': project_scale, - }) + }, + ) page_obj = PageIndex() section_obj = SectionIndex() diff --git a/readthedocs/restapi/views/model_views.py b/readthedocs/restapi/views/model_views.py index 1351b5c5d57..82b57222048 100644 --- a/readthedocs/restapi/views/model_views.py +++ b/readthedocs/restapi/views/model_views.py @@ -73,7 +73,9 @@ class ProjectViewSet(UserSelectViewSet): def valid_versions(self, request, **kwargs): """Maintain state of versions that are wanted.""" project = get_object_or_404( - Project.objects.api(request.user), pk=kwargs['pk']) + Project.objects.api(request.user), + pk=kwargs['pk'], + ) if (not project.num_major or not project.num_minor or not project.num_point): return Response( @@ -102,7 +104,9 @@ def translations(self, *_, **__): @detail_route() def subprojects(self, request, **kwargs): project = get_object_or_404( - Project.objects.api(request.user), pk=kwargs['pk']) + Project.objects.api(request.user), + pk=kwargs['pk'], + ) rels = project.subprojects.all() children = [rel.child for rel in rels] return Response({ @@ -112,7 +116,9 @@ def subprojects(self, request, **kwargs): @detail_route() def active_versions(self, request, **kwargs): project = get_object_or_404( - Project.objects.api(request.user), pk=kwargs['pk']) + Project.objects.api(request.user), + pk=kwargs['pk'], + ) versions = project.versions.filter(active=True) return Response({ 'versions': VersionSerializer(versions, many=True).data, @@ -121,7 +127,9 @@ def active_versions(self, request, **kwargs): @decorators.detail_route(permission_classes=[permissions.IsAdminUser]) def token(self, request, **kwargs): project = get_object_or_404( - Project.objects.api(request.user), pk=kwargs['pk']) + Project.objects.api(request.user), + pk=kwargs['pk'], + ) token = GitHubService.get_token_for_project(project, force_local=True) return Response({ 'token': token, @@ -130,13 +138,17 @@ def token(self, request, **kwargs): @decorators.detail_route() def canonical_url(self, request, **kwargs): project = get_object_or_404( - Project.objects.api(request.user), pk=kwargs['pk']) + Project.objects.api(request.user), + pk=kwargs['pk'], + ) return Response({ 'url': project.get_docs_url(), }) @decorators.detail_route( - permission_classes=[permissions.IsAdminUser], methods=['post']) + permission_classes=[permissions.IsAdminUser], + methods=['post'], + ) def sync_versions(self, request, **kwargs): # noqa: D205 """ Sync the version data in the repo (on the build server). @@ -146,7 +158,9 @@ def sync_versions(self, request, **kwargs): # noqa: D205 :returns: the identifiers for the versions that have been deleted. 
""" project = get_object_or_404( - Project.objects.api(request.user), pk=kwargs['pk']) + Project.objects.api(request.user), + pk=kwargs['pk'], + ) # If the currently highest non-prerelease version is active, then make # the new latest version active as well. @@ -162,15 +176,21 @@ def sync_versions(self, request, **kwargs): # noqa: D205 added_versions = set() if 'tags' in data: ret_set = api_utils.sync_versions( - project=project, versions=data['tags'], type=TAG) + project=project, + versions=data['tags'], + type=TAG, + ) added_versions.update(ret_set) if 'branches' in data: ret_set = api_utils.sync_versions( - project=project, versions=data['branches'], type=BRANCH) + project=project, + versions=data['branches'], + type=BRANCH, + ) added_versions.update(ret_set) deleted_versions = api_utils.delete_versions(project, data) except Exception as e: - log.exception('Sync Versions Error: %s', e.message) + log.exception('Sync Versions Error') return Response( { 'error': e.message, @@ -185,7 +205,8 @@ def sync_versions(self, request, **kwargs): # noqa: D205 'Triggering new stable build: {project}:{version}'.format( project=project.slug, version=new_stable.identifier, - )) + ), + ) trigger_build(project=project, version=new_stable) # Marking the tag that is considered the new stable version as @@ -261,7 +282,9 @@ def get_queryset(self): self.model.objects.api(self.request.user).filter( account__provider__in=[ service.adapter.provider_id for service in registry - ])) + ], + ) + ) class RemoteRepositoryViewSet(viewsets.ReadOnlyModelViewSet): @@ -287,7 +310,8 @@ def get_queryset(self): query = query.filter( account__provider__in=[ service.adapter.provider_id for service in registry - ]) + ], + ) return query diff --git a/readthedocs/rtd_tests/tests/test_core_tags.py b/readthedocs/rtd_tests/tests/test_core_tags.py index a3490536694..bbbe07553ec 100644 --- a/readthedocs/rtd_tests/tests/test_core_tags.py +++ b/readthedocs/rtd_tests/tests/test_core_tags.py @@ -1,23 +1,30 @@ -from __future__ import absolute_import -import mock +# -*- coding: utf-8 -*- +from __future__ import ( + absolute_import, division, print_function, unicode_literals) +import mock from django.test import TestCase from django.test.utils import override_settings -from readthedocs.projects.models import Project from readthedocs.builds.constants import LATEST from readthedocs.core.templatetags import core_tags +from readthedocs.projects.models import Project @override_settings(USE_SUBDOMAIN=False, PUBLIC_DOMAIN='readthedocs.org') class CoreTagsTests(TestCase): - fixtures = ["eric", "test_data"] + fixtures = ['eric', 'test_data'] def setUp(self): with mock.patch('readthedocs.projects.models.broadcast'): self.client.login(username='eric', password='test') self.pip = Project.objects.get(slug='pip') - self.pip_fr = Project.objects.create(name="PIP-FR", slug='pip-fr', language='fr', main_language_project=self.pip) + self.pip_fr = Project.objects.create( + name='PIP-FR', + slug='pip-fr', + language='fr', + main_language_project=self.pip, + ) def test_project_only(self): proj = Project.objects.get(slug='pip') @@ -130,7 +137,10 @@ def test_project_and_version_and_page_signlehtml(self): proj = Project.objects.get(slug='pip') proj.documentation_type = 'sphinx_singlehtml' url = core_tags.make_document_url(proj, 'abc', 'xyz') - self.assertEqual(url, 'http://readthedocs.org/docs/pip/en/abc/index.html#document-xyz') + self.assertEqual( + url, + 'http://readthedocs.org/docs/pip/en/abc/index.html#document-xyz', + ) url = core_tags.make_document_url(proj, 
'abc', 'index') self.assertEqual(url, 'http://readthedocs.org/docs/pip/en/abc/') @@ -153,7 +163,10 @@ def test_translation_project_and_version_and_page_singlehtml(self): proj = Project.objects.get(slug='pip-fr') proj.documentation_type = 'sphinx_singlehtml' url = core_tags.make_document_url(proj, 'abc', 'xyz') - self.assertEqual(url, 'http://readthedocs.org/docs/pip/fr/abc/index.html#document-xyz') + self.assertEqual( + url, + 'http://readthedocs.org/docs/pip/fr/abc/index.html#document-xyz', + ) url = core_tags.make_document_url(proj, 'abc', 'index') self.assertEqual(url, 'http://readthedocs.org/docs/pip/fr/abc/') @@ -161,13 +174,19 @@ def test_mkdocs(self): proj = Project.objects.get(slug='pip') proj.documentation_type = 'mkdocs' url = core_tags.make_document_url(proj, LATEST, 'document') - self.assertEqual(url, 'http://readthedocs.org/docs/pip/en/latest/document/') + self.assertEqual( + url, + 'http://readthedocs.org/docs/pip/en/latest/document/', + ) def test_mkdocs_no_directory_urls(self): proj = Project.objects.get(slug='pip') proj.documentation_type = 'mkdocs' url = core_tags.make_document_url(proj, LATEST, 'document.html') - self.assertEqual(url, 'http://readthedocs.org/docs/pip/en/latest/document.html') + self.assertEqual( + url, + 'http://readthedocs.org/docs/pip/en/latest/document.html', + ) def test_mkdocs_index(self): proj = Project.objects.get(slug='pip') diff --git a/readthedocs/rtd_tests/tests/test_privacy.py b/readthedocs/rtd_tests/tests/test_privacy.py index 5a4870e7b5e..70414dd6048 100644 --- a/readthedocs/rtd_tests/tests/test_privacy.py +++ b/readthedocs/rtd_tests/tests/test_privacy.py @@ -1,17 +1,20 @@ -from __future__ import absolute_import -import logging +# -*- coding: utf-8 -*- +from __future__ import ( + absolute_import, division, print_function, unicode_literals) + import json -import mock +import logging +import mock +from django.contrib.auth.models import User from django.test import TestCase from django.test.utils import override_settings -from django.contrib.auth.models import User from readthedocs.builds.constants import LATEST -from readthedocs.builds.models import Version, Build -from readthedocs.projects.models import Project -from readthedocs.projects.forms import UpdateProjectForm +from readthedocs.builds.models import Build, Version from readthedocs.projects import tasks +from readthedocs.projects.forms import UpdateProjectForm +from readthedocs.projects.models import Project log = logging.getLogger(__name__) @@ -29,24 +32,33 @@ def setUp(self): tasks.UpdateDocsTask.delay = mock.Mock() - def _create_kong(self, privacy_level='private', - version_privacy_level='private'): + def _create_kong( + self, + privacy_level='private', + version_privacy_level='private', + ): self.client.login(username='eric', password='test') - log.info(("Making kong with privacy: %s and version privacy: %s" - % (privacy_level, version_privacy_level))) + log.info( + 'Making kong with privacy: %s and version privacy: %s', + privacy_level, + version_privacy_level, + ) # Create project via project form, simulate import wizard without magic form = UpdateProjectForm( - data={'repo_type': 'git', - 'repo': 'https://github.com/ericholscher/django-kong', - 'name': 'Django Kong', - 'language': 'en', - 'default_branch': '', - 'project_url': 'http://django-kong.rtfd.org', - 'default_version': LATEST, - 'python_interpreter': 'python', - 'description': 'OOHHH AH AH AH KONG SMASH', - 'documentation_type': 'sphinx'}, - user=User.objects.get(username='eric')) + data={ + 'repo_type': 'git', + 
'repo': 'https://github.com/ericholscher/django-kong', + 'name': 'Django Kong', + 'language': 'en', + 'default_branch': '', + 'project_url': 'http://django-kong.rtfd.org', + 'default_version': LATEST, + 'python_interpreter': 'python', + 'description': 'OOHHH AH AH AH KONG SMASH', + 'documentation_type': 'sphinx', + }, + user=User.objects.get(username='eric'), + ) proj = form.save() # Update these directly, no form has all the fields we need proj.privacy_level = privacy_level @@ -67,9 +79,7 @@ def _create_kong(self, privacy_level='private', def test_private_repo(self): """Check that private projects don't show up in: builds, downloads, - detail, homepage - - """ + detail, homepage.""" self._create_kong('private', 'private') self.client.login(username='eric', password='test') @@ -96,9 +106,7 @@ def test_private_repo(self): def test_public_repo(self): """Check that public projects show up in: builds, downloads, detail, - homepage - - """ + homepage.""" self._create_kong('public', 'public') self.client.login(username='eric', password='test') @@ -129,10 +137,19 @@ def test_private_branch(self): kong = self._create_kong('public', 'private') self.client.login(username='eric', password='test') - Version.objects.create(project=kong, identifier='test id', - verbose_name='test verbose', privacy_level='private', slug='test-slug', active=True) + Version.objects.create( + project=kong, + identifier='test id', + verbose_name='test verbose', + privacy_level='private', + slug='test-slug', + active=True, + ) self.assertEqual(Version.objects.count(), 2) - self.assertEqual(Version.objects.get(slug='test-slug').privacy_level, 'private') + self.assertEqual( + Version.objects.get(slug='test-slug').privacy_level, + 'private', + ) r = self.client.get('/projects/django-kong/') self.assertContains(r, 'test-slug') r = self.client.get('/projects/django-kong/builds/') @@ -149,9 +166,14 @@ def test_public_branch(self): kong = self._create_kong('public', 'public') self.client.login(username='eric', password='test') - Version.objects.create(project=kong, identifier='test id', - verbose_name='test verbose', slug='test-slug', - active=True, built=True) + Version.objects.create( + project=kong, + identifier='test id', + verbose_name='test verbose', + slug='test-slug', + active=True, + built=True, + ) self.assertEqual(Version.objects.count(), 2) self.assertEqual(Version.objects.all()[0].privacy_level, 'public') r = self.client.get('/projects/django-kong/') @@ -165,22 +187,30 @@ def test_public_branch(self): def test_public_repo_api(self): self._create_kong('public', 'public') self.client.login(username='eric', password='test') - resp = self.client.get("http://testserver/api/v1/project/django-kong/", - data={"format": "json"}) + resp = self.client.get( + 'http://testserver/api/v1/project/django-kong/', + data={'format': 'json'}, + ) self.assertEqual(resp.status_code, 200) - resp = self.client.get("http://testserver/api/v1/project/", - data={"format": "json"}) + resp = self.client.get( + 'http://testserver/api/v1/project/', + data={'format': 'json'}, + ) self.assertEqual(resp.status_code, 200) data = json.loads(resp.content) self.assertEqual(data['meta']['total_count'], 1) self.client.login(username='tester', password='test') - resp = self.client.get("http://testserver/api/v1/project/django-kong/", - data={"format": "json"}) + resp = self.client.get( + 'http://testserver/api/v1/project/django-kong/', + data={'format': 'json'}, + ) self.assertEqual(resp.status_code, 200) - resp = 
self.client.get("http://testserver/api/v1/project/", - data={"format": "json"}) + resp = self.client.get( + 'http://testserver/api/v1/project/', + data={'format': 'json'}, + ) self.assertEqual(resp.status_code, 200) data = json.loads(resp.content) self.assertEqual(data['meta']['total_count'], 1) @@ -188,21 +218,29 @@ def test_public_repo_api(self): def test_private_repo_api(self): self._create_kong('private', 'private') self.client.login(username='eric', password='test') - resp = self.client.get("http://testserver/api/v1/project/django-kong/", - data={"format": "json"}) + resp = self.client.get( + 'http://testserver/api/v1/project/django-kong/', + data={'format': 'json'}, + ) self.assertEqual(resp.status_code, 200) - resp = self.client.get("http://testserver/api/v1/project/", - data={"format": "json"}) + resp = self.client.get( + 'http://testserver/api/v1/project/', + data={'format': 'json'}, + ) self.assertEqual(resp.status_code, 200) data = json.loads(resp.content) self.assertEqual(data['meta']['total_count'], 1) self.client.login(username='tester', password='test') - resp = self.client.get("http://testserver/api/v1/project/django-kong/", - data={"format": "json"}) + resp = self.client.get( + 'http://testserver/api/v1/project/django-kong/', + data={'format': 'json'}, + ) self.assertEqual(resp.status_code, 404) - resp = self.client.get("http://testserver/api/v1/project/", - data={"format": "json"}) + resp = self.client.get( + 'http://testserver/api/v1/project/', + data={'format': 'json'}, + ) self.assertEqual(resp.status_code, 200) data = json.loads(resp.content) self.assertEqual(data['meta']['total_count'], 0) @@ -211,11 +249,18 @@ def test_private_doc_serving(self): kong = self._create_kong('public', 'private') self.client.login(username='eric', password='test') - Version.objects.create(project=kong, identifier='test id', - verbose_name='test verbose', privacy_level='private', slug='test-slug', active=True) - self.client.post('/dashboard/django-kong/versions/', - {'version-test-slug': 'on', - 'privacy-test-slug': 'private'}) + Version.objects.create( + project=kong, + identifier='test id', + verbose_name='test verbose', + privacy_level='private', + slug='test-slug', + active=True, + ) + self.client.post( + '/dashboard/django-kong/versions/', + {'version-test-slug': 'on', 'privacy-test-slug': 'private'}, + ) r = self.client.get('/docs/django-kong/en/test-slug/') self.client.login(username='eric', password='test') self.assertEqual(r.status_code, 404) @@ -244,7 +289,10 @@ def test_private_repo_downloading(self): self.assertEqual(r.status_code, 200) r = self.client.get('/projects/django-kong/downloads/pdf/latest/') self.assertEqual(r.status_code, 200) - self.assertEqual(r._headers['x-accel-redirect'][1], '/prod_artifacts/media/pdf/django-kong/latest/django-kong.pdf') + self.assertEqual( + r._headers['x-accel-redirect'][1], + '/prod_artifacts/media/pdf/django-kong/latest/django-kong.pdf', + ) @override_settings(DEFAULT_PRIVACY_LEVEL='private') def test_private_public_repo_downloading(self): @@ -256,7 +304,10 @@ def test_private_public_repo_downloading(self): self.assertEqual(r.status_code, 200) r = self.client.get('/projects/django-kong/downloads/pdf/latest/') self.assertEqual(r.status_code, 200) - self.assertEqual(r._headers['x-accel-redirect'][1], '/prod_artifacts/media/pdf/django-kong/latest/django-kong.pdf') + self.assertEqual( + r._headers['x-accel-redirect'][1], + '/prod_artifacts/media/pdf/django-kong/latest/django-kong.pdf', + ) # Auth'd user self.client.login(username='eric', 
password='test') @@ -264,7 +315,10 @@ def test_private_public_repo_downloading(self): self.assertEqual(r.status_code, 200) r = self.client.get('/projects/django-kong/downloads/pdf/latest/') self.assertEqual(r.status_code, 200) - self.assertEqual(r._headers['x-accel-redirect'][1], '/prod_artifacts/media/pdf/django-kong/latest/django-kong.pdf') + self.assertEqual( + r._headers['x-accel-redirect'][1], + '/prod_artifacts/media/pdf/django-kong/latest/django-kong.pdf', + ) @override_settings(DEFAULT_PRIVACY_LEVEL='private') def test_private_download_filename(self): @@ -273,20 +327,39 @@ def test_private_download_filename(self): self.client.login(username='eric', password='test') r = self.client.get('/projects/django-kong/downloads/pdf/latest/') self.assertEqual(r.status_code, 200) - self.assertEqual(r._headers['x-accel-redirect'][1], '/prod_artifacts/media/pdf/django-kong/latest/django-kong.pdf') - self.assertEqual(r._headers['content-disposition'][1], 'filename=django-kong-latest.pdf') + self.assertEqual( + r._headers['x-accel-redirect'][1], + '/prod_artifacts/media/pdf/django-kong/latest/django-kong.pdf', + ) + self.assertEqual( + r._headers['content-disposition'][1], + 'filename=django-kong-latest.pdf', + ) r = self.client.get('/projects/django-kong/downloads/epub/latest/') self.assertEqual(r.status_code, 200) - self.assertEqual(r._headers['x-accel-redirect'][1], '/prod_artifacts/media/epub/django-kong/latest/django-kong.epub') - self.assertEqual(r._headers['content-disposition'][1], 'filename=django-kong-latest.epub') + self.assertEqual( + r._headers['x-accel-redirect'][1], + '/prod_artifacts/media/epub/django-kong/latest/django-kong.epub', + ) + self.assertEqual( + r._headers['content-disposition'][1], + 'filename=django-kong-latest.epub', + ) r = self.client.get('/projects/django-kong/downloads/htmlzip/latest/') self.assertEqual(r.status_code, 200) - self.assertEqual(r._headers['x-accel-redirect'][1], '/prod_artifacts/media/htmlzip/django-kong/latest/django-kong.zip') - self.assertEqual(r._headers['content-disposition'][1], 'filename=django-kong-latest.zip') + self.assertEqual( + r._headers['x-accel-redirect'][1], + '/prod_artifacts/media/htmlzip/django-kong/latest/django-kong.zip', + ) + self.assertEqual( + r._headers['content-disposition'][1], + 'filename=django-kong-latest.zip', + ) # Public download tests + @override_settings(DEFAULT_PRIVACY_LEVEL='public') def test_public_repo_downloading(self): self._create_kong('public', 'public') @@ -297,7 +370,10 @@ def test_public_repo_downloading(self): self.assertEqual(r.status_code, 200) r = self.client.get('/projects/django-kong/downloads/pdf/latest/') self.assertEqual(r.status_code, 302) - self.assertEqual(r._headers['location'][1], '/media/pdf/django-kong/latest/django-kong.pdf') + self.assertEqual( + r._headers['location'][1], + '/media/pdf/django-kong/latest/django-kong.pdf', + ) # Auth'd user self.client.login(username='eric', password='test') @@ -305,7 +381,10 @@ def test_public_repo_downloading(self): self.assertEqual(r.status_code, 200) r = self.client.get('/projects/django-kong/downloads/pdf/latest/') self.assertEqual(r.status_code, 302) - self.assertEqual(r._headers['location'][1], '/media/pdf/django-kong/latest/django-kong.pdf') + self.assertEqual( + r._headers['location'][1], + '/media/pdf/django-kong/latest/django-kong.pdf', + ) @override_settings(DEFAULT_PRIVACY_LEVEL='public') def test_public_private_repo_downloading(self): @@ -324,7 +403,10 @@ def test_public_private_repo_downloading(self): self.assertEqual(r.status_code, 200) 
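# The ``x-accel-redirect`` assertions above exercise the nginx internal
# redirect pattern these tests rely on: for private artifacts the Django view
# does not stream the file itself, it only answers with a header telling
# nginx to serve the file from a protected location. A minimal sketch of the
# pattern (``serve_private_artifact`` and the path prefix are illustrative
# assumptions, not the project's actual view):
#
#     from django.http import HttpResponse
#
#     def serve_private_artifact(request, path):
#         response = HttpResponse()
#         # nginx intercepts this header and serves the file internally.
#         response['X-Accel-Redirect'] = '/prod_artifacts/' + path
#         # Suggest a download filename, as the assertions above check.
#         response['Content-Disposition'] = 'filename=%s' % path.split('/')[-1]
#         return response
#
# The public-download tests that follow assert a plain 302 to the media URL
# instead, since no access control is needed there.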
r = self.client.get('/projects/django-kong/downloads/pdf/latest/') self.assertEqual(r.status_code, 302) - self.assertEqual(r._headers['location'][1], '/media/pdf/django-kong/latest/django-kong.pdf') + self.assertEqual( + r._headers['location'][1], + '/media/pdf/django-kong/latest/django-kong.pdf', + ) @override_settings(DEFAULT_PRIVACY_LEVEL='public') def test_public_download_filename(self): @@ -333,15 +415,25 @@ def test_public_download_filename(self): self.client.login(username='eric', password='test') r = self.client.get('/projects/django-kong/downloads/pdf/latest/') self.assertEqual(r.status_code, 302) - self.assertEqual(r._headers['location'][1], '/media/pdf/django-kong/latest/django-kong.pdf') + self.assertEqual( + r._headers['location'][1], + '/media/pdf/django-kong/latest/django-kong.pdf', + ) r = self.client.get('/projects/django-kong/downloads/epub/latest/') self.assertEqual(r.status_code, 302) - self.assertEqual(r._headers['location'][1], '/media/epub/django-kong/latest/django-kong.epub') + self.assertEqual( + r._headers['location'][1], + '/media/epub/django-kong/latest/django-kong.epub', + ) r = self.client.get('/projects/django-kong/downloads/htmlzip/latest/') self.assertEqual(r.status_code, 302) - self.assertEqual(r._headers['location'][1], '/media/htmlzip/django-kong/latest/django-kong.zip') + self.assertEqual( + r._headers['location'][1], + '/media/htmlzip/django-kong/latest/django-kong.zip', + ) + # Build Filtering @@ -349,8 +441,14 @@ def test_build_filtering(self): kong = self._create_kong('public', 'private') self.client.login(username='eric', password='test') - ver = Version.objects.create(project=kong, identifier='test id', - verbose_name='test verbose', privacy_level='private', slug='test-slug', active=True) + ver = Version.objects.create( + project=kong, + identifier='test id', + verbose_name='test verbose', + privacy_level='private', + slug='test-slug', + active=True, + ) r = self.client.get('/projects/django-kong/builds/') self.assertContains(r, 'test-slug') @@ -365,11 +463,9 @@ def test_build_filtering(self): self.assertNotContains(r, 'test-slug') def test_queryset_chaining(self): - """ - Test that manager methods get set on related querysets. 
- """ + """Test that manager methods get set on related querysets.""" kong = self._create_kong('public', 'private') self.assertEqual( kong.versions.private().get(slug='latest').slug, - 'latest' + 'latest', ) diff --git a/readthedocs/rtd_tests/tests/test_project.py b/readthedocs/rtd_tests/tests/test_project.py index e1c83e0ba92..b9b02745405 100644 --- a/readthedocs/rtd_tests/tests/test_project.py +++ b/readthedocs/rtd_tests/tests/test_project.py @@ -81,23 +81,28 @@ def test_translation_delete(self): self.assertFalse(Project.objects.filter(pk=project_delete.pk).exists()) self.assertTrue(Project.objects.filter(pk=project_keep.pk).exists()) self.assertIsNone( - Project.objects.get(pk=project_keep.pk).main_language_project) + Project.objects.get(pk=project_keep.pk).main_language_project, + ) def test_user_can_add_own_project_as_translation(self): user_a = User.objects.get(username='eric') project_a = get( - Project, users=[user_a], - language='en', main_language_project=None + Project, + users=[user_a], + language='en', + main_language_project=None, ) project_b = get( - Project, users=[user_a], - language='es', main_language_project=None + Project, + users=[user_a], + language='es', + main_language_project=None, ) self.client.login(username=user_a.username, password='test') self.client.post( reverse('projects_translations', args=[project_a.slug]), - data={'project': project_b.slug} + data={'project': project_b.slug}, ) self.assertEqual(project_a.translations.first(), project_b) @@ -108,21 +113,25 @@ def test_user_can_add_project_as_translation_if_is_owner(self): # Two users, two projects with different language user_a = User.objects.get(username='eric') project_a = get( - Project, users=[user_a], - language='es', main_language_project=None + Project, + users=[user_a], + language='es', + main_language_project=None, ) user_b = User.objects.get(username='tester') # User A and B are owners of project B project_b = get( - Project, users=[user_b, user_a], - language='en', main_language_project=None + Project, + users=[user_b, user_a], + language='en', + main_language_project=None, ) self.client.login(username=user_a.username, password='test') self.client.post( reverse('projects_translations', args=[project_a.slug]), - data={'project': project_b.slug} + data={'project': project_b.slug}, ) self.assertEqual(project_a.translations.first(), project_b) @@ -131,21 +140,25 @@ def test_user_can_not_add_other_user_project_as_translation(self): # Two users, two projects with different language user_a = User.objects.get(username='eric') project_a = get( - Project, users=[user_a], - language='es', main_language_project=None + Project, + users=[user_a], + language='es', + main_language_project=None, ) user_b = User.objects.get(username='tester') project_b = get( - Project, users=[user_b], - language='en', main_language_project=None + Project, + users=[user_b], + language='en', + main_language_project=None, ) # User A try to add project B as translation of project A self.client.login(username=user_a.username, password='test') resp = self.client.post( reverse('projects_translations', args=[project_a.slug]), - data={'project': project_b.slug} + data={'project': project_b.slug}, ) self.assertContains(resp, 'Select a valid choice') @@ -154,20 +167,22 @@ def test_user_can_not_add_other_user_project_as_translation(self): self.assertIsNone(project_b.main_language_project) def test_previous_users_can_list_and_delete_translations_not_owner(self): - """ - Test to make sure that previous users can list and delete - projects 
where they aren't owners. - """ + """Test to make sure that previous users can list and delete projects + where they aren't owners.""" user_a = User.objects.get(username='eric') project_a = get( - Project, users=[user_a], - language='es', main_language_project=None + Project, + users=[user_a], + language='es', + main_language_project=None, ) user_b = User.objects.get(username='tester') project_b = get( - Project, users=[user_b], - language='en', main_language_project=None + Project, + users=[user_b], + language='en', + main_language_project=None, ) project_a.translations.add(project_b) @@ -177,16 +192,16 @@ def test_previous_users_can_list_and_delete_translations_not_owner(self): # Project B is listed under user A translations resp = self.client.get( - reverse('projects_translations', args=[project_a.slug]) + reverse('projects_translations', args=[project_a.slug]), ) self.assertContains(resp, project_b.slug) resp = self.client.post( reverse( 'projects_translations_delete', - args=[project_a.slug, project_b.slug] + args=[project_a.slug, project_b.slug], ), - follow=True + follow=True, ) self.assertEqual(resp.status_code, 200) self.assertNotIn(project_b, project_a.translations.all()) @@ -194,12 +209,16 @@ def test_previous_users_can_list_and_delete_translations_not_owner(self): def test_user_cant_delete_other_user_translations(self): user_a = User.objects.get(username='eric') project_a = get( - Project, users=[user_a], - language='es', main_language_project=None + Project, + users=[user_a], + language='es', + main_language_project=None, ) project_b = get( - Project, users=[user_a], - language='en', main_language_project=None + Project, + users=[user_a], + language='en', + main_language_project=None, ) project_a.translations.add(project_b) @@ -207,12 +226,16 @@ def test_user_cant_delete_other_user_translations(self): user_b = User.objects.get(username='tester') project_c = get( - Project, users=[user_b], - language='es', main_language_project=None + Project, + users=[user_b], + language='es', + main_language_project=None, ) project_d = get( - Project, users=[user_b, user_a], - language='en', main_language_project=None + Project, + users=[user_b, user_a], + language='en', + main_language_project=None, ) project_d.translations.add(project_c) project_d.save() @@ -223,9 +246,9 @@ def test_user_cant_delete_other_user_translations(self): resp = self.client.post( reverse( 'projects_translations_delete', - args=[project_a.slug, project_b.slug] + args=[project_a.slug, project_b.slug], ), - follow=True + follow=True, ) self.assertEqual(resp.status_code, 404) self.assertIn(project_b, project_a.translations.all()) @@ -237,9 +260,9 @@ def test_user_cant_delete_other_user_translations(self): resp = self.client.post( reverse( 'projects_translations_delete', - args=[project_d.slug, project_b.slug] + args=[project_d.slug, project_b.slug], ), - follow=True + follow=True, ) self.assertEqual(resp.status_code, 404) self.assertIn(project_b, project_a.translations.all()) @@ -251,9 +274,9 @@ def test_user_cant_delete_other_user_translations(self): resp = self.client.post( reverse( 'projects_translations_delete', - args=[project_b.slug, project_b.slug] + args=[project_b.slug, project_b.slug], ), - follow=True + follow=True, ) self.assertEqual(resp.status_code, 404) self.assertIn(project_b, project_a.translations.all()) @@ -318,7 +341,8 @@ def test_multiple_conf_file_one_doc_in_path(self, find_method): def test_conf_file_not_found(self): with self.assertRaisesMessage( ProjectConfigurationError, - 
ProjectConfigurationError.NOT_FOUND) as cm:
+                ProjectConfigurationError.NOT_FOUND,
+        ) as cm:
             self.pip.conf_file()

     @patch('readthedocs.projects.models.Project.find')
@@ -330,7 +354,8 @@ def test_multiple_conf_files(self, find_method):
         ]
         with self.assertRaisesMessage(
                 ProjectConfigurationError,
-                ProjectConfigurationError.MULTIPLE_CONF_FILES) as cm:
+                ProjectConfigurationError.MULTIPLE_CONF_FILES,
+        ) as cm:
             self.pip.conf_file()
@@ -359,7 +384,8 @@ def setUp(self):
             state=BUILD_STATE_TRIGGERED,
         )
         self.build_2.date = (
-            datetime.datetime.now() - datetime.timedelta(hours=1))
+            datetime.datetime.now() - datetime.timedelta(hours=1)
+        )
         self.build_2.save()

         # Build started an hour ago with custom time (2 hours)
@@ -369,7 +395,8 @@ def setUp(self):
             state=BUILD_STATE_TRIGGERED,
         )
         self.build_3.date = (
-            datetime.datetime.now() - datetime.timedelta(hours=1))
+            datetime.datetime.now() - datetime.timedelta(hours=1)
+        )
         self.build_3.save()

     def test_finish_inactive_builds_task(self):
diff --git a/readthedocs/rtd_tests/utils.py b/readthedocs/rtd_tests/utils.py
index a2a4cded84f..08d6faeaaff 100644
--- a/readthedocs/rtd_tests/utils.py
+++ b/readthedocs/rtd_tests/utils.py
@@ -1,17 +1,19 @@
+# -*- coding: utf-8 -*-
 """Utility functions for use in tests."""
-from __future__ import absolute_import
+from __future__ import (
+    absolute_import, division, print_function, unicode_literals)

 import logging
 import subprocess
 from os import chdir, environ, getcwd, mkdir
-from os.path import abspath, join as pjoin
+from os.path import abspath
+from os.path import join as pjoin
 from shutil import copytree
 from tempfile import mkdtemp

-from django_dynamic_fixture import new
 from django.contrib.auth.models import User
-
+from django_dynamic_fixture import new

 log = logging.getLogger(__name__)

@@ -20,8 +22,11 @@ def check_output(l, env=()):
     if env == ():
         output = subprocess.Popen(l, stdout=subprocess.PIPE).communicate()[0]
     else:
-        output = subprocess.Popen(l, stdout=subprocess.PIPE,
-                                  env=env).communicate()[0]
+        output = subprocess.Popen(
+            l,
+            stdout=subprocess.PIPE,
+            env=env,
+        ).communicate()[0]
     return output

@@ -36,15 +41,17 @@ def make_test_git():
     chdir(directory)

     # Initialize and configure
+    # TODO: move the ``log.info`` call inside the ``check_output``
     log.info(check_output(['git', 'init'] + [directory], env=env))
-    log.info(check_output(
-        ['git', 'config', 'user.email', 'dev@readthedocs.org'],
-        env=env
-    ))
-    log.info(check_output(
-        ['git', 'config', 'user.name', 'Read the Docs'],
-        env=env
-    ))
+    log.info(
+        check_output(
+            ['git', 'config', 'user.email', 'dev@readthedocs.org'],
+            env=env,
+        ),
+    )
+    log.info(
+        check_output(['git', 'config', 'user.name', 'Read the Docs'], env=env),
+    )

     # Set up the actual repository
     log.info(check_output(['git', 'add', '.'], env=env))
@@ -53,38 +60,83 @@ def make_test_git():
     # Add fake repo as submodule.
We need to fake this here because local path # URL are not allowed and using a real URL will require Internet to clone # the repo - log.info(check_output(['git', 'checkout', '-b', 'submodule', 'master'], env=env)) + log.info( + check_output(['git', 'checkout', '-b', 'submodule', 'master'], env=env), + ) # https://stackoverflow.com/a/37378302/2187091 mkdir(pjoin(directory, 'foobar')) gitmodules_path = pjoin(directory, '.gitmodules') with open(gitmodules_path, 'w') as fh: - fh.write('''[submodule "foobar"]\n\tpath = foobar\n\turl = https://foobar.com/git\n''') - log.info(check_output( - [ - 'git', 'update-index', '--add', '--cacheinfo', '160000', - '233febf4846d7a0aeb95b6c28962e06e21d13688', 'foobar', - ], - env=env, - )) + fh.write( + '''[submodule "foobar"]\n\tpath = foobar\n\turl = https://foobar.com/git\n''', + ) + log.info( + check_output( + [ + 'git', + 'update-index', + '--add', + '--cacheinfo', + '160000', + '233febf4846d7a0aeb95b6c28962e06e21d13688', + 'foobar', + ], + env=env, + ), + ) log.info(check_output(['git', 'add', '.'], env=env)) log.info(check_output(['git', 'commit', '-m"Add submodule"'], env=env)) # Add a relative submodule URL in the relativesubmodule branch - log.info(check_output(['git', 'checkout', '-b', 'relativesubmodule', 'master'], env=env)) - log.info(check_output( - ['git', 'submodule', 'add', '-b', 'master', './', 'relativesubmodule'], - env=env - )) + log.info( + check_output( + ['git', 'checkout', '-b', 'relativesubmodule', 'master'], + env=env, + ), + ) + log.info( + check_output( + [ + 'git', + 'submodule', + 'add', + '-b', + 'master', + './', + 'relativesubmodule', + ], + env=env, + ), + ) log.info(check_output(['git', 'add', '.'], env=env)) - log.info(check_output(['git', 'commit', '-m"Add relative submodule"'], env=env)) + log.info( + check_output(['git', 'commit', '-m"Add relative submodule"'], env=env), + ) # Add an invalid submodule URL in the invalidsubmodule branch - log.info(check_output(['git', 'checkout', '-b', 'invalidsubmodule', 'master'], env=env)) - log.info(check_output( - ['git', 'submodule', 'add', '-b', 'master', './', 'invalidsubmodule'], - env=env, - )) + log.info( + check_output( + ['git', 'checkout', '-b', 'invalidsubmodule', 'master'], + env=env, + ), + ) + log.info( + check_output( + [ + 'git', + 'submodule', + 'add', + '-b', + 'master', + './', + 'invalidsubmodule', + ], + env=env, + ), + ) log.info(check_output(['git', 'add', '.'], env=env)) - log.info(check_output(['git', 'commit', '-m"Add invalid submodule"'], env=env)) + log.info( + check_output(['git', 'commit', '-m"Add invalid submodule"'], env=env), + ) # Checkout to master branch again log.info(check_output(['git', 'checkout', 'master'], env=env)) diff --git a/readthedocs/search/parse_json.py b/readthedocs/search/parse_json.py index 196caf2bd12..1fae4dca768 100644 --- a/readthedocs/search/parse_json.py +++ b/readthedocs/search/parse_json.py @@ -1,12 +1,13 @@ # -*- coding: utf-8 -*- """Functions related to converting content into dict/JSON structures.""" -from __future__ import absolute_import +from __future__ import ( + absolute_import, division, print_function, unicode_literals) -import logging import codecs import fnmatch import json +import logging import os from builtins import next, range # pylint: disable=redefined-builtin @@ -16,16 +17,22 @@ def process_all_json_files(version, build_dir=True): - """Return a list of pages to index""" + """Return a list of pages to index.""" if build_dir: full_path = version.project.full_json_path(version.slug) else: full_path = 
version.project.get_production_media_path( - type_='json', version_slug=version.slug, include_file=False) + type_='json', + version_slug=version.slug, + include_file=False, + ) html_files = [] for root, _, files in os.walk(full_path): for filename in fnmatch.filter(files, '*.fjson'): - if filename in ['search.fjson', 'genindex.fjson', 'py-modindex.fjson']: + if filename in [ + 'search.fjson', + 'genindex.fjson', + 'py-modindex.fjson', ]: continue html_files.append(os.path.join(root, filename)) page_list = [] @@ -60,13 +67,13 @@ def generate_sections_from_pyquery(body): div = h1_section.parent() h1_title = h1_section.text().replace(u'¶', '').strip() h1_id = div.attr('id') - h1_content = "" + h1_content = '' next_p = body('h1').next() while next_p: if next_p[0].tag == 'div' and 'class' in next_p[0].attrib: if 'section' in next_p[0].attrib['class']: break - h1_content += "\n%s\n" % next_p.html() + h1_content += '\n%s\n' % next_p.html() next_p = next_p.next() if h1_content: yield { @@ -96,7 +103,7 @@ def process_file(filename): with codecs.open(filename, encoding='utf-8', mode='r') as f: file_contents = f.read() except IOError as e: - log.info('Unable to index file: %s, error :%s', filename, e) + log.info('Unable to index file: %s', filename, exc_info=True) return data = json.loads(file_contents) sections = [] @@ -120,9 +127,13 @@ def process_file(filename): else: log.info('Unable to index title for: %s', filename) - return {'headers': process_headers(data, filename), - 'content': body_content, 'path': path, - 'title': title, 'sections': sections} + return { + 'headers': process_headers(data, filename), + 'content': body_content, + 'path': path, + 'title': title, + 'sections': sections, + } def recurse_while_none(element): diff --git a/readthedocs/search/utils.py b/readthedocs/search/utils.py index 57e700a0d83..f982cae6f49 100644 --- a/readthedocs/search/utils.py +++ b/readthedocs/search/utils.py @@ -1,19 +1,19 @@ # -*- coding: utf-8 -*- """Utilities related to reading and generating indexable search content.""" -from __future__ import absolute_import +from __future__ import ( + absolute_import, division, print_function, unicode_literals) -import os -import fnmatch -import re import codecs -import logging +import fnmatch import json +import logging +import os +import re from builtins import next, range from pyquery import PyQuery - log = logging.getLogger(__name__) @@ -23,7 +23,10 @@ def process_mkdocs_json(version, build_dir=True): full_path = version.project.full_json_path(version.slug) else: full_path = version.project.get_production_media_path( - type_='json', version_slug=version.slug, include_file=False) + type_='json', + version_slug=version.slug, + include_file=False, + ) html_files = [] for root, _, files in os.walk(full_path): @@ -35,8 +38,14 @@ def process_mkdocs_json(version, build_dir=True): continue relative_path = parse_path_from_file(file_path=filename) html = parse_content_from_file(file_path=filename) - headers = parse_headers_from_file(documentation_type='mkdocs', file_path=filename) - sections = parse_sections_from_file(documentation_type='mkdocs', file_path=filename) + headers = parse_headers_from_file( + documentation_type='mkdocs', + file_path=filename, + ) + sections = parse_sections_from_file( + documentation_type='mkdocs', + file_path=filename, + ) try: title = sections[0]['title'] except IndexError: @@ -62,13 +71,22 @@ def valid_mkdocs_json(file_path): with codecs.open(file_path, encoding='utf-8', mode='r') as f: content = f.read() except IOError as e: - 
log.warning('(Search Index) Unable to index file: %s, error: %s', file_path, e) + log.warning( + '(Search Index) Unable to index file: %s', + file_path, + exc_info=True, + ) return None + # TODO: wrap this in a try/except block and use ``exc_info=True`` in the + # ``log.warning`` call page_json = json.loads(content) for to_check in ['url', 'content']: if to_check not in page_json: - log.warning('(Search Index) Unable to index file: %s error: Invalid JSON', file_path) + log.warning( + '(Search Index) Unable to index file: %s error: Invalid JSON', + file_path, + ) return None return True @@ -80,9 +98,14 @@ def parse_path_from_file(file_path): with codecs.open(file_path, encoding='utf-8', mode='r') as f: content = f.read() except IOError as e: - log.warning('(Search Index) Unable to index file: %s, error: %s', file_path, e) + log.warning( + '(Search Index) Unable to index file: %s', + file_path, + exc_info=True, + ) return '' + # TODO: wrap this in a try/except block page_json = json.loads(content) path = page_json['url'] @@ -104,15 +127,23 @@ def parse_content_from_file(file_path): with codecs.open(file_path, encoding='utf-8', mode='r') as f: content = f.read() except IOError as e: - log.info('(Search Index) Unable to index file: %s, error :%s', file_path, e) + log.info( + '(Search Index) Unable to index file: %s', + file_path, + exc_info=True, + ) return '' + # TODO: wrap this in a try/except block page_json = json.loads(content) page_content = page_json['content'] content = parse_content(page_content) if not content: - log.info('(Search Index) Unable to index file: %s, empty file', file_path) + log.info( + '(Search Index) Unable to index file: %s, empty file', + file_path, + ) else: log.debug('(Search Index) %s length: %s', file_path, len(content)) return content @@ -137,10 +168,14 @@ def parse_headers_from_file(documentation_type, file_path): with codecs.open(file_path, encoding='utf-8', mode='r') as f: content = f.read() except IOError as e: - log.info('(Search Index) Unable to index file: %s, error :%s', - file_path, e) + log.info( + '(Search Index) Unable to index file: %s', + file_path, + exc_info=True, + ) return '' + # TODO: wrap this in a try/except block page_json = json.loads(content) page_content = page_json['content'] headers = parse_headers(documentation_type, page_content) @@ -164,9 +199,14 @@ def parse_sections_from_file(documentation_type, file_path): with codecs.open(file_path, encoding='utf-8', mode='r') as f: content = f.read() except IOError as e: - log.info('(Search Index) Unable to index file: %s, error :%s', file_path, e) + log.info( + '(Search Index) Unable to index file: %s', + file_path, + exc_info=True, + ) return '' + # TODO: wrap this in a try/except block page_json = json.loads(content) page_content = page_json['content'] sections = parse_sections(documentation_type, page_content) @@ -184,13 +224,13 @@ def parse_sphinx_sections(content): div = h1_section.parent() h1_title = h1_section.text().replace(u'¶', '').strip() h1_id = div.attr('id') - h1_content = "" + h1_content = '' next_p = next(body('h1')) # pylint: disable=stop-iteration-return while next_p: if next_p[0].tag == 'div' and 'class' in next_p[0].attrib: if 'section' in next_p[0].attrib['class']: break - h1_content += "\n%s\n" % next_p.html() + h1_content += '\n%s\n' % next_p.html() next_p = next(next_p) # pylint: disable=stop-iteration-return if h1_content: yield { @@ -227,14 +267,14 @@ def parse_mkdocs_sections(content): h1 = body('h1') h1_id = h1.attr('id') h1_title = h1.text().strip() - h1_content = 
"" + h1_content = '' next_p = next(body('h1')) # pylint: disable=stop-iteration-return while next_p: if next_p[0].tag == 'h2': break h1_html = next_p.html() if h1_html: - h1_content += "\n%s\n" % h1_html + h1_content += '\n%s\n' % h1_html next_p = next(next_p) # pylint: disable=stop-iteration-return if h1_content: yield { @@ -249,14 +289,14 @@ def parse_mkdocs_sections(content): h2 = section_list.eq(num) h2_title = h2.text().strip() section_id = h2.attr('id') - h2_content = "" + h2_content = '' next_p = next(body('h2')) # pylint: disable=stop-iteration-return while next_p: if next_p[0].tag == 'h2': break h2_html = next_p.html() if h2_html: - h2_content += "\n%s\n" % h2_html + h2_content += '\n%s\n' % h2_html next_p = next(next_p) # pylint: disable=stop-iteration-return if h2_content: yield { diff --git a/readthedocs/settings/base.py b/readthedocs/settings/base.py index e0c1b933d84..84fcc852b83 100644 --- a/readthedocs/settings/base.py +++ b/readthedocs/settings/base.py @@ -6,10 +6,9 @@ import os -from readthedocs.core.settings import Settings - from celery.schedules import crontab +from readthedocs.core.settings import Settings try: import readthedocsext # noqa @@ -156,7 +155,8 @@ def USE_PROMOS(self): # noqa # Paths SITE_ROOT = os.path.dirname( - os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + os.path.dirname(os.path.dirname(os.path.abspath(__file__))), + ) TEMPLATE_ROOT = os.path.join(SITE_ROOT, 'readthedocs', 'templates') DOCROOT = os.path.join(SITE_ROOT, 'user_builds') UPLOAD_ROOT = os.path.join(SITE_ROOT, 'user_uploads') @@ -202,7 +202,7 @@ def USE_PROMOS(self): # noqa 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'PREFIX': 'docs', - } + }, } CACHE_MIDDLEWARE_SECONDS = 60 @@ -290,7 +290,7 @@ def USE_PROMOS(self): # noqa # CORS CORS_ORIGIN_REGEX_WHITELIST = ( '^http://(.+)\.readthedocs\.io$', - '^https://(.+)\.readthedocs\.io$' + '^https://(.+)\.readthedocs\.io$', ) # So people can post to their accounts CORS_ALLOW_CREDENTIALS = True @@ -300,7 +300,7 @@ def USE_PROMOS(self): # noqa 'accept', 'origin', 'authorization', - 'x-csrftoken' + 'x-csrftoken', ) # RTD Settings @@ -325,7 +325,7 @@ def USE_PROMOS(self): # noqa ALLOWED_HOSTS = ['*'] ABSOLUTE_URL_OVERRIDES = { - 'auth.user': lambda o: '/profiles/{}/'.format(o.username) + 'auth.user': lambda o: '/profiles/{}/'.format(o.username), } INTERNAL_IPS = ('127.0.0.1',) @@ -379,7 +379,7 @@ def USE_PROMOS(self): # noqa 'console': { 'level': 'INFO', 'class': 'logging.StreamHandler', - 'formatter': 'default' + 'formatter': 'default', }, 'debug': { 'level': 'DEBUG', diff --git a/readthedocs/vcs_support/utils.py b/readthedocs/vcs_support/utils.py index 0525156c0d2..ced7c13f70d 100644 --- a/readthedocs/vcs_support/utils.py +++ b/readthedocs/vcs_support/utils.py @@ -1,10 +1,15 @@ +# -*- coding: utf-8 -*- """Locking utilities.""" -from __future__ import absolute_import -from builtins import object +from __future__ import ( + absolute_import, division, print_function, unicode_literals) + +import errno import logging import os -import time import stat +import time + +from builtins import object log = logging.getLogger(__name__) @@ -16,7 +21,7 @@ class LockTimeout(Exception): class Lock(object): """ - A simple file based lock with timeout + A simple file based lock with timeout. On entering the context, it will try to acquire the lock. If timeout passes, it just gets the lock anyway. 
@@ -26,7 +31,10 @@ class Lock(object):

     def __init__(self, project, version, timeout=5, polling_interval=0.1):
         self.name = project.slug
-        self.fpath = os.path.join(project.doc_path, '%s__rtdlock' % version.slug)
+        self.fpath = os.path.join(
+            project.doc_path,
+            '%s__rtdlock' % version.slug,
+        )
         self.timeout = timeout
         self.polling_interval = polling_interval

@@ -35,29 +43,38 @@ def __enter__(self):
         while os.path.exists(self.fpath):
             lock_age = time.time() - os.stat(self.fpath)[stat.ST_MTIME]
             if lock_age > self.timeout:
-                log.info("Lock (%s): Force unlock, old lockfile",
-                         self.name)
+                log.info('Lock (%s): Force unlock, old lockfile', self.name)
                 os.remove(self.fpath)
                 break
-            log.info("Lock (%s): Locked, waiting..", self.name)
+            log.info('Lock (%s): Locked, waiting...', self.name)
             time.sleep(self.polling_interval)
             timesince = time.time() - start
             if timesince > self.timeout:
-                log.info("Lock (%s): Force unlock, timeout reached",
-                         self.name)
+                log.info('Lock (%s): Force unlock, timeout reached', self.name)
                 os.remove(self.fpath)
                 break
-            log.info("%s still locked after %.2f seconds; retry for %.2f"
-                     " seconds", self.name, timesince, self.timeout)
+            log.info(
+                '%s still locked after %.2f seconds; retry for %.2f'
+                ' seconds',
+                self.name,
+                timesince,
+                self.timeout,
+            )
         open(self.fpath, 'w').close()
-        log.info("Lock (%s): Lock acquired", self.name)
+        log.info('Lock (%s): Lock acquired', self.name)

     def __exit__(self, exc, value, tb):
         try:
-            log.info("Lock (%s): Releasing", self.name)
+            log.info('Lock (%s): Releasing', self.name)
             os.remove(self.fpath)
-        except OSError:
-            log.exception("Lock (%s): Failed to release, ignoring...", self.name)
+        except OSError as e:
+            # We want to ignore "No such file or directory" and log any other
+            # type of error.
+            if e.errno != errno.ENOENT:
+                log.exception(
+                    'Lock (%s): Failed to release, ignoring...',
+                    self.name,
+                )


 class NonBlockingLock(object):
@@ -75,7 +92,10 @@ class NonBlockingLock(object):
     """

     def __init__(self, project, version, max_lock_age=None):
-        self.fpath = os.path.join(project.doc_path, '%s__rtdlock' % version.slug)
+        self.fpath = os.path.join(
+            project.doc_path,
+            '%s__rtdlock' % version.slug,
+        )
         self.max_lock_age = max_lock_age
         self.name = project.slug

@@ -84,22 +104,27 @@ def __enter__(self):
         if path_exists and self.max_lock_age is not None:
             lock_age = time.time() - os.stat(self.fpath)[stat.ST_MTIME]
             if lock_age > self.max_lock_age:
-                log.info("Lock (%s): Force unlock, old lockfile",
-                         self.name)
+                log.info('Lock (%s): Force unlock, old lockfile', self.name)
                 os.remove(self.fpath)
             else:
                 raise LockTimeout(
-                    "Lock ({}): Lock still active".format(self.name))
+                    'Lock ({}): Lock still active'.format(self.name),
+                )
         elif path_exists:
-            raise LockTimeout(
-                "Lock ({}): Lock still active".format(self.name))
+            raise LockTimeout('Lock ({}): Lock still active'.format(self.name))
         open(self.fpath, 'w').close()
         return self

     def __exit__(self, exc_type, exc_val, exc_tb):
         try:
-            log.info("Lock (%s): Releasing", self.name)
+            log.info('Lock (%s): Releasing', self.name)
             os.remove(self.fpath)
-        except (IOError, OSError):
-            log.error("Lock (%s): Failed to release, ignoring...", self.name,
-                      exc_info=True)
+        except (IOError, OSError) as e:
+            # We want to ignore "No such file or directory" and log any other
+            # type of error.
+            if e.errno != errno.ENOENT:
+                log.error(
+                    'Lock (%s): Failed to release, ignoring...',
+                    self.name,
+                    exc_info=True,
+                )
diff --git a/setup.cfg b/setup.cfg
index 5eb8d7b3625..437c2be814f 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -6,7 +6,7 @@ description = Read the Docs builds and hosts documentation
 author = Read the Docs, Inc
 author_email = dev@readthedocs.com
 url = http://readthedocs.org
-classifiers = 
+classifiers =
     Development Status :: 5 - Production/Stable
     Environment :: Web Environment
     Intended Audience :: Developers
@@ -23,4 +23,3 @@ classifiers =
 packages = find:
 include_package_data = True
 zip_safe = False
-
diff --git a/tasks.py b/tasks.py
index b4eef5c5eaa..4cf40e994e3 100644
--- a/tasks.py
+++ b/tasks.py
@@ -8,7 +8,7 @@

 from dateutil.parser import parse
 from future.moves.configparser import RawConfigParser
-from invoke import task, Exit
+from invoke import Exit, task


 @task
@@ -85,7 +85,10 @@ def prepare(ctx, version, since):
         'In order to avoid rate limiting on the GitHub API, you can specify '
         'an environment variable `GITHUB_TOKEN` with a personal access token. '
         'There is no need for the token to have any permissions unless the '
-        'repoistory is private.')))
+        'repository is private.',
+    ),
+    ),
+    )
     print('')
     print('Updating changelog')
     ctx.run(cmd)
@@ -98,6 +101,5 @@ def release(ctx, version):

     Do this after prepare task and manual cleanup/commit
     """
-    ctx.run(
-        ('git tag {version} && '
-         'git push --tags').format(version=version))
+    ctx.run(('git tag {version} && '
+             'git push --tags').format(version=version),)
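
Note on the ``# TODO: wrap this in a try/except block`` comments repeated through readthedocs/search/parse_json.py: they all point at the same unguarded ``json.loads(content)`` call, which raises ``ValueError`` on malformed input, and the switch to ``exc_info=True`` in the handlers above attaches the full traceback to the log record instead of flattening the exception into the message string. A minimal sketch of what resolving those TODOs could look like; the ``load_page_json`` helper is hypothetical, not part of this patch:

    import codecs
    import json
    import logging

    log = logging.getLogger(__name__)


    def load_page_json(file_path):
        # Hypothetical helper: read and decode one page, guarding
        # json.loads() the same way the IOError handlers guard
        # codecs.open().
        try:
            with codecs.open(file_path, encoding='utf-8', mode='r') as f:
                content = f.read()
        except IOError:
            log.warning(
                '(Search Index) Unable to index file: %s',
                file_path,
                exc_info=True,
            )
            return None
        try:
            return json.loads(content)
        except ValueError:
            # ValueError also covers json.JSONDecodeError, its Python 3
            # subclass, so this works on both interpreters.
            log.warning(
                '(Search Index) Unable to index file: %s',
                file_path,
                exc_info=True,
            )
            return None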
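
Note on the new ``errno`` checks in both ``__exit__`` methods in readthedocs/vcs_support/utils.py: only a missing lockfile (``ENOENT``, meaning it was already released or force-unlocked by another process) is now ignored silently, while any other ``OSError``, such as a permission error, is still logged with its traceback. Because the codebase still supports Python 2 (hence the ``builtins`` import), comparing ``errno`` is used rather than Python 3's ``FileNotFoundError``. The pattern in isolation, as a sketch with a hypothetical ``release_lockfile`` helper:

    import errno
    import logging
    import os

    log = logging.getLogger(__name__)


    def release_lockfile(fpath, name):
        # A lockfile that is already gone is expected (ENOENT); anything
        # else, e.g. EACCES, is worth logging with its traceback.
        try:
            os.remove(fpath)
        except OSError as e:
            if e.errno != errno.ENOENT:
                log.exception('Lock (%s): Failed to release, ignoring...', name)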
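
For reference, both lock classes are used as context managers: ``Lock`` polls until the lockfile disappears and force-unlocks after ``timeout`` seconds, while ``NonBlockingLock`` raises ``LockTimeout`` immediately if a sufficiently fresh lockfile exists. A usage sketch; the ``namedtuple`` stand-ins replace the real ``Project`` and ``Version`` model instances and only provide the attributes the locks actually read:

    import tempfile
    from collections import namedtuple

    from readthedocs.vcs_support.utils import Lock, LockTimeout, NonBlockingLock

    # Stand-ins for real Project/Version model instances; the locks only
    # use ``project.doc_path``, ``project.slug`` and ``version.slug``.
    Project = namedtuple('Project', ['doc_path', 'slug'])
    Version = namedtuple('Version', ['slug'])

    project = Project(doc_path=tempfile.mkdtemp(), slug='demo-project')
    version = Version(slug='latest')

    # Blocking variant: waits for the lockfile to disappear, force-unlocking
    # once ``timeout`` seconds have passed.
    with Lock(project, version, timeout=5):
        pass  # a VCS checkout/update would run here

    # Non-blocking variant: raises LockTimeout instead of waiting.
    try:
        with NonBlockingLock(project, version, max_lock_age=30 * 60):
            pass  # a build step would run here
    except LockTimeout:
        print('Lock still active; skipping.')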