Skip to content

Commit

Permalink
Clean up mkdocs builder and search
Browse files — browse the repository at this point in the history
  • Loading branch information
ericholscher committed Oct 13, 2014
1 parent 645cf4d commit 92a183a
Show file tree
Hide file tree
Showing 6 changed files with 159 additions and 106 deletions.
79 changes: 31 additions & 48 deletions readthedocs/doc_builder/backends/mkdocs.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,31 +17,25 @@
log = logging.getLogger(__name__)


class Builder(BaseBuilder):
class BaseMkdocs(BaseBuilder):

"""
Mkdocs builder
"""
type = 'mkdocs'

def __init__(self, *args, **kwargs):
super(Builder, self).__init__(*args, **kwargs)
self.old_artifact_path = os.path.join(
self.version.project.checkout_path(self.version.slug), 'site')
super(BaseMkdocs, self).__init__(*args, **kwargs)
self.old_artifact_path = os.path.join(self.version.project.checkout_path(self.version.slug), self.build_dir)

@restoring_chdir
def build(self, **kwargs):
project = self.version.project
checkout_path = project.checkout_path(self.version.slug)
site_path = os.path.join(checkout_path, 'site')
os.chdir(checkout_path)
def append_conf(self, **kwargs):
"""
Set mkdocs config values
"""

# Pull mkdocs config data
user_config = yaml.safe_load(open('mkdocs.yml', 'r'))
docs_dir = user_config.get('docs_dir', 'docs')

# Set mkdocs config values

MEDIA_URL = getattr(
settings, 'MEDIA_URL', 'https://media.readthedocs.org')
if 'extra_javascript' in user_config:
Expand Down Expand Up @@ -84,9 +78,9 @@ def build(self, **kwargs):
# RTD javascript writing

READTHEDOCS_DATA = {
'project': project.slug,
'project': self.version.project.slug,
'version': self.version.slug,
'language': project.language,
'language': self.version.project.language,
'page': None,
'theme': "readthedocs",
'docroot': docs_dir,
Expand All @@ -112,7 +106,7 @@ def build(self, **kwargs):

include_ctx = Context({
'global_analytics_code': getattr(settings, 'GLOBAL_ANALYTICS_CODE', 'UA-17997319-1'),
'user_analytics_code': project.analytics_code,
'user_analytics_code': self.version.project.analytics_code,
})
include_string = template_loader.get_template(
'doc_builder/include.js.tmpl'
Expand All @@ -121,40 +115,29 @@ def build(self, **kwargs):
include_file.write(include_string)
include_file.close()

@restoring_chdir
def build(self, **kwargs):
checkout_path = self.version.project.checkout_path(self.version.slug)
#site_path = os.path.join(checkout_path, 'site')
os.chdir(checkout_path)
self.append_conf()
# Actual build

build_command = "%s build --site-dir=site --theme=mkdocs" % (
project.venv_bin(version=self.version.slug,
bin='mkdocs')
build_command = "{command} {builder} --site-dir={build_dir} --theme=mkdocs".format(
command=self.version.project.venv_bin(version=self.version.slug, bin='mkdocs'),
builder=self.builder,
build_dir=self.build_dir,
)
results = run(build_command, shell=True)
return results

try:
# Index Search
page_list = []
log.info(LOG_TEMPLATE.format(project=self.version.project.slug, version=self.version.slug, msg='Indexing files'))
for root, dirnames, filenames in os.walk(site_path):
for filename in filenames:
if fnmatch.fnmatch(filename, '*.html'):
full_path = os.path.join(root, filename.lstrip('/'))
relative_path = os.path.join(root.replace(site_path, '').lstrip('/'), filename.lstrip('/'))
relative_path = re.sub('.html$', '', relative_path)
html = parse_content_from_file(documentation_type='mkdocs', file_path=full_path)
headers = parse_headers_from_file(documentation_type='mkdocs', file_path=full_path)
sections = parse_sections_from_file(documentation_type='mkdocs', file_path=full_path)
page_list.append(
{'content': html, 'path': relative_path, 'title': sections[0]['title'], 'headers': headers, 'sections': sections}
)

data = {
'page_list': page_list,
'version_pk': self.version.pk,
'project_pk': self.version.project.pk
}
log_msg = ' '.join([page['path'] for page in page_list])
log.info("(Search Index) Sending Data: %s [%s]" % (self.version.project.slug, log_msg))
apiv2.index_search.post({'data': data})
except:
log.error('Search indexing failed')

return results
class MkdocsHTML(BaseMkdocs):
    # Builder-loader key for this output type.
    type = 'mkdocs'
    # mkdocs sub-command executed by BaseMkdocs.build().
    builder = 'build'
    # Output directory (relative to the project checkout) for artifacts.
    build_dir = '_build/html'


class MkdocsJSON(BaseMkdocs):
    # Builder-loader key for the JSON (search-index) output type.
    type = 'mkdocs_json'
    # mkdocs sub-command executed by BaseMkdocs.build().
    builder = 'json'
    # Output directory (relative to the project checkout) for artifacts.
    build_dir = '_build/json'
3 changes: 2 additions & 1 deletion readthedocs/doc_builder/loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,5 +16,6 @@
'sphinx_search': sphinx.SearchBuilder,
'sphinx_singlehtmllocalmedia': sphinx.LocalMediaBuilder,
# Other markup
'mkdocs': mkdocs.Builder,
'mkdocs': mkdocs.MkdocsHTML,
'mkdocs_json': mkdocs.MkdocsJSON,
}
5 changes: 4 additions & 1 deletion readthedocs/projects/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -480,7 +480,10 @@ def full_json_path(self, version='latest'):
"""
The path to the build json docs in the project.
"""
return os.path.join(self.conf_dir(version), "_build", "json")
if 'sphinx' in self.documentation_type:
return os.path.join(self.conf_dir(version), "_build", "json")
elif 'mkdocs' in self.documentation_type:
return os.path.join(self.checkout_path(version), "_build", "json")

def full_singlehtml_path(self, version='latest'):
"""
Expand Down
38 changes: 25 additions & 13 deletions readthedocs/projects/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
run_on_app_servers)
from core import utils as core_utils
from search.parse_json import process_all_json_files
from search.utils import process_mkdocs_json
from vcs_support import utils as vcs_support_utils

log = logging.getLogger(__name__)
Expand Down Expand Up @@ -451,15 +452,24 @@ def build_docs(version, force, pdf, man, epub, dash, search, localmedia):
html_builder = builder_loading.get(project.documentation_type)(version)
if force:
html_builder.force()
# html_builder.clean()
if 'sphinx' in project.documentation_type:
html_builder.append_conf()
html_builder.append_conf()
results['html'] = html_builder.build()
if results['html'][0] == 0:
html_builder.move()

fake_results = (999, "Project Skipped, Didn't build",
"Project Skipped, Didn't build")
if 'mkdocs' in project.documentation_type:
if search:
try:
search_builder = builder_loading.get('mkdocs_json')(version)
results['search'] = search_builder.build()
if results['search'][0] == 0:
search_builder.move()
except:
log.error(LOG_TEMPLATE.format(
project=project.slug, version=version.slug, msg="JSON Build Error"), exc_info=True)

if 'sphinx' in project.documentation_type:
# Search builder. Creates JSON from docs and sends it to the
# server.
Expand Down Expand Up @@ -689,16 +699,18 @@ def record_pdf(api, record, results, state, version):
def update_search(version, build):
    """Build a page index for *version* and POST it to the search API.

    The parser used depends on the project's documentation type
    (sphinx vs mkdocs); the resulting page list is sent to the
    ``index_search`` API endpoint together with version/project/commit
    identifiers.
    """
    # Default to an empty index so a project whose documentation_type is
    # neither sphinx nor mkdocs doesn't raise NameError below.
    page_list = []
    if 'sphinx' in version.project.documentation_type:
        page_list = process_all_json_files(version)
    if 'mkdocs' in version.project.documentation_type:
        page_list = process_mkdocs_json(version)

    data = {
        'page_list': page_list,
        'version_pk': version.pk,
        'project_pk': version.project.pk,
        'commit': build.get('commit'),
    }
    log_msg = ' '.join([page['path'] for page in page_list])
    log.info("(Search Index) Sending Data: %s [%s]" % (version.project.slug, log_msg))
    apiv2.index_search.post({'data': data})


@task()
Expand Down
4 changes: 2 additions & 2 deletions readthedocs/restapi/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ def index_search_request(version, page_list, commit):
page_obj = PageIndex()
project_scale = 1

tags = [tag.name for tag in project.tags.all()]
#tags = [tag.name for tag in project.tags.all()]

project_obj = ProjectIndex()
project_obj.index_document(data={
Expand All @@ -104,7 +104,7 @@ def index_search_request(version, page_list, commit):
'lang': project.language,
'author': [user.username for user in project.users.all()],
'url': project.get_absolute_url(),
'tags': tags,
'tags': None,
'_boost': project_scale,
})

Expand Down
Loading

0 comments on commit 92a183a

Please sign in to comment.