Skip to content

Commit

Permalink
Refactor gatsby-node to separate utils
Browse files Browse the repository at this point in the history
  • Loading branch information
psdcoder committed Apr 7, 2020
1 parent 60733ff commit b934a9f
Show file tree
Hide file tree
Showing 5 changed files with 313 additions and 297 deletions.
259 changes: 12 additions & 247 deletions gatsby-node.js
Original file line number Diff line number Diff line change
@@ -1,266 +1,31 @@
/* eslint-env node */

const fs = require('fs')
const path = require('path')
const GithubSlugger = require('github-slugger')
const { createFilePath } = require('gatsby-source-filesystem')
const tagToSlug = require('./src/utils/shared/tagToSlug')
const pagesGenerator = require('./src/utils/node/paginatable')
const { siteMetadata } = require('./gatsby-config')

const { getItemBySource } = require('./src/utils/shared/sidebar')

const remark = require('remark')
const remarkHTML = require('remark-html')

const markdownToHtml = remark().use(remarkHTML).processSync
const slugger = new GithubSlugger()

// Generate headings data from markdown

// Matches an explicit slug annotation such as " {#my-slug}" at the end of a title.
const SLUG_REGEXP = /\s+{#([a-z0-9-]*[a-z0-9]+)}\s*$/

/**
 * Split a heading title into `[displayText, slug]`.
 *
 * An explicit trailing annotation like `"Title {#custom-slug}"` wins;
 * otherwise a slug is generated from the title via the shared slugger.
 */
function extractSlugFromTitle(title) {
  const explicit = SLUG_REGEXP.exec(title)

  if (explicit === null) {
    return [title, slugger.slug(title)]
  }

  return [title.slice(0, explicit.index), explicit[1]]
}

/**
 * Extract all second-level ("## ") headings from raw markdown text.
 *
 * Returns an array of `{ text, slug }` entries, then resets the shared
 * slugger so auto-generated slugs do not leak between documents.
 */
const parseHeadings = text => {
  const headings = []

  for (const [, , rawTitle] of text.matchAll(/\n(## \s*)(.*)/g)) {
    const [cleanTitle, slug] = extractSlugFromTitle(rawTitle)
    headings.push({ text: cleanTitle, slug })
  }

  slugger.reset()
  return headings
}
const {
getNodeSlug,
setPageContext,
removePageTrailingSlash
} = require('./src/utils/node/common')
const { createPages: createDocPages } = require('./src/utils/node/doc')
const { createPages: createBlogPages } = require('./src/utils/node/blog')

exports.onCreateNode = ({ node, actions, getNode }) => {
const { createNodeField } = actions

if (node.internal.type === 'MarkdownRemark') {
const contentPath = path.join(__dirname, 'content')
const source = node.fileAbsolutePath.replace(contentPath, '')
let value

if (source.startsWith('/blog')) {
value = createFilePath({
getNode,
node,
trailingSlash: false
}).replace(/^\/blog\/[0-9\-]*/, '/blog/')

// Convert fields in frontmatter from markdown to html
const {
frontmatter: { descriptionLong, pictureComment }
} = node

if (descriptionLong) {
node.frontmatter.descriptionLong = markdownToHtml(
descriptionLong
).contents
}

if (pictureComment) {
node.frontmatter.pictureComment = markdownToHtml(
pictureComment
).contents
}
// end Convert fields
} else {
value = getItemBySource(source).path
}

createNodeField({
name: 'slug',
node,
value
value: getNodeSlug(node, getNode)
})
}
}

exports.createPages = async ({ graphql, actions }) => {
// DOCS
const docsResponse = await graphql(
`
{
docs: allMarkdownRemark(
filter: { fileAbsolutePath: { regex: "/content/docs/" } }
limit: 9999
) {
edges {
node {
rawMarkdownBody
fields {
slug
}
}
}
}
}
`
)

if (docsResponse.errors) {
throw docsResponse.errors
}

const docComponent = path.resolve('./src/templates/doc-home.tsx')

docsResponse.data.docs.edges.forEach(doc => {
const headings = parseHeadings(doc.node.rawMarkdownBody)

if (doc.node.fields.slug) {
actions.createPage({
component: docComponent,
path: doc.node.fields.slug,
context: {
isDocs: true,
slug: doc.node.fields.slug,
headings
}
})
}
})

// Blog
const blogResponse = await graphql(
`
{
allMarkdownRemark(
sort: { fields: [frontmatter___date], order: DESC }
filter: { fileAbsolutePath: { regex: "/content/blog/" } }
limit: 9999
) {
edges {
node {
fields {
slug
}
frontmatter {
title
}
}
}
}
home: allMarkdownRemark(
sort: { fields: [frontmatter___date], order: DESC }
filter: { fileAbsolutePath: { regex: "/content/blog/" } }
limit: 9999
) {
pageInfo {
itemCount
}
}
tags: allMarkdownRemark(limit: 9999) {
group(field: frontmatter___tags) {
fieldValue
pageInfo {
itemCount
}
}
}
}
`
)

if (blogResponse.errors) {
throw blogResponse.errors
}

// Create home blog pages (with pagination)
const blogHomeTemplate = path.resolve('./src/templates/blog-home.tsx')

for (const page of pagesGenerator({
basePath: '/blog',
hasHeroItem: true,
itemCount: blogResponse.data.home.pageInfo.itemCount
})) {
actions.createPage({
component: blogHomeTemplate,
path: page.path,
context: {
isBlog: true,
...page.context
}
})
}

// Create blog posts pages
const blogPostTemplate = path.resolve('./src/templates/blog-post.tsx')
const posts = blogResponse.data.allMarkdownRemark.edges

posts.forEach((post, index) => {
const previous = index === posts.length - 1 ? null : posts[index + 1].node
const next = index === 0 ? null : posts[index - 1].node

actions.createPage({
component: blogPostTemplate,
context: {
isBlog: true,
currentPage: index + 1,
next,
previous,
slug: post.node.fields.slug
},
path: post.node.fields.slug
})
})

// Create tags pages (with pagination)
const blogTagsTemplate = path.resolve('./src/templates/blog-tags.tsx')

blogResponse.data.tags.group.forEach(
({ fieldValue: tag, pageInfo: { itemCount } }) => {
const basePath = `/tags/${tagToSlug(tag)}`

for (const page of pagesGenerator({ basePath, itemCount })) {
actions.createPage({
component: blogTagsTemplate,
path: page.path,
context: { tag, ...page.context }
})
}
}
)
createDocPages({ graphql, actions })
createBlogPages({ graphql, actions })
}

const is404Regexp = /^\/404/
const trailingSlashRegexp = /\/$/

exports.onCreatePage = ({ page, actions }) => {
// Set necessary flags for pageContext
const newPage = {
...page,
context: {
...page.context,
is404: is404Regexp.test(page.path)
}
}

// Remove trailing slash
if (page.path !== '/' && trailingSlashRegexp.test(newPage.path)) {
newPage.path = newPage.path.replace(trailingSlashRegexp, '')
}

if (newPage !== page) {
actions.deletePage(page)
actions.createPage(newPage)
}
setPageContext(page, actions)
removePageTrailingSlash(page, actions)
}

// Ignore warnings about CSS inclusion order, because we use CSS modules.
Expand Down
Loading

0 comments on commit b934a9f

Please sign in to comment.