feat: add tests for build post list script (#3284)
Co-authored-by: Akshat Nema <[email protected]>
Co-authored-by: Ansh Goyal <[email protected]>
vishvamsinh28 and anshgoyalevil authored Dec 25, 2024
1 parent 2199c3b commit d141dcc
Showing 6 changed files with 471 additions and 88 deletions.
218 changes: 133 additions & 85 deletions scripts/build-post-list.js
@@ -1,8 +1,7 @@
const { readdirSync, statSync, existsSync, readFileSync, writeFileSync } = require('fs')
const { resolve, basename } = require('path')
const { readdir, stat, pathExists, readFile, writeFile } = require('fs-extra')
const { basename, join, normalize, sep, posix, relative, parse } = require('path')
const frontMatter = require('gray-matter')
const toc = require('markdown-toc')
const { slugify } = require('markdown-toc/lib/utils')
const readingTime = require('reading-time')
const { markdownToTxt } = require('markdown-to-txt')
const { buildNavTree, addDocButtons } = require('./build-docs')
@@ -15,51 +14,102 @@ const result = {
docsTree: {}
}
const releaseNotes = []
const basePath = 'pages'
const postDirectories = [
// order of these directories is important, as the blog should come before docs, to create a list of available release notes, which will later be used to release-note-link for spec docs
[`${basePath}/blog`, '/blog'],
[`${basePath}/docs`, '/docs'],
[`${basePath}/about`, '/about']
];

const addItem = (details) => {
if(details.slug.startsWith('/docs'))
result["docs"].push(details)
else if(details.slug.startsWith('/blog'))
result["blog"].push(details)
else if(details.slug.startsWith('/about'))
result["about"].push(details)
else {}
if (!details || typeof details.slug !== 'string') {
throw new Error('Invalid details object provided to addItem');
}
const sectionMap = {
'/docs': 'docs',
'/blog': 'blog',
'/about': 'about'
};
const section = Object.keys(sectionMap).find(key => details.slug.startsWith(key));
if (section) {
result[sectionMap[section]].push(details);
}
};

function getVersionDetails(slug, weight) {
const fileBaseName = basename(slug);
const versionName = fileBaseName.split('-')[0];
return {
title: versionName.startsWith('v')
? capitalize(versionName.slice(1))
: capitalize(versionName),
weight
};
}

/**
* Builds a list of posts from the specified directories and writes it to a file
* @param {Array<Array<string>>} postDirectories - Array of [directory, slug] tuples
* @param {string} basePath - Base path for resolving relative paths
* @param {string} writeFilePath - Path where the output JSON will be written
* @throws {Error} If required parameters are missing or if any operation fails
* @returns {Promise<void>}
*/
async function buildPostList(postDirectories, basePath, writeFilePath) {
try {

if (!basePath) {
throw new Error('Error while building post list: basePath is required');
}

if (!writeFilePath) {
throw new Error('Error while building post list: writeFilePath is required');
}

if (postDirectories.length === 0) {
throw new Error('Error while building post list: postDirectories array is empty');
}
const normalizedBasePath = normalize(basePath)
await walkDirectories(postDirectories, result, normalizedBasePath)
const treePosts = buildNavTree(result.docs.filter((p) => p.slug.startsWith('/docs/')))
result.docsTree = treePosts
result.docs = addDocButtons(result.docs, treePosts)
await writeFile(writeFilePath, JSON.stringify(result, null, ' '))
} catch (error) {
throw new Error(`Error while building post list: ${error.message}`, { cause: error });
}
}

module.exports = async function buildPostList() {
walkDirectories(postDirectories, result)
const treePosts = buildNavTree(result["docs"].filter((p) => p.slug.startsWith('/docs/')))
result["docsTree"] = treePosts
result["docs"] = addDocButtons(result["docs"], treePosts)
if (process.env.NODE_ENV === 'production') {
// console.log(inspect(result, { depth: null, colors: true }))
function handleSpecificationVersion(details, fileBaseName) {
if (fileBaseName.includes('next-spec') || fileBaseName.includes('next-major-spec')) {
details.isPrerelease = true;
details.title += " (Pre-release)";
}
if (fileBaseName.includes('explorer')) {
details.title += " - Explorer";
}
writeFileSync(resolve(__dirname, '..', 'config', 'posts.json'), JSON.stringify(result, null, ' '))
return details;
}

function walkDirectories(directories, result, sectionWeight = 0, sectionTitle, sectionId, rootSectionId) {
async function walkDirectories(
directories,
resultObj,
basePath,
sectionTitle,
sectionId,
rootSectionId,
sectionWeight = 0
) {
for (let dir of directories) {
let directory = dir[0]
let sectionSlug = dir[1] || ''
let files = readdirSync(directory);
const directory = posix.normalize(dir[0]);
const sectionSlug = dir[1] || '';
const files = await readdir(directory)

for (let file of files) {
let details
const fileName = [directory, file].join('/')
const fileNameWithSection = [fileName, '_section.mdx'].join('/')
const slug = fileName.replace(new RegExp(`^${basePath}`), '')
const slugElements = slug.split('/');
if (isDirectory(fileName)) {
if (existsSync(fileNameWithSection)) {
let details;
const fileName = normalize(join(directory, file));
const fileNameWithSection = normalize(join(fileName, '_section.mdx'))
const slug = `/${normalize(relative(basePath, fileName)).replace(/\\/g, '/')}`
const slugElements = slug.split('/')

if (await isDirectory(fileName)) {
if (await pathExists(fileNameWithSection)) {
// Passing a second argument to frontMatter disables cache. See https://github.com/asyncapi/website/issues/1057
details = frontMatter(readFileSync(fileNameWithSection, 'utf-8'), {}).data
details = frontMatter(await readFile(fileNameWithSection, 'utf-8'), {}).data
details.title = details.title || capitalize(basename(fileName))
} else {
details = {
@@ -68,8 +118,8 @@ function walkDirectories(directories, result, sectionWeight = 0, sectionTitle, s
}
details.isSection = true
if (slugElements.length > 3) {
details.parent = slugElements[slugElements.length - 2]
details.sectionId = slugElements[slugElements.length - 1]
details.parent = slugElements[slugElements.length - 2]
details.sectionId = slugElements[slugElements.length - 1]
}
if (!details.parent) {
details.isRootSection = true
@@ -79,9 +129,9 @@ function walkDirectories(directories, result, sectionWeight = 0, sectionTitle, s
details.slug = slug
addItem(details)
const rootId = details.parent || details.rootSectionId
walkDirectories([[fileName, slug]], result, details.weight, details.title, details.sectionId, rootId)
} else if (file.endsWith('.mdx') && !fileName.endsWith('/_section.mdx')) {
const fileContent = readFileSync(fileName, 'utf-8')
await walkDirectories([[fileName, slug]], resultObj, basePath, details.title, details.sectionId, rootId, details.sectionWeight)
} else if (file.endsWith('.mdx') && !fileName.endsWith(sep + '_section.mdx')) {
const fileContent = await readFile(fileName, 'utf-8')
// Passing a second argument to frontMatter disables cache. See https://github.com/asyncapi/website/issues/1057
const { data, content } = frontMatter(fileContent, {})
details = data
@@ -93,43 +143,27 @@ function walkDirectories(directories, result, sectionWeight = 0, sectionTitle, s
details.sectionTitle = sectionTitle
details.sectionId = sectionId
details.rootSectionId = rootSectionId
details.id = fileName
details.isIndex = fileName.endsWith('/index.mdx')
details.id = fileName.replace(/\\/g, '/')
details.isIndex = fileName.endsWith(join('index.mdx'))
details.slug = details.isIndex ? sectionSlug : slug.replace(/\.mdx$/, '')
if(details.slug.includes('/reference/specification/') && !details.title) {
const fileBaseName = basename(data.slug) // ex. v2.0.0 | v2.1.0-next-spec.1
const fileName = fileBaseName.split('-')[0] // v2.0.0 | v2.1.0
details.weight = specWeight--

if (fileName.startsWith('v')) {
details.title = capitalize(fileName.slice(1))
} else {
details.title = capitalize(fileName)
}
if (details.slug.includes('/reference/specification/') && !details.title) {
const fileBaseName = basename(details.slug)
const versionDetails = getVersionDetails(details.slug, specWeight--);
details.title = versionDetails.title;
details.weight = versionDetails.weight;

if(releaseNotes.includes(details.title)){
if (releaseNotes.includes(details.title)) {
details.releaseNoteLink = `/blog/release-notes-${details.title}`
}

if (fileBaseName.includes('next-spec') || fileBaseName.includes('next-major-spec')) {
details.isPrerelease = true
// this need to be separate because the `-` in "Pre-release" will get removed by `capitalize()` function
details.title += " (Pre-release)"
}
if (fileBaseName.includes('explorer')) {
details.title += " - Explorer"
}
details = handleSpecificationVersion(details, fileBaseName);
}

// To create a list of available ReleaseNotes list, which will be used to add details.releaseNoteLink attribute.
if(file.startsWith("release-notes") && dir[1] === "/blog"){
const fileName_without_extension = file.slice(0,-4)
// removes the file extension. For example, release-notes-2.1.0.md -> release-notes-2.1.0
const version = fileName_without_extension.slice(fileName_without_extension.lastIndexOf("-")+1)

// gets the version from the name of the releaseNote .md file (from /blog). For example, version = 2.1.0 if fileName_without_extension = release-notes-2.1.0
releaseNotes.push(version)
// releaseNotes is the list of all available releaseNotes
if (file.startsWith('release-notes') && dir[1] === '/blog') {
const { name } = parse(file);
const version = name.split('-').pop();
releaseNotes.push(version);
}

addItem(details)
@@ -138,24 +172,38 @@ function walkDirectories(directories, result, sectionWeight = 0, sectionTitle, s
}
}

// Matches heading IDs in two formats:
// 1. {#my-heading-id}
// 2. <a name="my-heading-id">
const HEADING_ID_REGEX = /[\s]*(?:\{#([a-zA-Z0-9\-_]+)\}|<a[\s]+name="([a-zA-Z0-9\-_]+)")/;

/**
* Extracts heading IDs from markdown headings
* @param {string} str - The heading text containing potential ID
* @returns {string} The extracted ID or empty string if no valid ID found
*/
function slugifyToC(str) {
let slug
// Try to match heading ids like {# myHeadingId}
const headingIdMatch = str.match(/[\s]?\{\#([\w\d\-_]+)\}/)
if (headingIdMatch && headingIdMatch.length >= 2) {
slug = headingIdMatch[1]
} else {
// Try to match heading ids like {<a name="myHeadingId"/>}
const anchorTagMatch = str.match(/[\s]*<a[\s]+name="([\w\d\s\-_]+)"/)
if (anchorTagMatch && anchorTagMatch.length >= 2) slug = anchorTagMatch[1]
}
return slug || slugify(str, { firsth1: true, maxdepth: 6 })
if (typeof str !== 'string') return '';
if (!str.trim()) return '';
let slug = '';

// Match heading IDs like {# myHeadingId}
const idMatch = str.match(HEADING_ID_REGEX);
const [, headingId, anchorId] = idMatch || [];
slug = (headingId || anchorId || '').trim();

// If no valid ID is found, return an empty string
return slug;
}

function isDirectory(dir) {
return statSync(dir).isDirectory()
async function isDirectory(dir) {
return (await stat(dir)).isDirectory()
}

function capitalize(text) {
return text.split(/[\s\-]/g).map(word => `${word[0].toUpperCase()}${word.substr(1)}`).join(' ')
return text.split(/[\s-]/)
.map(word => word.charAt(0).toUpperCase() + word.slice(1))
.join(' ');
}

module.exports = { slugifyToC, buildPostList, addItem }
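Note: the reworked slugifyToC above only extracts explicit heading IDs in the two formats matched by HEADING_ID_REGEX and returns an empty string otherwise. Below is a minimal Jest-style sketch of how that behavior could be exercised; it is illustrative only — the test file added by this commit is not shown in this view, and the module path is an assumption.

// Illustrative sketch, not the actual test file from this commit. Assumes Jest and this module path.
const { slugifyToC } = require('../scripts/build-post-list');

describe('slugifyToC', () => {
  it('extracts an ID written as {#my-heading-id}', () => {
    expect(slugifyToC('## Install {#my-heading-id}')).toBe('my-heading-id');
  });

  it('extracts an ID from an <a name="..."> anchor', () => {
    expect(slugifyToC('## Install <a name="my-heading-id">')).toBe('my-heading-id');
  });

  it('returns an empty string when no explicit ID is present', () => {
    expect(slugifyToC('## Just a heading')).toBe('');
  });
});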
14 changes: 12 additions & 2 deletions scripts/index.js
@@ -1,13 +1,23 @@
const { resolve } = require('path');
const fs = require('fs');
const rssFeed = require('./build-rss');
const buildPostList = require('./build-post-list');
const { buildPostList } = require('./build-post-list');
const buildCaseStudiesList = require('./casestudies');
const buildAdoptersList = require('./adopters');
const buildFinanceInfoList = require('./finance');

async function start() {
await buildPostList();

const postDirectories = [
['pages/blog', '/blog'],
['pages/docs', '/docs'],
['pages/about', '/about']
];
const basePath = 'pages';
const writeFilePath = resolve(__dirname, '../config', 'posts.json');

await buildPostList(postDirectories, basePath, writeFilePath);

rssFeed(
'blog',
'AsyncAPI Initiative Blog RSS Feed',
(The remaining 4 changed files are not shown here.)

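Note: buildPostList now validates its arguments before walking any directories, as seen in the diff above. A hedged sketch of how that validation might be checked follows; Jest and the module path are assumptions, and this is not the actual test suite added in #3284.

// Illustrative sketch only. Assumes Jest and this module path.
const { buildPostList } = require('../scripts/build-post-list');

describe('buildPostList input validation', () => {
  it('rejects when the directories array is empty', async () => {
    await expect(buildPostList([], 'pages', '/tmp/posts.json'))
      .rejects.toThrow('postDirectories array is empty');
  });

  it('rejects when basePath is missing', async () => {
    await expect(buildPostList([['pages/blog', '/blog']], undefined, '/tmp/posts.json'))
      .rejects.toThrow('basePath is required');
  });
});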