From 3dc1fbb71b294f1fac43af3bcae4692ce9f04022 Mon Sep 17 00:00:00 2001 From: Rob Ellison Date: Tue, 22 Aug 2023 12:50:59 +0000 Subject: [PATCH] feat: extend cache --- components/content/ContentPage.jsx | 4 +- lib/content/menuContent.js | 115 ++++++++++++---------- lib/github/index.js | 70 ++++++++++--- lib/hooks/usePageContent.js | 151 ++++++++++++++++++----------- lib/hooks/usePageMenu.js | 95 +++++++++++++----- 5 files changed, 287 insertions(+), 148 deletions(-) diff --git a/components/content/ContentPage.jsx b/components/content/ContentPage.jsx index e7fc1eff..e08e2957 100644 --- a/components/content/ContentPage.jsx +++ b/components/content/ContentPage.jsx @@ -245,8 +245,8 @@ function ContentMenu({ content, file, handleContentChange, handlePageReset, cont let directory = path.dirname(file); - console.log('ContentMenu:directory ', directory) - console.log('ContentMenu:collection ', context) + // console.log('ContentMenu:directory ', directory) + // console.log('ContentMenu:collection ', context) let chaptersMenu = [] if (content && content[directory]) { diff --git a/lib/content/menuContent.js b/lib/content/menuContent.js index fa30f9ae..410849c1 100644 --- a/lib/content/menuContent.js +++ b/lib/content/menuContent.js @@ -4,6 +4,35 @@ import { cacheWrite, cacheRead } from '@/lib/redis'; import path from 'path'; export async function getFrontMatter(config) { + const branchSha = await getBranchSha(config.owner, config.repo, config.branch) + // const frontmatterCache = `structure:${config.path}:${branchSha}`; + + // let files = null; + // const files = JSON.parse(await cacheRead(frontmatterCache)); + // if (!files) { // files aren't cached + + // } + + + + const cacheKey = `frontmatter:${config.path}:${branchSha}`; + + let cachedContent; + try { + cachedContent = JSON.parse(await cacheRead(cacheKey)); + } catch (error) { + // Handle the error when JSON parsing fails (invalid data). + console.error('Error parsing cached content:', error); + cachedContent = null; // Or use a default value if required. + } + if (cachedContent) { + console.info('[Menu][Cache][HIT]:', cacheKey) + // If the content was found in the cache, return it + return cachedContent; + } else { + console.info('[Menu][Cache][MISS]:', cacheKey) + } + const files = await getAllFiles( config.owner, config.repo, @@ -13,12 +42,9 @@ export async function getFrontMatter(config) { ".md*" ); - const branchSha = await getBranchSha(config.owner, config.repo, config.branch) - const cacheKey = `frontmatter:${config.path}:${branchSha}`; - const filesPromises = files.map((file) => { - return getFileContent(config.owner, config.repo, config.branch, file) + return getFileContent(config.owner, config.repo, config.branch, file.path, file.sha) .then((content) => { const matterData = matter(content, { excerpt: false }).data || null; if (matterData) { @@ -28,35 +54,20 @@ export async function getFrontMatter(config) { } } } - return { file: file, frontmatter: matterData }; + return { file: file.path, frontmatter: matterData }; }) .catch((error) => { - // console.error(`Error processing file ${file}: ${error}`); + console.error(`[Menu][Cache][Error] processing file: ${file}: ${error}`); return { file: null, frontmatter: null }; }); }); - let cachedContent; - try { - cachedContent = JSON.parse(await cacheRead(cacheKey)); - } catch (error) { - // Handle the error when JSON parsing fails (invalid data). - console.error('Error parsing cached content:', error); - cachedContent = null; // Or use a default value if required. 
- } - if (cachedContent) { - console.info('[GitHub][Cache][HIT]:', cacheKey) - // If the content was found in the cache, return it - return cachedContent; - } else { - console.info('[GitHub][Cache][MISS]:', cacheKey) - } cachedContent = await Promise.all(filesPromises); try { - await cacheWrite(cacheKey, JSON.stringify(cachedContent), 60 * 60 * 24); // cache for 24 hours + await cacheWrite(cacheKey, JSON.stringify(cachedContent), 60 * 60 * 24 * 7); // cache for 7 days } catch (error) { - console.error('[GitHub][Cache][FAIL]:', cacheKey, ' : ', error) + console.error('[Menu][Cache][FAIL]:', cacheKey, ' : ', error) } return await Promise.all(cachedContent); @@ -75,21 +86,23 @@ export async function getContent(siteConfig) { console.error('Error parsing cached content:', error); cachedContent = null; // Or use a default value if required. } - if (cachedContent) { - console.info('[GitHub][Cache][HIT]:', cacheKey) + if (cachedContent && cachedContent.length > 0) { + console.info('[Menu][Cache][HIT]:', cacheKey) // If the content was found in the cache, return it return cachedContent; } else { - console.info('[GitHub][Cache][MISS]:', cacheKey) + console.info('[Menu][Cache][MISS]:', cacheKey) } const files = await getAllFiles(siteConfig.owner, siteConfig.repo, siteConfig.branch, siteConfig.path, true, '.md*'); + console.log('[Menu][getAllFiles]: ', files) const contentPromises = files.map((file) => { return getFileContent( siteConfig.owner, siteConfig.repo, siteConfig.branch, - file + file.path, + file.sha ) .then(content => { const matterData = matter(content, { excerpt: false }).data || null; @@ -123,11 +136,11 @@ async function getSolutions(siteConfig) { // Check if the content is in the cache const cachedContent = JSON.parse(await cacheRead(cacheKey)); if (cachedContent) { - console.info('[GitHub][Cache][HIT]:', cacheKey) + console.info('[Menu][Cache][HIT]:', cacheKey) // If the content was found in the cache, return it return cachedContent; } else { - console.info('[GitHub][Cache][MISS]:', cacheKey) + console.info('[Menu][Cache][MISS]:', cacheKey) } const solutions = await getAllFiles(siteConfig.content.solutions.owner, siteConfig.content.solutions.repo, siteConfig.content.solutions.branch, siteConfig.content.solutions.path, true, '.md*'); @@ -170,11 +183,11 @@ async function getKnowledge(siteConfig) { // Check if the content is in the cache const cachedContent = JSON.parse(await cacheRead(cacheKey)); if (cachedContent) { - console.info('[GitHub][Cache][HIT]:', cacheKey) + console.info('[Menu][Cache][HIT]:', cacheKey) // If the content was found in the cache, return it return cachedContent; } else { - console.info('[GitHub][Cache][MISS]:', cacheKey) + console.info('[Menu][Cache][MISS]:', cacheKey) } const knowledge = await getAllFiles(siteConfig.content.knowledge.owner, siteConfig.content.knowledge.repo, siteConfig.content.knowledge.branch, siteConfig.content.knowledge.path, true, '.md*'); @@ -210,11 +223,11 @@ async function getDesigns(siteConfig) { // Check if the content is in the cache const cachedContent = JSON.parse(await cacheRead(cacheKey)); if (cachedContent) { - console.info('[GitHub][Cache][HIT]:', cacheKey) + console.info('[Menu][Cache][HIT]:', cacheKey) // If the content was found in the cache, return it return cachedContent; } else { - console.info('[GitHub][Cache][MISS]:', cacheKey) + console.info('[Menu][Cache][MISS]:', cacheKey) } const contentFiles = await getAllFiles(siteConfig.content.designs.owner, siteConfig.content.designs.repo, siteConfig.content.designs.branch, 
siteConfig.content.designs.path, true, '.md*'); @@ -250,11 +263,11 @@ async function getServices(siteConfig) { // Check if the content is in the cache const cachedContent = JSON.parse(await cacheRead(cacheKey)); if (cachedContent) { - console.info('[GitHub][Cache][HIT]:', cacheKey) + console.info('[Menu][Cache][HIT]:', cacheKey) // If the content was found in the cache, return it return cachedContent; } else { - console.info('[GitHub][Cache][MISS]:', cacheKey) + console.info('[Menu][Cache][MISS]:', cacheKey) } const contentFiles = await getAllFiles(siteConfig.content.services.owner, siteConfig.content.services.repo, siteConfig.content.services.branch, siteConfig.content.services.path, true, '.md*'); @@ -289,11 +302,11 @@ export async function getMenuStructureSolutions(siteConfig) { // Check if the content is in the cache const cachedContent = JSON.parse(await cacheRead(cacheKey)); if (cachedContent) { - console.info('[GitHub][Cache][HIT]:', cacheKey) + console.info('[Menu][Cache][HIT]:', cacheKey) // If the content was found in the cache, return it return cachedContent; } else { - console.info('[GitHub][Cache][MISS]:', cacheKey) + console.info('[Menu][Cache][MISS]:', cacheKey) } const parent = 'solution'; const solutions = await getSolutions(siteConfig); @@ -432,6 +445,7 @@ export async function getMenuStructure(siteConfig, collection) { } const parent = 'siteConfig.path'; const primary = await getContent(collection); + // console.info('[getMenuStructure][primary]:', primary) let relatedFiles = {}; // all the files in related collections let relatedContent = {}; // only the files that are children of the primary content @@ -451,16 +465,17 @@ export async function getMenuStructure(siteConfig, collection) { for (let x of primary) { if ( x.file && - x.file.split('/').length === 3 && - x.file.match(/(_index\.md*|index\.md*)$/) && + x.file.path && + x.file.path.split('/').length === 3 && + x.file.path.match(/(_index\.md*|index\.md*)$/) && x.frontmatter && x.frontmatter.title ) { primaryMenu.push({ label: x.frontmatter.title, - url: x.file.startsWith('/') ? x.file : '/' + x.file, + url: x.file.path.startsWith('/') ? x.file.path : '/' + x.file.path, }); - indexFiles.add(path.dirname(x.file)); // Add directory name to the Set + indexFiles.add(path.dirname(x.file.path)); // Add directory name to the Set } } @@ -468,22 +483,23 @@ export async function getMenuStructure(siteConfig, collection) { for (let x of primary) { if ( x.file && - x.file.split('/').length > 2 && // skip any files in the root of the directory - !x.file.match(/(_index\.md*|index\.md*)$/) && + x.file.path && + x.file.path.split('/').length > 2 && // skip any files in the root of the directory + !x.file.path.match(/(_index\.md*|index\.md*)$/) && x.frontmatter && x.frontmatter.title ) { // let directory = x.file.split("/")[1]; // Extract directory name - let directory = path.dirname(x.file) + let directory = path.dirname(x.file.path) // console.log('getMenuStructure') - let collection = x.file.split("/")[0]; // Extract directory name + let collection = x.file.path.split("/")[0]; // Extract directory name // Only add file to solutionMenu if there is no corresponding index.md if (!indexFiles.has(directory)) { primaryMenu.push({ label: x.frontmatter.title, - url: x.file.startsWith('/') ? x.file : '/' + x.file, + url: x.file.path.startsWith('/') ? 
x.file.path : '/' + x.file.path, }); } @@ -498,7 +514,7 @@ export async function getMenuStructure(siteConfig, collection) { // add the related content relatedContent[directory]['chapters'].push({ label: x.frontmatter.title, - url: x.file.startsWith('/') ? x.file : '/' + x.file, + url: x.file.path.startsWith('/') ? x.file.path : '/' + x.file.path, }); } @@ -517,7 +533,8 @@ export async function getMenuStructure(siteConfig, collection) { for (let x of relatedFiles[collectionItem]) { if ( x.file && - x.file.split('/').length > 2 && // skip any files in the root of the directory + x.file.path && + x.file.path.split('/').length > 2 && // skip any files in the root of the directory x.frontmatter && x.frontmatter.title ) { @@ -539,7 +556,7 @@ export async function getMenuStructure(siteConfig, collection) { // add the related content relatedContent[directory][collectionItem].push({ label: x.frontmatter.title, - url: x.file.startsWith('/') ? x.file : '/' + x.file, + url: x.file.path.startsWith('/') ? x.file.path : '/' + x.file.path, }); } diff --git a/lib/github/index.js b/lib/github/index.js index a1da2c36..271fe22d 100644 --- a/lib/github/index.js +++ b/lib/github/index.js @@ -76,21 +76,52 @@ export async function getBranchSha(owner, repo, branch) { } // Function to get a file content -export async function getFileContent(owner, repo, branch, path) { +export async function getFileContent(owner, repo, branch, path, sha = null) { - const branchSha = await getBranchSha(owner, repo, branch,) + // if the SHA is passed, this is a specific revision of a file. + // if not, pull back the generic revision of the file, stored with the branch sha instead. + + const branchSha = await getBranchSha(owner, repo, branch) // Generate a unique cache key for this file - const cacheKey = `github:getContent:${owner}:${repo}:${branchSha}:${path}`; + let cacheKey = ''; + if (sha) { + cacheKey = `github:getContent:${owner}:${repo}:${sha}:${path}`; + } else { + cacheKey = `github:getContent:${owner}:${repo}:${branchSha}:${path}`; + } // Check if the content is in the cache const cachedContent = await cacheRead(cacheKey); + + let ref = null; + if (cachedContent) { + try { + ref = JSON.parse(cachedContent) + // console.info('[Github][Read][Ref]:', ref.ref) + + if (ref && ref.ref) { + const cachedRefContent = await cacheRead(`github:getContent:${owner}:${repo}:${ref.ref}:${path}`); + if (cachedRefContent) { + console.info('[Github][Read][HIT/Sha]:', cacheKey, ' ref:', ref.ref) + return cachedRefContent; + } else { + console.info('[Github][Read][MISS/Sha]:', cacheKey, ' ref:', ref.ref) + } + } else { + console.info('[Github][Read][HIT/Branch]:', cacheKey) + return cachedContent; + } + } catch (error) { + console.info('[Github][Read][Error]:', cacheKey, 'error: ', error) + return cachedContent; + } // console.info('[Github][Cache][HIT]:',cacheKey ) // If the content was found in the cache, return it return cachedContent; } else { - console.info('[Github][Cache][MISS]:', cacheKey) + console.info('[Github][Read][MISS/All]:', cacheKey) } if (!gitHubInstance) { @@ -114,12 +145,26 @@ export async function getFileContent(owner, repo, branch, path) { content = Buffer.from(response.data.content, "utf-8"); } try { - // Store the content in the cache before returning it - await cacheWrite(cacheKey, content, 60 * 60 * 24); // cache for 24 hours - } catch (error) { - console.error(`[GitHub][getBranchSha] Error writing cache: ${error}`); - } + + if (response.data.sha) { + // Store a link from the branchSha to the file + ref = {ref: 
response.data.sha} + await cacheWrite(`github:getContent:${owner}:${repo}:${branchSha}:${path}`, JSON.stringify(ref)); // cache perpetually a reference to the file + await cacheWrite(`github:getContent:${owner}:${repo}:${response.data.sha}:${path}`, content); // cache perpetually the file contents + console.debug(`[GitHub][Write][CachedFileAndRef] : ${path}`); + + } else { + // Store the content in the cache before returning it + await cacheWrite(cacheKey, content); // cache for 24 hours + console.debug(`[GitHub][Write][Cache] : ${path}`); + + } + + } catch (error) { + console.error(`[GitHub][Write] Error writing cache: ${error}`); + + } return content; } catch (error) { console.error(`[GitHub][getFileContent] Error retrieving file (${cacheKey}) content: ${error}`); @@ -163,6 +208,7 @@ async function getAllFilesRecursive( recursive, filter, ) { + const response = await gitHubInstance.repos.getContent({ owner, repo, @@ -171,8 +217,8 @@ async function getAllFilesRecursive( }); const fileObjects = response.data.filter((obj) => obj.type === "file"); - let files = fileObjects.map((obj) => obj.path); - + let files = fileObjects.map((obj) => ({ path: obj.path, sha: obj.sha})); + // console.log('files: ', files) if (recursive) { const dirObjects = response.data.filter((obj) => obj.type === "dir"); for (const dirObject of dirObjects) { @@ -191,7 +237,7 @@ async function getAllFilesRecursive( } if (filter) { const regex = createFilterRegex(filter); - files = files.filter((file) => regex.test(file)); + files = files.filter((file) => regex.test(file.path)); } return files; diff --git a/lib/hooks/usePageContent.js b/lib/hooks/usePageContent.js index d7228261..efd40592 100644 --- a/lib/hooks/usePageContent.js +++ b/lib/hooks/usePageContent.js @@ -93,22 +93,25 @@ export function usePageContent(initialContent, initialFile, initialMenuStructure setContent({ frontmatter: frontmatter }) } - const handlePageReset = async () => { - setContext({ file: initialFile, ...collection }); + const handlePageReset = () => { + + console.log('reset: ', initialFile, ' :collection: ', collection) + + setContext({ file: initialFile, ...collectionName(initialFile, collection) }); setContent(initialContent); clearQueryParams(); setRelPage(null); } function collectionName(url, collection) { - console.debug('collectionName:url', url) + // console.debug('collectionName:url', url) const fileCollection = url && url.includes("/") ? url.split("/")[0] : ""; - console.debug('collectionName:fileCollection', fileCollection) + // console.debug('collectionName:fileCollection', fileCollection) if (siteConfig.content[fileCollection]) { const newCollection = siteConfig.content[fileCollection] - console.debug('collectionName:newCollection', newCollection) + // console.debug('collectionName:newCollection', newCollection) return newCollection } else { @@ -141,22 +144,62 @@ export function usePageContent(initialContent, initialFile, initialMenuStructure const data = await response.text(); setContent(data); // set the collection from the file + const newContext = { ...context, file: url, ...collectionName(url, collection) }; - // const collectionName = url && url.includes("/") ? 
url.split("/")[1] : ""; - // console.debug('handleContentChange:collectionName', collectionName) - setContext({ ...context, file: url, ...collectionName(url, collection) }) // force a refresh of the page - + if (relPage) { // there is a direct link to a file via a queryparameter + relativeContent(relPage); + } - // if (siteConfig.content[collectionName]) { - // const newCollection = siteConfig.content[collectionName] - // // console.debug('handleContentChange:newCollection', newCollection) - // setContext({ ...context, file: url, ...newCollection }) // force a refresh of the page - // } else { - // setContext({ ...context, file: url, ...collection }) // force a refresh of the page - // } + if (url && url.endsWith(".md")) { // load normal markdown files + setContentSource('api'); + const { mdxContent, frontmatter } = loadMDX(data, 'md'); + if (frontmatter.external_repo || frontmatter.external) { + // console.log('loading external content : ', newContext, frontmatter) + // await githubExternalContent(frontmatter, newContext); + const { newContent: extContent, newContext: extContext } = await githubExternal(frontmatter, newContext); + // console.log('handleContentChange:extContent: ', extContent) + + const { mdxContent: mdxExtContent, frontmatter: extFrontmatter } = loadMDX(extContent, 'md'); + // console.log('handleContentChange:mdxExtContent: ', mdxExtContent) + + if (extContext && !deepEqual(context, extContext)) { + setContext(extContext); + } + setPageContent({ content: mdxExtContent, frontmatter: extFrontmatter }); + // setContent(newContent); + } else { + setContext(newContext) // force a refresh of the page + setPageContent({ content: mdxContent, frontmatter: frontmatter }); + } + + } else if (url && url.file.endsWith(".mdx")) { // load MDX files + setContentSource('api'); + const { mdxContent, frontmatter } = loadMDX(data, 'mdx'); + + if (frontmatter.external_repo || frontmatter.external) { + // console.log('loading external content : ', newContext, frontmatter) + // await githubExternalContent(frontmatter, newContext); + const { newContent: extContent, newContext: extContext } = await githubExternal(frontmatter, newContext); + // console.log('handleContentChange:extContent: ', extContent) + + const { mdxContent: mdxExtContent, frontmatter: extFrontmatter } = loadMDX(extContent, 'md'); + // console.log('handleContentChange:mdxExtContent: ', mdxExtContent) + + if (extContext && !deepEqual(context, extContext)) { + setContext(extContext); + } + setPageContent({ content: mdxExtContent, frontmatter: extFrontmatter }); + // setContent(newContent); + } else { + setContext(newContext) // force a refresh of the page + setPageContent({ content: mdxContent, frontmatter: frontmatter }); + } + } else if (url && url.endsWith(".etherpad")) { // load etherpad files + loadPad(url); + } } else { @@ -168,31 +211,35 @@ export function usePageContent(initialContent, initialFile, initialMenuStructure } }; - useEffect(() => { // when the context changes, reprocess it - // console.log('useEffect[context]:context: ', context) - // console.log('useEffect[context]:relPage: ', relPage) - const loadPad = async (file) => { - const cacheKey = 'etherpad:/' + file; - const { frontmatter } = await fetchPadDetails(cacheKey); - setContentSource('etherpad:' + frontmatter.padID); - }; + const loadPad = async (file) => { + const cacheKey = 'etherpad:/' + file; + const { frontmatter } = await fetchPadDetails(cacheKey); + setContentSource('etherpad:' + frontmatter.padID); + }; - const relativeContent = async (file) => { - 
await handleContentChange(file, true); - }; + const relativeContent = async (file) => { + await handleContentChange(file, true); + }; - const githubExternalContent = async (frontmatter, existingContext) => { - const { newContent, newContext } = await githubExternal(frontmatter, existingContext); - // setFile(file); - if (newContext && !deepEqual(context, newContext)) { - setContext(newContext); - // console.log('useEffect:githubExternalContent/newContent: ', newContent) - // console.log('useEffect:githubExternalContent/newContext: ', newContext) + const githubExternalContent = async (frontmatter, existingContext) => { + const { newContent, newContext } = await githubExternal(frontmatter, existingContext); + // setFile(file); + if (newContext && !deepEqual(context, newContext)) { + setContext(newContext); + // console.log('useEffect:githubExternalContent/newContent: ', newContent) + // console.log('useEffect:githubExternalContent/newContext: ', newContext) - } - setContent(newContent); - }; + } + setContent(newContent); + }; + + + useEffect(() => { // when the context changes, reprocess it + // console.log('useEffect[context]:context: ', context) + // console.log('useEffect[context]:relPage: ', relPage) + + console.log('useEffect[context]'); if (relPage) { // there is a direct link to a file via a queryparameter relativeContent(relPage); @@ -203,38 +250,24 @@ export function usePageContent(initialContent, initialFile, initialMenuStructure setContentSource('api'); const { mdxContent, frontmatter } = loadMDX(content ? content : initialContent, 'md'); - if (frontmatter.external_repo || frontmatter.external) { - githubExternalContent(frontmatter, context); - } + // if (frontmatter.external_repo || frontmatter.external) { + // githubExternalContent(frontmatter, context); + // } setPageContent({ content: mdxContent, frontmatter: frontmatter }); } else if (context && context.file && context.file.endsWith(".mdx")) { // load MDX files setContentSource('api'); const { mdxContent, frontmatter } = loadMDX(content ? content : initialContent, 'mdx'); - if (frontmatter.external_repo || frontmatter.external) { // the content is a link to elsewhere in Github. load it. - githubExternalContent(frontmatter, context); - } + // if (frontmatter.external_repo || frontmatter.external) { // the content is a link to elsewhere in Github. load it. + // githubExternalContent(frontmatter, context); + // } setPageContent({ content: mdxContent, frontmatter: frontmatter }); } else if (context && context.file && context.file.endsWith(".etherpad")) { // load etherpad files loadPad(context.file); } - }, [context]); - + }, []); - function logObjectDifferences(oldObj, newObj) { - const diff = {}; - - // Compare oldObj and newObj properties - for (const key in newObj) { - if (oldObj[key] !== newObj[key]) { - diff[key] = { - old: oldObj[key], - new: newObj[key], - }; - } - } - } useEffect(() => { // add to the menu structure const fetchPadMenu = async () => { @@ -249,7 +282,7 @@ export function usePageContent(initialContent, initialFile, initialMenuStructure console.debug('fetchDataAndUpdateState:padsMenu', padsMenu) // console.debug('fetchDataAndUpdateState:menuStructure', menuStructure) - console.debug('fetchDataAndUpdateState:collection', collection) + // console.debug('fetchDataAndUpdateState:collection', collection) // const newPrimary = Array.isArray(menuStructure?.primary) ? 
menuStructure.primary : [] diff --git a/lib/hooks/usePageMenu.js b/lib/hooks/usePageMenu.js index 42f12981..d07348af 100644 --- a/lib/hooks/usePageMenu.js +++ b/lib/hooks/usePageMenu.js @@ -22,9 +22,9 @@ import path from 'path'; export function usePageMenu(initialMenuStructure, collection) { - + const [menuStructure, setMenuStructure] = useState(null); - + useEffect(() => { // add to the menu structure const fetchPadMenu = async () => { const res = await fetch(`/api/structure?cache=true`); // fetch draft content to add to the menus. @@ -33,27 +33,30 @@ export function usePageMenu(initialMenuStructure, collection) { }; const fetchDataAndUpdateState = async () => { - const padsMenu = await fetchPadMenu(); - - - const newMenuStructure = { - ...initialMenuStructure, - primary: [ - ...(Array.isArray(initialMenuStructure?.primary) - ? initialMenuStructure.primary - : []), - ...(Array.isArray(padsMenu?.collections[collection]) - ? padsMenu.collections[collection] - : []), - ], - relatedContent: deepMerge( - initialMenuStructure?.relatedContent || {}, - padsMenu?.relatedContent || {}), - }; - - const mergedMenu = mergePadMenu(newMenuStructure, padsMenu) - - setMenuStructure(mergedMenu); + const padsMenu = await fetchPadMenu(); + + + + const newPrimary = (Array.isArray(initialMenuStructure?.primary) + ? initialMenuStructure.primary + : []).concat(Array.isArray(padsMenu?.collections[collection.path]) + ? padsMenu.collections[collection.path] + : []) + + + const newRelatedContent = deepMerge( + initialMenuStructure?.relatedContent || {}, + padsMenu?.relatedContent || {}) + + + let mergedPrimary = newPrimary; + newPrimary.forEach((item, index) => { + mergedPrimary[index] = mergeObjects(newPrimary[index], padsMenu.relatedContent); + // console.debug('fetchDataAndUpdateState:mergedPrimary[index]', mergedPrimary[index]) + + }); + + setMenuStructure({ primary: mergedPrimary, relatedContent: newRelatedContent }); }; fetchDataAndUpdateState(); @@ -68,8 +71,8 @@ export function usePageMenu(initialMenuStructure, collection) { // merge the child entries from Etherpad function mergePadMenu(newMenuStructure, padsMenu) { - let mergedStructure = newMenuStructure.primary ; - + let mergedStructure = newMenuStructure.primary; + Object.keys(padsMenu.relatedContent).forEach(key => { newMenuStructure.primary.forEach((item, index) => { @@ -85,7 +88,7 @@ function mergePadMenu(newMenuStructure, padsMenu) { }); - return {...newMenuStructure, primary: mergedStructure} + return { ...newMenuStructure, primary: mergedStructure } } @@ -135,3 +138,43 @@ function deepMergeObj(target, source) { } return target; } + + +function mergeObjects(obj1, obj2) { + + // console.debug('mergeObjects:obj2', obj2) + + // Extract the directory from the URL + let directory = obj1.url.split('/').slice(0, -1).join('/'); + // Remove the leading '/' if it exists + if (directory.startsWith('/')) { + directory = directory.substring(1); + } + + // Check if the directory key exists in the second object + if (obj2[directory]) { + + // console.debug('mergeObjects:obj2[directory]', obj2[directory]) + + // Iterate over each category + Object.keys(obj2[directory]).forEach(category => { + // If category does not exist in obj1, create it + if (!obj1.children[category]) { + obj1.children[category] = []; + } + + obj2[directory][category].forEach(item => { + // Check if URL exists in obj1's category array + if (!obj1.children[category].some(e => e.url === item.url)) { + obj1.children[category].push(item); + } + }); + }); + } + + return obj1; +} + + + +
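For context, a minimal sketch (not part of the diff above) of the read path the extended cache in lib/github/index.js relies on: the branch-sha key now stores a small {ref: <file sha>} pointer and the file body is cached under the file sha, so unchanged files keep their cache entries across branch updates. cacheRead is the same '@/lib/redis' helper the patch uses and is assumed to resolve to a string or null; resolveCachedFile is an illustrative name, not a function in the patch.

    import { cacheRead } from '@/lib/redis';

    // Follow the branch-sha pointer to the per-file-sha entry, roughly
    // mirroring the lookup order getFileContent() uses after this change.
    async function resolveCachedFile(owner, repo, branchSha, filePath) {
      const branchKey = `github:getContent:${owner}:${repo}:${branchSha}:${filePath}`;
      const cached = await cacheRead(branchKey);
      if (!cached) return null; // nothing cached for this branch revision yet

      try {
        const ref = JSON.parse(cached); // new-style entries hold { ref: <file sha> }
        if (ref && ref.ref) {
          const shaKey = `github:getContent:${owner}:${repo}:${ref.ref}:${filePath}`;
          return await cacheRead(shaKey); // immutable content keyed by file sha
        }
      } catch (err) {
        // older entries stored the raw file content under the branch key itself
      }
      return cached;
    }

On a miss, getFileContent() in the patch falls back to the GitHub API and writes both entries (the branch-keyed pointer and the sha-keyed content), which is why the TTL arguments were dropped from those cacheWrite calls.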