From e3da3b3ddc960a22c6aa535b2c5a5b2c563dddf2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Erik=20Pedersen?= Date: Sat, 3 Aug 2019 17:27:40 +0200 Subject: [PATCH] Simplify page tree logic This is preparation for #6041. For historic reasons, the code for building the section tree and the taxonomies were very much separate. This works, but makes it hard to extend, maintain, and possibly not so fast as it could be. This simplification also introduces 3 slightly breaking changes, which I suspect most people will be pleased about. See referenced issues: Fixes #6154 Fixes #6153 Fixes #6152 --- common/herrors/errors.go | 5 +- common/maps/maps_get.go | 28 ++++ go.mod | 1 + go.sum | 2 + hugolib/hugo_sites.go | 132 +-------------- hugolib/hugo_sites_build.go | 44 +++-- hugolib/hugo_sites_build_test.go | 1 - hugolib/page.go | 46 +++-- hugolib/page__common.go | 2 + hugolib/page__data.go | 24 +-- hugolib/page__per_output.go | 3 +- hugolib/pagebundler_test.go | 76 +++++++++ hugolib/pagecollections.go | 125 ++++++++++++++ hugolib/pages_capture.go | 3 +- hugolib/pages_map.go | 280 +++++++++++++++++++++++++++++++ hugolib/site.go | 64 +------ hugolib/taxonomy.go | 6 +- resources/page/weighted.go | 11 +- 18 files changed, 590 insertions(+), 263 deletions(-) create mode 100644 common/maps/maps_get.go create mode 100644 hugolib/pages_map.go diff --git a/common/herrors/errors.go b/common/herrors/errors.go index 1a61070501a..e484ecb8002 100644 --- a/common/herrors/errors.go +++ b/common/herrors/errors.go @@ -50,9 +50,10 @@ func FprintStackTrace(w io.Writer, err error) { // Recover is a helper function that can be used to capture panics. 
// Put this at the top of a method/function that crashes in a template: // defer herrors.Recover() -func Recover() { +func Recover(args ...interface{}) { if r := recover(); r != nil { - fmt.Println("stacktrace from panic: \n" + string(debug.Stack())) + args = append(args, "stacktrace from panic: \n"+string(debug.Stack()), "\n") + fmt.Println(args...) } } diff --git a/common/maps/maps_get.go b/common/maps/maps_get.go new file mode 100644 index 00000000000..38a5f504af3 --- /dev/null +++ b/common/maps/maps_get.go @@ -0,0 +1,28 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package maps + +import ( + "github.com/spf13/cast" +) + +// GetString tries to get a value with key from map m and convert it to a string. +// It will return an empty string if not found or if it cannot be converted to a string. 
+func GetString(m map[string]interface{}, key string) string { + v, found := m[key] + if !found { + return "" + } + return cast.ToString(v) +} diff --git a/go.mod b/go.mod index 616dce102e0..6937eead72d 100644 --- a/go.mod +++ b/go.mod @@ -8,6 +8,7 @@ require ( github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 github.com/alecthomas/chroma v0.6.4 github.com/alecthomas/repr v0.0.0-20181024024818-d37bc2a10ba1 // indirect + github.com/armon/go-radix v1.0.0 github.com/aws/aws-sdk-go v1.19.40 github.com/bep/debounce v1.2.0 github.com/bep/gitmap v1.1.0 diff --git a/go.sum b/go.sum index 94249500f7c..3bcfdcb7b84 100644 --- a/go.sum +++ b/go.sum @@ -58,6 +58,8 @@ github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRF github.com/anacrolix/dms v0.0.0-20180117034613-8af4925bffb5/go.mod h1:DGqLjaZ3ziKKNRt+U5Q9PLWJ52Q/4rxfaaH/b3QYKaE= github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= +github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= +github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/aws/aws-sdk-go v1.15.27/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0= github.com/aws/aws-sdk-go v1.18.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.19.11/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go index 6ad8715645b..f4fdcf4c530 100644 --- a/hugolib/hugo_sites.go +++ b/hugolib/hugo_sites.go @@ -14,9 +14,7 @@ package hugolib import ( - "fmt" "io" - "path" "path/filepath" "sort" "strings" @@ -623,142 +621,14 @@ func (h *HugoSites) renderCrossSitesArtifacts() error { s.siteCfg.sitemap.Filename, h.toSiteInfos(), smLayouts...) } -// createMissingPages creates home page, taxonomies etc. 
that isnt't created as an -// effect of having a content file. -func (h *HugoSites) createMissingPages() error { - - for _, s := range h.Sites { - if s.isEnabled(page.KindHome) { - // home pages - homes := s.findWorkPagesByKind(page.KindHome) - if len(homes) > 1 { - panic("Too many homes") - } - var home *pageState - if len(homes) == 0 { - home = s.newPage(page.KindHome) - s.workAllPages = append(s.workAllPages, home) - } else { - home = homes[0] - } - - s.home = home - } - - // Will create content-less root sections. - newSections := s.assembleSections() - s.workAllPages = append(s.workAllPages, newSections...) - - taxonomyTermEnabled := s.isEnabled(page.KindTaxonomyTerm) - taxonomyEnabled := s.isEnabled(page.KindTaxonomy) - - // taxonomy list and terms pages - taxonomies := s.Language().GetStringMapString("taxonomies") - if len(taxonomies) > 0 { - taxonomyPages := s.findWorkPagesByKind(page.KindTaxonomy) - taxonomyTermsPages := s.findWorkPagesByKind(page.KindTaxonomyTerm) - - // Make them navigable from WeightedPage etc. - for _, p := range taxonomyPages { - ni := p.getTaxonomyNodeInfo() - if ni == nil { - // This can be nil for taxonomies, e.g. an author, - // with a content file, but no actual usage. - // Create one. 
- sections := p.SectionsEntries() - if len(sections) < 2 { - // Invalid state - panic(fmt.Sprintf("invalid taxonomy state for %q with sections %v", p.pathOrTitle(), sections)) - } - ni = p.s.taxonomyNodes.GetOrAdd(sections[0], path.Join(sections[1:]...)) - } - ni.TransferValues(p) - } - for _, p := range taxonomyTermsPages { - p.getTaxonomyNodeInfo().TransferValues(p) - } - - for _, plural := range taxonomies { - if taxonomyTermEnabled { - foundTaxonomyTermsPage := false - for _, p := range taxonomyTermsPages { - if p.SectionsPath() == plural { - foundTaxonomyTermsPage = true - break - } - } - - if !foundTaxonomyTermsPage { - n := s.newPage(page.KindTaxonomyTerm, plural) - n.getTaxonomyNodeInfo().TransferValues(n) - s.workAllPages = append(s.workAllPages, n) - } - } - - if taxonomyEnabled { - for termKey := range s.Taxonomies[plural] { - - foundTaxonomyPage := false - - for _, p := range taxonomyPages { - sectionsPath := p.SectionsPath() - - if !strings.HasPrefix(sectionsPath, plural) { - continue - } - - singularKey := strings.TrimPrefix(sectionsPath, plural) - singularKey = strings.TrimPrefix(singularKey, "/") - - if singularKey == termKey { - foundTaxonomyPage = true - break - } - } - - if !foundTaxonomyPage { - info := s.taxonomyNodes.Get(plural, termKey) - if info == nil { - panic("no info found") - } - - n := s.newTaxonomyPage(info.term, info.plural, info.termKey) - info.TransferValues(n) - s.workAllPages = append(s.workAllPages, n) - } - } - } - } - } - } - - return nil -} - func (h *HugoSites) removePageByFilename(filename string) { for _, s := range h.Sites { s.removePageFilename(filename) } } +// TODO(bep) cm func (h *HugoSites) createPageCollections() error { - for _, s := range h.Sites { - for _, p := range s.rawAllPages { - if !s.isEnabled(p.Kind()) { - continue - } - - shouldBuild := s.shouldBuild(p) - s.buildStats.update(p) - if shouldBuild { - if p.m.headless { - s.headlessPages = append(s.headlessPages, p) - } else { - s.workAllPages = 
append(s.workAllPages, p) - } - } - } - } allPages := newLazyPagesFactory(func() page.Pages { var pages page.Pages diff --git a/hugolib/hugo_sites_build.go b/hugolib/hugo_sites_build.go index d20932599c3..1da8091472f 100644 --- a/hugolib/hugo_sites_build.go +++ b/hugolib/hugo_sites_build.go @@ -18,7 +18,6 @@ import ( "context" "fmt" "runtime/trace" - "sort" "github.com/gohugoio/hugo/output" @@ -235,26 +234,41 @@ func (h *HugoSites) assemble(config *BuildCfg) error { } } - if err := h.createPageCollections(); err != nil { - return err - } - if config.whatChanged.source { - for _, s := range h.Sites { - if err := s.assembleTaxonomies(); err != nil { - return err - } - } + //for _, s := range h.Sites { + // TODO(bep) cm + //if err := s.assembleTaxonomies(); err != nil { + // return err + //} + //} } - // Create pagexs for the section pages etc. without content file. - if err := h.createMissingPages(); err != nil { + // Create pages for the section pages etc. without content file. + // TODO(bep) cm + /*if err := h.createMissingPages(); err != nil { return err - } + }*/ for _, s := range h.Sites { - s.setupSitePages() - sort.Stable(s.workAllPages) + if err := s.createPagesMap(s); err != nil { + return err + } + + if err := s.pagesMap.assembleTaxonomies(s); err != nil { + return err + } + + if err := s.createWorkAllPages(); err != nil { + return err + } + + // TODO(bep) cm + //s.setupSitePages() + //sort.Stable(s.workAllPages) + } + + if err := h.createPageCollections(); err != nil { + return err } return nil diff --git a/hugolib/hugo_sites_build_test.go b/hugolib/hugo_sites_build_test.go index 876f21cfa6d..0ca824bccc7 100644 --- a/hugolib/hugo_sites_build_test.go +++ b/hugolib/hugo_sites_build_test.go @@ -365,7 +365,6 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { require.NotNil(t, enTags["tag1"]) require.NotNil(t, frTags["FRtag1"]) b.AssertFileContent("public/fr/plaques/FRtag1/index.html", 
"FRtag1|Bonjour|http://example.com/blog/fr/plaques/FRtag1/") - b.AssertFileContent("public/en/tags/tag1/index.html", "tag1|Hello|http://example.com/blog/en/tags/tag1/") // Check Blackfriday config require.True(t, strings.Contains(content(doc1fr), "«"), content(doc1fr)) diff --git a/hugolib/page.go b/hugolib/page.go index 676cba762ac..4fb392aefa5 100644 --- a/hugolib/page.go +++ b/hugolib/page.go @@ -23,6 +23,8 @@ import ( "sort" "strings" + "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/hugofs/files" "github.com/bep/gitmap" @@ -121,6 +123,15 @@ func (p *pageState) MarshalJSON() ([]byte, error) { return page.MarshalPageToJSON(p) } +func (p *pageState) getPages() page.Pages { + b := p.bucket + if b == nil { + return nil + } + return b.pages + +} + func (p *pageState) Pages() page.Pages { p.pagesInit.Do(func() { if p.pages != nil { @@ -132,19 +143,18 @@ func (p *pageState) Pages() page.Pages { switch p.Kind() { case page.KindPage: case page.KindHome: + // TODO(bep) cm pages = p.s.RegularPages() + case page.KindSection: + pages = p.getPages() case page.KindTaxonomy: - termInfo := p.getTaxonomyNodeInfo() - taxonomy := p.s.Taxonomies[termInfo.plural].Get(termInfo.termKey) + termInfo := p.bucket + plural := maps.GetString(termInfo.meta, "plural") + term := maps.GetString(termInfo.meta, "term") + taxonomy := p.s.Taxonomies[plural].Get(term) pages = taxonomy.Pages() case page.KindTaxonomyTerm: - plural := p.getTaxonomyNodeInfo().plural - // A list of all page.KindTaxonomy pages with matching plural - for _, p := range p.s.findPagesByKind(page.KindTaxonomy) { - if p.SectionsEntries()[0] == plural { - pages = append(pages, p) - } - } + pages = p.getPages() case kind404, kindSitemap, kindRobotsTXT: pages = p.s.Pages() } @@ -296,9 +306,9 @@ func (p *pageState) getLayoutDescriptor() output.LayoutDescriptor { section = sections[0] } case page.KindTaxonomyTerm: - section = p.getTaxonomyNodeInfo().singular + // TODO(bep) cm section = 
p.getTaxonomyNodeInfo().singular case page.KindTaxonomy: - section = p.getTaxonomyNodeInfo().parent.singular + // TODO(bep) cm section = p.getTaxonomyNodeInfo().parent.singular default: } @@ -743,20 +753,6 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { return nil } -func (p *pageState) getTaxonomyNodeInfo() *taxonomyNodeInfo { - info := p.s.taxonomyNodes.Get(p.SectionsEntries()...) - - if info == nil { - // There can be unused content pages for taxonomies (e.g. author that - // has not written anything, yet), and these will not have a taxonomy - // node created in the assemble taxonomies step. - return nil - } - - return info - -} - func (p *pageState) sortParentSections() { if p.parent == nil { return diff --git a/hugolib/page__common.go b/hugolib/page__common.go index f9ceee8c905..7d10d0f7685 100644 --- a/hugolib/page__common.go +++ b/hugolib/page__common.go @@ -30,6 +30,8 @@ type pageCommon struct { s *Site m *pageMeta + bucket *pagesMapBucket + // Laziliy initialized dependencies. 
init *lazy.Init diff --git a/hugolib/page__data.go b/hugolib/page__data.go index 79a64931b4a..8bc818a00a0 100644 --- a/hugolib/page__data.go +++ b/hugolib/page__data.go @@ -16,6 +16,8 @@ package hugolib import ( "sync" + "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/resources/page" ) @@ -36,22 +38,22 @@ func (p *pageData) Data() interface{} { switch p.Kind() { case page.KindTaxonomy: - termInfo := p.getTaxonomyNodeInfo() - pluralInfo := termInfo.parent + bucket := p.bucket + meta := bucket.meta + plural := maps.GetString(meta, "plural") + singular := maps.GetString(meta, "singular") - singular := pluralInfo.singular - plural := pluralInfo.plural - term := termInfo.term - taxonomy := p.s.Taxonomies[plural].Get(termInfo.termKey) + taxonomy := p.s.Taxonomies[plural].Get(maps.GetString(meta, "termKey")) p.data[singular] = taxonomy - p.data["Singular"] = singular + p.data["Singular"] = meta["singular"] p.data["Plural"] = plural - p.data["Term"] = term + p.data["Term"] = meta["term"] case page.KindTaxonomyTerm: - info := p.getTaxonomyNodeInfo() - plural := info.plural - singular := info.singular + bucket := p.bucket + meta := bucket.meta + plural := maps.GetString(meta, "plural") + singular := maps.GetString(meta, "singular") p.data["Singular"] = singular p.data["Plural"] = plural diff --git a/hugolib/page__per_output.go b/hugolib/page__per_output.go index 177e0420a8b..aa0fcd48835 100644 --- a/hugolib/page__per_output.go +++ b/hugolib/page__per_output.go @@ -27,9 +27,8 @@ import ( bp "github.com/gohugoio/hugo/bufferpool" "github.com/gohugoio/hugo/tpl" - "github.com/gohugoio/hugo/output" - "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/resource" ) diff --git a/hugolib/pagebundler_test.go b/hugolib/pagebundler_test.go index 5c21dc4725d..a06a1e6d418 100644 --- a/hugolib/pagebundler_test.go +++ b/hugolib/pagebundler_test.go @@ -1040,6 +1040,10 @@ slug: 
leaf b.WithContent("sv/b1/data2.json", "sv: data2") b.WithContent("nb/b1/data2.json", "nb: data2") + b.WithContent("en/b3/_index.md", createPage("en: branch")) + b.WithContent("en/b3/p1.md", createPage("en: page")) + b.WithContent("en/b3/data1.json", "en: data") + b.Build(BuildCfg{}) b.AssertFileContent("public/en/index.html", @@ -1156,3 +1160,75 @@ Num Pages: {{ len .Site.Pages }} ) } + +func TestPageBundlerBasic(t *testing.T) { + b := newTestSitesBuilder(t) + b.WithConfigFile("toml", ` +baseURL = "https://example.org" +[outputs] + home = ["HTML", "RSS"] + taxonomy = ["HTML", "RSS"] + taxonomyTerm = ["HTML", "RSS"] + section = ["HTML", "RSS"] + +`) + pageContent := func(w int) string { + return fmt.Sprintf(` +--- +title: "Page" +weight: %d +--- + +`, w) + } + + pageContentTaxo := func(w int) string { + return fmt.Sprintf(` +--- +title: "Page With Taxo" +weight: %d +categories: ["a", "b"] +tags: ["blue", "green"] +--- + +`, w) + } + + b.WithContent( + "_index.md", pageContent(70), + "r1/page1.md", pageContent(80), + "r1/index.md", pageContent(90), + "r1.md", pageContent(100), + "s1/page1.md", pageContent(120), + "s1/page2.md", pageContent(1), + "s1/_index.md", pageContent(300), + "s1/s1_s1/_index.md", pageContent(400), + "s1/s1_s1/page1.md", pageContent(500), + "s1/s1_s2/_index.md", pageContent(600), + "s1/s1_s2/page1.md", pageContent(700), + "s1/subfolder/page1.md", pageContentTaxo(800), + "categories/_index.md", pageContent(900), + "tags/_index.md", pageContent(1000), + ) + + b.WithTemplates("index.html", ` +{{ template "sect" (dict "page" . "level" 0) }} +{{ define "sect" }} +{{- $page := .page -}} +{{- $level := .level -}} +{{ range seq $level }} {{ end }} Sect:|{{ $page.Kind }}|{{ $page.Path }} +{{ range $page.Pages }} +{{ range seq $level }} {{ end }} Sect Page:|{{ .Kind }}|{{ .Path }}|{{ .Section }}| +{{ end }} +{{ range $page.Sections }}{{ template "sect" (dict "page" . 
"level" (add $level 1) ) }}{{ end }} +{{ end }} +`) + + b.Build(BuildCfg{}) + + //s := b.H.Sites[0] + + //s.pagesMap.dump() + b.AssertFileContent("public/index.html", "asdf") + +} diff --git a/hugolib/pagecollections.go b/hugolib/pagecollections.go index aedcf40901e..f36493f949e 100644 --- a/hugolib/pagecollections.go +++ b/hugolib/pagecollections.go @@ -17,9 +17,12 @@ import ( "fmt" "path" "path/filepath" + "sort" "strings" "sync" + "github.com/gohugoio/hugo/resources/resource" + "github.com/pkg/errors" "github.com/gohugoio/hugo/cache" @@ -32,6 +35,7 @@ var ambiguityFlag = &pageState{} // PageCollections contains the page collections for a site. type PageCollections struct { + pagesMap *pagesMap // Includes absolute all pages (of all types), including drafts etc. rawAllPages pageStatePages @@ -389,3 +393,124 @@ func (c *PageCollections) clearResourceCacheForPage(page *pageState) { page.s.ResourceSpec.DeleteCacheByPrefix(page.targetPaths().SubResourceBaseTarget) } } + +func (c *PageCollections) createPagesMap(s *Site) error { + c.pagesMap = newPagesMap(s) + + rootSections := make(map[string]bool) + + for _, p := range c.rawAllPages { + if p.IsPage() { + rootSections[p.Section()] = true + continue + } + c.pagesMap.addPage(p) + } + + // Create missing home page and the first level sections if no + // _index provided. + s.home = c.pagesMap.getOrCreateHome() + for k, _ := range rootSections { + c.pagesMap.createSectionIfNotExists(k) + } + + // Attach the regular pages to their section. 
+ for _, p := range c.rawAllPages { + if p.IsNode() { + continue + } + c.pagesMap.addPage(p) + } + + return nil +} + +func (c *PageCollections) createWorkAllPages() error { + c.workAllPages = make(pageStatePages, 0, len(c.rawAllPages)) + + var ( + parentSection *pageState + parentSectionLevel int + homeDates *resource.Dates + sectionDates *resource.Dates + ) + + var bucketsToRemove []string + + c.pagesMap.r.Walk(func(s string, v interface{}) bool { + bucket := v.(*pagesMapBucket) + + if bucket.owner.IsHome() { + + if resource.IsZeroDates(bucket.owner) { + // Calculate dates from the page tree. + homeDates = &bucket.owner.m.Dates + } + } + + currentLevel := strings.Count(s, "/") + + if parentSection == nil { + parentSection = bucket.owner + parentSectionLevel = currentLevel + sectionDates = nil + if resource.IsZeroDates(parentSection) { + sectionDates = &parentSection.m.Dates + } + } else { + bucket.owner.parent = parentSection + parentSection.subSections = append(parentSection.subSections, bucket.owner) + if currentLevel != parentSectionLevel { + parentSection = bucket.owner + parentSectionLevel = currentLevel + sectionDates = nil + if resource.IsZeroDates(parentSection) { + sectionDates = &parentSection.m.Dates + } + } + } + + tmp := bucket.pages[:0] + for _, x := range bucket.pages { + if c.pagesMap.s.shouldBuild(x) { + tmp = append(tmp, x) + } + } + bucket.pages = tmp + + if len(bucket.pages) == 0 { + if bucket.owner.IsSection() && bucket.owner.File().IsZero() { + bucketsToRemove = append(bucketsToRemove, s) + return false + } + } + + c.workAllPages = append(c.workAllPages, bucket.owner) + + if !bucket.view { + for _, p := range bucket.pages { + ps := p.(*pageState) + ps.parent = bucket.owner + c.workAllPages = append(c.workAllPages, ps) + + if homeDates != nil { + homeDates.UpdateDateAndLastmodIfAfter(ps) + } + + if sectionDates != nil { + sectionDates.UpdateDateAndLastmodIfAfter(ps) + } + } + } + + return false + }) + + for _, key := range bucketsToRemove { + 
c.pagesMap.r.Delete(key) + } + + sort.Sort(c.workAllPages) + + return nil +} diff --git a/hugolib/pages_capture.go b/hugolib/pages_capture.go index 361b87e84c3..1ace6d028da 100644 --- a/hugolib/pages_capture.go +++ b/hugolib/pages_capture.go @@ -36,9 +36,8 @@ import ( "github.com/gohugoio/hugo/source" - "github.com/gohugoio/hugo/hugofs" - "github.com/gohugoio/hugo/common/loggers" + "github.com/gohugoio/hugo/common/loggers" + "github.com/gohugoio/hugo/hugofs" "github.com/spf13/afero" ) diff --git a/hugolib/pages_map.go b/hugolib/pages_map.go new file mode 100644 index 00000000000..cb8cf79d052 --- /dev/null +++ b/hugolib/pages_map.go @@ -0,0 +1,280 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package hugolib + +import ( + "fmt" + "path" + "path/filepath" + "strings" + + radix "github.com/armon/go-radix" + "github.com/spf13/cast" + + "github.com/gohugoio/hugo/resources/page" +) + +type pagesMap struct { + r *radix.Tree + s *Site +} + +type pagesMapBucket struct { + // Set if the pages in this bucket are also present in another bucket. + view bool + + // Some additional metadata attached to this node. 
+ meta map[string]interface{} + + owner *pageState + pages page.Pages +} + +func (m *pagesMap) cleanKey(key string) string { + if !strings.HasPrefix(key, "/") { + key = "/" + key + } + if key != "/" { + key = strings.TrimRight(key, "/") + } + return key +} + +func (m *pagesMap) addBucket(p *pageState) { + key := m.getKey(p) + m.addBucketFor(key, p, nil) +} + +func (m *pagesMap) addBucketFor(key string, p *pageState, meta map[string]interface{}) *pagesMapBucket { + var isView bool + switch p.Kind() { + case page.KindTaxonomy, page.KindTaxonomyTerm: + isView = true + } + + bucket := &pagesMapBucket{owner: p, view: isView} + p.bucket = bucket + + m.r.Insert(key, bucket) + + return bucket +} + +func (m *pagesMap) getOrCreateHome() *pageState { + var home *pageState + b, found := m.r.Get("/") + if !found { + home = m.s.newPage(page.KindHome) + m.addBucketFor("/", home, nil) + } else { + home = b.(*pagesMapBucket).owner + } + + return home +} + +func (m *pagesMap) createSectionIfNotExists(section string) { + key := m.cleanKey(section) + _, found := m.r.Get(key) + if !found { + p := m.s.newPage(page.KindSection, section) + m.addBucketFor(key, p, nil) + } +} + +func (m *pagesMap) getKey(p *pageState) string { + if !p.File().IsZero() { + return m.cleanKey(filepath.ToSlash(p.File().Dir())) + } + return m.cleanKey(p.SectionsPath()) +} + +func (m *pagesMap) addPage(p *pageState) { + if !p.IsPage() { + m.addBucket(p) + return + } + + key := m.getKey(p) + + var bucket *pagesMapBucket + + _, v, found := m.r.LongestPrefix(key) + if !found { + panic(fmt.Sprintf("[BUG] bucket with key %q not found", key)) + } + + bucket = v.(*pagesMapBucket) + p.bucket = bucket + + bucket.pages = append(bucket.pages, p) + +} + +func (m *pagesMap) Get(key string) *pagesMapBucket { + key = m.cleanKey(key) + v, found := m.r.Get(key) + if !found { + return nil + } + + return v.(*pagesMapBucket) +} + +func (m *pagesMap) assembleTaxonomies(s *Site) error { + s.Taxonomies = make(TaxonomyList) + + type 
bucketKey struct { + plural string + termKey string + } + + // Temporary cache. + taxonomyBuckets := make(map[bucketKey]*pagesMapBucket) + + for singular, plural := range s.siteCfg.taxonomiesConfig { + s.Taxonomies[plural] = make(Taxonomy) + var bucket *pagesMapBucket + bkey := bucketKey{ + plural: plural, + } + b, found := taxonomyBuckets[bkey] + if found { + bucket = b + } else { + // Create the page and bucket + n := s.newPage(page.KindTaxonomyTerm, plural) + meta := map[string]interface{}{ + "singular": singular, + "plural": plural, + } + + key := m.cleanKey(plural) + bucket = m.addBucketFor(key, n, meta) + + // Add it to the temporary cache. + taxonomyBuckets[bkey] = bucket + + } + + } + + addTaxonomy := func(singular, plural, term string, weight int, p page.Page) { + bkey := bucketKey{ + plural: plural, + } + + termKey := s.getTaxonomyKey(term) + + b1 := taxonomyBuckets[bkey] + + var b2 *pagesMapBucket + bkey.termKey = termKey + b, found := taxonomyBuckets[bkey] + if found { + b2 = b + } else { + // Create the page and bucket + n := s.newPage(page.KindTaxonomy, plural, term) + meta := map[string]interface{}{ + "singular": singular, + "plural": plural, + "term": term, + "termKey": termKey, + } + key := m.cleanKey(path.Join(plural, termKey)) + b2 = m.addBucketFor(key, n, meta) + taxonomyBuckets[bkey] = b2 + } + + b1.pages = append(b1.pages, p) + + w := page.NewWeightedPage(weight, p, b2.owner) + + s.Taxonomies[plural].add(termKey, w) + + b1.owner.m.Dates.UpdateDateAndLastmodIfAfter(p) + b2.owner.m.Dates.UpdateDateAndLastmodIfAfter(p) + } + + m.r.Walk(func(k string, v interface{}) bool { + b := v.(*pagesMapBucket) + if b.view { + return false + } + + for singular, plural := range s.siteCfg.taxonomiesConfig { + for _, p := range b.pages { + + vals := getParam(p, plural, false) + + w := getParamToLower(p, plural+"_weight") + weight, err := cast.ToIntE(w) + if err != nil { + m.s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %q", w, p.Path()) + 
// weight will equal zero, so let the flow continue + } + + if vals != nil { + if v, ok := vals.([]string); ok { + for _, idx := range v { + addTaxonomy(singular, plural, idx, weight, p) + } + } else if v, ok := vals.(string); ok { + addTaxonomy(singular, plural, v, weight, p) + } else { + m.s.Log.ERROR.Printf("Invalid %s in %q\n", plural, p.Path()) + } + } + + } + } + return false + }) + + for _, plural := range s.siteCfg.taxonomiesConfig { + for k := range s.Taxonomies[plural] { + s.Taxonomies[plural][k].Sort() + } + } + + return nil +} + +func (m *pagesMap) dump() { + m.r.Walk(func(s string, v interface{}) bool { + b := v.(*pagesMapBucket) + fmt.Println("-------\n", s, ":", b.owner.Kind(), ":") + if b.owner != nil { + fmt.Println("Owner:", b.owner.Path()) + } + for _, p := range b.pages { + fmt.Println(p.Path()) + } + return false + }) + +} + +func (m *pagesMap) key(p *pageState) string { + return p.Path() +} + +func newPagesMap(s *Site) *pagesMap { + return &pagesMap{ + r: radix.New(), + s: s, + } + +} diff --git a/hugolib/site.go b/hugolib/site.go index 882874db947..7f79044cd10 100644 --- a/hugolib/site.go +++ b/hugolib/site.go @@ -1090,6 +1090,7 @@ func (s *Site) process(config BuildCfg) (err error) { } +// TODO(bep) cm func (s *Site) setupSitePages() { var homeDates *resource.Dates if s.home != nil { @@ -1483,69 +1484,6 @@ func (s *Site) getTaxonomyKey(key string) string { return strings.ToLower(s.PathSpec.MakePath(key)) } -func (s *Site) assembleTaxonomies() error { - s.Taxonomies = make(TaxonomyList) - taxonomies := s.siteCfg.taxonomiesConfig - for _, plural := range taxonomies { - s.Taxonomies[plural] = make(Taxonomy) - } - - s.taxonomyNodes = &taxonomyNodeInfos{ - m: make(map[string]*taxonomyNodeInfo), - getKey: s.getTaxonomyKey, - } - - s.Log.INFO.Printf("found taxonomies: %#v\n", taxonomies) - - for singular, plural := range taxonomies { - parent := s.taxonomyNodes.GetOrCreate(plural, "") - parent.singular = singular - - addTaxonomy := func(plural, 
term string, weight int, p page.Page) { - key := s.getTaxonomyKey(term) - - n := s.taxonomyNodes.GetOrCreate(plural, term) - n.parent = parent - - w := page.NewWeightedPage(weight, p, n.owner) - - s.Taxonomies[plural].add(key, w) - - n.UpdateFromPage(w.Page) - parent.UpdateFromPage(w.Page) - } - - for _, p := range s.workAllPages { - vals := getParam(p, plural, false) - - w := getParamToLower(p, plural+"_weight") - weight, err := cast.ToIntE(w) - if err != nil { - s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %q", w, p.pathOrTitle()) - // weight will equal zero, so let the flow continue - } - - if vals != nil { - if v, ok := vals.([]string); ok { - for _, idx := range v { - addTaxonomy(plural, idx, weight, p) - } - } else if v, ok := vals.(string); ok { - addTaxonomy(plural, v, weight, p) - } else { - s.Log.ERROR.Printf("Invalid %s in %q\n", plural, p.pathOrTitle()) - } - } - } - - for k := range s.Taxonomies[plural] { - s.Taxonomies[plural][k].Sort() - } - } - - return nil -} - // Prepare site for a new full build. func (s *Site) resetBuildState() { s.relatedDocsHandler = s.relatedDocsHandler.Clone() diff --git a/hugolib/taxonomy.go b/hugolib/taxonomy.go index a7965ec26cb..22a1b79956c 100644 --- a/hugolib/taxonomy.go +++ b/hugolib/taxonomy.go @@ -177,7 +177,7 @@ type taxonomyNodeInfo struct { parent *taxonomyNodeInfo // Either of Kind taxonomyTerm (parent) or taxonomy - owner *page.PageWrapper + owner page.Page } func (t *taxonomyNodeInfo) UpdateFromPage(p page.Page) { @@ -187,7 +187,7 @@ func (t *taxonomyNodeInfo) UpdateFromPage(p page.Page) { } func (t *taxonomyNodeInfo) TransferValues(p *pageState) { - t.owner.Page = p + //t.owner.Page = p if p.Lastmod().IsZero() && p.Date().IsZero() { p.m.Dates.UpdateDateAndLastmodIfAfter(t.dates) } @@ -195,6 +195,7 @@ func (t *taxonomyNodeInfo) TransferValues(p *pageState) { // Maps either plural or plural/term to a taxonomy node. 
// TODO(bep) consolidate somehow with s.Taxonomies +// TODO(bep) cm remove all of this type taxonomyNodeInfos struct { m map[string]*taxonomyNodeInfo getKey func(string) string @@ -230,7 +231,6 @@ func (t taxonomyNodeInfos) GetOrCreate(plural, term string) *taxonomyNodeInfo { plural: plural, termKey: termKey, term: term, - owner: &page.PageWrapper{}, // Page will be assigned later. } t.m[key] = n diff --git a/resources/page/weighted.go b/resources/page/weighted.go index 3f75bcc3cfe..48ed736ce0f 100644 --- a/resources/page/weighted.go +++ b/resources/page/weighted.go @@ -42,7 +42,7 @@ func (p WeightedPages) Page() Page { return nil } - return first.owner.Page + return first.owner } // A WeightedPage is a Page with a weight. @@ -54,15 +54,10 @@ type WeightedPage struct { // manual .Site.GetPage lookups. It is implemented in this roundabout way // because we cannot add additional state to the WeightedPages slice // without breaking lots of templates in the wild. - owner *PageWrapper + owner Page } -// PageWrapper wraps a Page. -type PageWrapper struct { - Page -} - -func NewWeightedPage(weight int, p Page, owner *PageWrapper) WeightedPage { +func NewWeightedPage(weight int, p Page, owner Page) WeightedPage { return WeightedPage{Weight: weight, Page: p, owner: owner} }