diff --git a/commands/convert.go b/commands/convert.go index c4f88a24537..78e7021560a 100644 --- a/commands/convert.go +++ b/commands/convert.go @@ -126,7 +126,7 @@ func (cc *convertCmd) convertContents(format metadecoders.Format) error { site.Log.FEEDBACK.Println("processing", len(site.AllPages), "content files") for _, p := range site.AllPages { - if err := cc.convertAndSavePage(p, site, format); err != nil { + if err := cc.convertAndSavePage(p.(*hugolib.Page), site, format); err != nil { return err } } @@ -135,7 +135,7 @@ func (cc *convertCmd) convertContents(format metadecoders.Format) error { func (cc *convertCmd) convertAndSavePage(p *hugolib.Page, site *hugolib.Site, targetFormat metadecoders.Format) error { // The resources are not in .Site.AllPages. - for _, r := range p.Resources.ByType("page") { + for _, r := range p.Resources().ByType("page") { if err := cc.convertAndSavePage(r.(*hugolib.Page), site, targetFormat); err != nil { return err } diff --git a/commands/hugo.go b/commands/hugo.go index 3690c0ad519..c446c4865be 100644 --- a/commands/hugo.go +++ b/commands/hugo.go @@ -977,7 +977,7 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher, if navigate { if onePageName != "" { - p = c.hugo.GetContentPage(onePageName) + p = c.hugo.GetContentPage(onePageName).(*hugolib.Page) } } diff --git a/commands/list.go b/commands/list.go index 9922e957df8..1fb2fd2a815 100644 --- a/commands/list.go +++ b/commands/list.go @@ -17,6 +17,7 @@ import ( "path/filepath" "github.com/gohugoio/hugo/hugolib" + "github.com/gohugoio/hugo/resources/resource" "github.com/spf13/cobra" jww "github.com/spf13/jwalterweatherman" ) @@ -66,8 +67,9 @@ List requires a subcommand, e.g. 
` + "`hugo list drafts`.", } for _, p := range sites.Pages() { - if p.IsDraft() { - jww.FEEDBACK.Println(filepath.Join(p.File.Dir(), p.File.LogicalName())) + pp := p.(*hugolib.Page) + if pp.IsDraft() { + jww.FEEDBACK.Println(filepath.Join(pp.File.Dir(), pp.File.LogicalName())) } } @@ -102,8 +104,9 @@ posted in the future.`, } for _, p := range sites.Pages() { - if p.IsFuture() { - jww.FEEDBACK.Println(filepath.Join(p.File.Dir(), p.File.LogicalName())) + if resource.IsFuture(p) { + pp := p.(*hugolib.Page) + jww.FEEDBACK.Println(filepath.Join(pp.File.Dir(), pp.File.LogicalName())) } } @@ -138,8 +141,9 @@ expired.`, } for _, p := range sites.Pages() { - if p.IsExpired() { - jww.FEEDBACK.Println(filepath.Join(p.File.Dir(), p.File.LogicalName())) + if resource.IsExpired(p) { + pp := p.(*hugolib.Page) + jww.FEEDBACK.Println(filepath.Join(pp.File.Dir(), pp.File.LogicalName())) } } diff --git a/hugolib/collections.go b/hugolib/collections.go index cf75d373221..09065b696ad 100644 --- a/hugolib/collections.go +++ b/hugolib/collections.go @@ -102,3 +102,11 @@ func (pages Pages) ToResources() resource.Resources { } return r } + +func (p Pages) Group(key interface{}, in interface{}) (interface{}, error) { + pages, err := toPages(in) + if err != nil { + return nil, err + } + return PageGroup{Key: key, Pages: pages}, nil +} diff --git a/hugolib/embedded_shortcodes_test.go b/hugolib/embedded_shortcodes_test.go index 3a6220b532f..f3f07654a3e 100644 --- a/hugolib/embedded_shortcodes_test.go +++ b/hugolib/embedded_shortcodes_test.go @@ -20,6 +20,8 @@ import ( "strings" "testing" + "github.com/spf13/cast" + "path/filepath" "github.com/gohugoio/hugo/deps" @@ -69,7 +71,9 @@ func doTestShortcodeCrossrefs(t *testing.T, relative bool) { require.Len(t, s.RegularPages, 1) - output := string(s.RegularPages[0].content()) + content, err := s.RegularPages[0].Content() + require.NoError(t, err) + output := cast.ToString(content) if !strings.Contains(output, expected) { 
t.Errorf("Got\n%q\nExpected\n%q", output, expected) diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go index 9ce1c438e75..42f68c3a222 100644 --- a/hugolib/hugo_sites.go +++ b/hugolib/hugo_sites.go @@ -32,6 +32,8 @@ import ( "github.com/gohugoio/hugo/langs" "github.com/gohugoio/hugo/i18n" + "github.com/gohugoio/hugo/resources/page" + "github.com/gohugoio/hugo/resources/resource" "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/tpl/tplimpl" ) @@ -136,7 +138,7 @@ func (h *HugoSites) langSite() map[string]*Site { // GetContentPage finds a Page with content given the absolute filename. // Returns nil if none found. -func (h *HugoSites) GetContentPage(filename string) *Page { +func (h *HugoSites) GetContentPage(filename string) page.Page { for _, s := range h.Sites { pos := s.rawAllPages.findPagePosByFilename(filename) if pos == -1 { @@ -495,11 +497,14 @@ func (h *HugoSites) assignMissingTranslations() error { for _, nodeType := range []string{KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm} { nodes := h.findPagesByKindIn(nodeType, allPages) + // TODO(bep) page // Assign translations for _, t1 := range nodes { + t1p := t1.(*Page) for _, t2 := range nodes { - if t1.isNewTranslation(t2) { - t1.translations = append(t1.translations, t2) + t2p := t2.(*Page) + if t1p.isNewTranslation(t2p) { + t1p.translations = append(t1p.translations, t2p) } } } @@ -507,8 +512,10 @@ func (h *HugoSites) assignMissingTranslations() error { // Now we can sort the translations. 
for _, p := range allPages { - if len(p.translations) > 0 { - pageBy(languagePageSort).Sort(p.translations) + // TODO(bep) page + pp := p.(*Page) + if len(pp.translations) > 0 { + pageBy(languagePageSort).Sort(pp.translations) } } return nil @@ -548,7 +555,7 @@ func (h *HugoSites) createMissingPages() error { if s.isEnabled(KindTaxonomyTerm) { foundTaxonomyTermsPage := false for _, p := range taxonomyTermsPages { - if p.sectionsPath() == plural { + if p.(*Page).sectionsPath() == plural { foundTaxonomyTermsPage = true break } @@ -570,7 +577,7 @@ func (h *HugoSites) createMissingPages() error { key = s.PathSpec.MakePathSanitized(key) } for _, p := range taxonomyPages { - sectionsPath := p.sectionsPath() + sectionsPath := p.(*Page).sectionsPath() if !strings.HasPrefix(sectionsPath, plural) { continue @@ -631,18 +638,20 @@ func (h *HugoSites) removePageByFilename(filename string) { func (h *HugoSites) setupTranslations() { for _, s := range h.Sites { for _, p := range s.rawAllPages { - if p.Kind == kindUnknown { - p.Kind = p.kindFromSections() + // TODO(bep) page .(*Page) and all others + pp := p.(*Page) + if p.Kind() == kindUnknown { + pp.kind = pp.kindFromSections() } - if !p.s.isEnabled(p.Kind) { + if !pp.s.isEnabled(p.Kind()) { continue } - shouldBuild := p.shouldBuild() - s.updateBuildStats(p) + shouldBuild := pp.shouldBuild() + s.updateBuildStats(pp) if shouldBuild { - if p.headless { + if pp.headless { s.headlessPages = append(s.headlessPages, p) } else { s.Pages = append(s.Pages, p) @@ -676,13 +685,13 @@ func (h *HugoSites) setupTranslations() { func (s *Site) preparePagesForRender(start bool) error { for _, p := range s.Pages { - if err := p.prepareForRender(start); err != nil { + if err := p.(*Page).prepareForRender(start); err != nil { return err } } for _, p := range s.headlessPages { - if err := p.prepareForRender(start); err != nil { + if err := p.(*Page).prepareForRender(start); err != nil { return err } } @@ -720,11 +729,11 @@ func (s *Site) 
updateBuildStats(page *Page) { s.draftCount++ } - if page.IsFuture() { + if resource.IsFuture(page) { s.futureCount++ } - if page.IsExpired() { + if resource.IsExpired(page) { s.expiredCount++ } } diff --git a/hugolib/hugo_sites_build.go b/hugolib/hugo_sites_build.go index ec5070fa814..2acf2ea5063 100644 --- a/hugolib/hugo_sites_build.go +++ b/hugolib/hugo_sites_build.go @@ -237,19 +237,20 @@ func (h *HugoSites) assemble(config *BuildCfg) error { for _, pages := range []Pages{s.Pages, s.headlessPages} { for _, p := range pages { // May have been set in front matter - if len(p.outputFormats) == 0 { - p.outputFormats = s.outputFormats[p.Kind] + pp := p.(*Page) + if len(pp.outputFormats) == 0 { + pp.outputFormats = s.outputFormats[p.Kind()] } - if p.headless { + if pp.headless { // headless = 1 output format only - p.outputFormats = p.outputFormats[:1] + pp.outputFormats = pp.outputFormats[:1] } - for _, r := range p.Resources.ByType(pageResourceType) { - r.(*Page).outputFormats = p.outputFormats + for _, r := range p.Resources().ByType(pageResourceType) { + r.(*Page).outputFormats = pp.outputFormats } - if err := p.initPaths(); err != nil { + if err := p.(*Page).initPaths(); err != nil { return err } diff --git a/hugolib/hugo_sites_build_test.go b/hugolib/hugo_sites_build_test.go index 83b96b7f4e7..436c87aa6c7 100644 --- a/hugolib/hugo_sites_build_test.go +++ b/hugolib/hugo_sites_build_test.go @@ -234,7 +234,7 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { gp1 := b.H.GetContentPage(filepath.FromSlash("content/sect/doc1.en.md")) require.NotNil(t, gp1) - require.Equal(t, "doc1", gp1.title) + require.Equal(t, "doc1", gp1.Title()) gp2 := b.H.GetContentPage(filepath.FromSlash("content/dummysect/notfound.md")) require.Nil(t, gp2) @@ -247,12 +247,12 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { assert.Equal(5, len(enSite.RegularPages)) assert.Equal(32, len(enSite.AllPages)) - doc1en := 
enSite.RegularPages[0] + doc1en := enSite.RegularPages[0].(*Page) permalink := doc1en.Permalink() require.Equal(t, "http://example.com/blog/en/sect/doc1-slug/", permalink, "invalid doc1.en permalink") require.Len(t, doc1en.Translations(), 1, "doc1-en should have one translation, excluding itself") - doc2 := enSite.RegularPages[1] + doc2 := enSite.RegularPages[1].(*Page) permalink = doc2.Permalink() require.Equal(t, "http://example.com/blog/en/sect/doc2/", permalink, "invalid doc2 permalink") @@ -263,11 +263,10 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { // and do no not do any language code prefixing. require.Equal(t, "http://example.com/blog/superbob/", permalink, "invalid doc3 permalink") - require.Equal(t, "/superbob", doc3.URL(), "invalid url, was specified on doc3") b.AssertFileContent("public/superbob/index.html", "doc3|Hello|en") require.Equal(t, doc2.PrevPage, doc3, "doc3 should follow doc2, in .PrevPage") - doc1fr := doc1en.Translations()[0] + doc1fr := doc1en.Translations()[0].(*Page) permalink = doc1fr.Permalink() require.Equal(t, "http://example.com/blog/fr/sect/doc1/", permalink, "invalid doc1fr permalink") @@ -275,10 +274,9 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { require.Equal(t, doc1fr.Translations()[0], doc1en, "doc1-fr should have doc1-en as translation") require.Equal(t, "fr", doc1fr.Language().Lang) - doc4 := enSite.AllPages[4] + doc4 := enSite.AllPages[4].(*Page) permalink = doc4.Permalink() require.Equal(t, "http://example.com/blog/fr/sect/doc4/", permalink, "invalid doc4 permalink") - require.Equal(t, "/blog/fr/sect/doc4/", doc4.URL()) require.Len(t, doc4.Translations(), 0, "found translations for doc4") @@ -299,7 +297,8 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { require.Len(t, frSite.AllPages, 32, "should have 32 total pages (including translations and nodes)") for _, frenchPage := range frSite.RegularPages { - 
require.Equal(t, "fr", frenchPage.Lang()) + p := frenchPage.(*Page) + require.Equal(t, "fr", p.Lang()) } // See https://github.com/gohugoio/hugo/issues/4285 @@ -331,11 +330,11 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { homeEn := enSite.getPage(KindHome) require.NotNil(t, homeEn) require.Len(t, homeEn.Translations(), 3) - require.Equal(t, "fr", homeEn.Translations()[0].Lang()) - require.Equal(t, "nn", homeEn.Translations()[1].Lang()) - require.Equal(t, "På nynorsk", homeEn.Translations()[1].title) - require.Equal(t, "nb", homeEn.Translations()[2].Lang()) - require.Equal(t, "På bokmål", homeEn.Translations()[2].title, configSuffix) + require.Equal(t, "fr", homeEn.Translations()[0].Language().Lang) + require.Equal(t, "nn", homeEn.Translations()[1].Language().Lang) + require.Equal(t, "På nynorsk", homeEn.Translations()[1].Title()) + require.Equal(t, "nb", homeEn.Translations()[2].Language().Lang) + require.Equal(t, "På bokmål", homeEn.Translations()[2].Title(), configSuffix) require.Equal(t, "Bokmål", homeEn.Translations()[2].Language().LanguageName, configSuffix) sectFr := frSite.getPage(KindSection, "sect") @@ -343,20 +342,20 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { require.Equal(t, "fr", sectFr.Lang()) require.Len(t, sectFr.Translations(), 1) - require.Equal(t, "en", sectFr.Translations()[0].Lang()) - require.Equal(t, "Sects", sectFr.Translations()[0].title) + require.Equal(t, "en", sectFr.Translations()[0].(*Page).Lang()) + require.Equal(t, "Sects", sectFr.Translations()[0].Title()) nnSite := sites[2] require.Equal(t, "nn", nnSite.Language.Lang) taxNn := nnSite.getPage(KindTaxonomyTerm, "lag") require.NotNil(t, taxNn) require.Len(t, taxNn.Translations(), 1) - require.Equal(t, "nb", taxNn.Translations()[0].Lang()) + require.Equal(t, "nb", taxNn.Translations()[0].(*Page).Lang()) taxTermNn := nnSite.getPage(KindTaxonomy, "lag", "sogndal") require.NotNil(t, taxTermNn) require.Len(t, 
taxTermNn.Translations(), 1) - require.Equal(t, "nb", taxTermNn.Translations()[0].Lang()) + require.Equal(t, "nb", taxTermNn.Translations()[0].(*Page).Lang()) // Check sitemap(s) b.AssertFileContent("public/sitemap.xml", @@ -392,27 +391,27 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { require.Equal(t, "Heim", nnSite.Menus["main"].ByName()[0].Name) // Issue #1302 - require.Equal(t, template.URL(""), enSite.RegularPages[0].RSSLink()) + require.Equal(t, template.URL(""), enSite.RegularPages[0].(*Page).RSSLink()) // Issue #3108 - prevPage := enSite.RegularPages[0].PrevPage + prevPage := enSite.RegularPages[0].(*Page).PrevPage require.NotNil(t, prevPage) - require.Equal(t, KindPage, prevPage.Kind) + require.Equal(t, KindPage, prevPage.Kind()) for { if prevPage == nil { break } - require.Equal(t, KindPage, prevPage.Kind) - prevPage = prevPage.PrevPage + require.Equal(t, KindPage, prevPage.Kind()) + prevPage = prevPage.(*Page).PrevPage } // Check bundles bundleFr := frSite.getPage(KindPage, "bundles/b1/index.md") require.NotNil(t, bundleFr) require.Equal(t, "/blog/fr/bundles/b1/", bundleFr.RelPermalink()) - require.Equal(t, 1, len(bundleFr.Resources)) - logoFr := bundleFr.Resources.GetMatch("logo*") + require.Equal(t, 1, len(bundleFr.Resources())) + logoFr := bundleFr.Resources().GetMatch("logo*") require.NotNil(t, logoFr) require.Equal(t, "/blog/fr/bundles/b1/logo.png", logoFr.RelPermalink()) b.AssertFileContent("public/fr/bundles/b1/logo.png", "PNG Data") @@ -420,8 +419,8 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { bundleEn := enSite.getPage(KindPage, "bundles/b1/index.en.md") require.NotNil(t, bundleEn) require.Equal(t, "/blog/en/bundles/b1/", bundleEn.RelPermalink()) - require.Equal(t, 1, len(bundleEn.Resources)) - logoEn := bundleEn.Resources.GetMatch("logo*") + require.Equal(t, 1, len(bundleEn.Resources())) + logoEn := bundleEn.Resources().GetMatch("logo*") require.NotNil(t, logoEn) 
require.Equal(t, "/blog/en/bundles/b1/logo.png", logoEn.RelPermalink()) b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data") @@ -504,9 +503,9 @@ func TestMultiSitesRebuild(t *testing.T) { assert.Len(enSite.RegularPages, 6) assert.Len(enSite.AllPages, 34) assert.Len(frSite.RegularPages, 5) - require.Equal(t, "new_fr_1", frSite.RegularPages[3].title) - require.Equal(t, "new_en_2", enSite.RegularPages[0].title) - require.Equal(t, "new_en_1", enSite.RegularPages[1].title) + require.Equal(t, "new_fr_1", frSite.RegularPages[3].Title()) + require.Equal(t, "new_en_2", enSite.RegularPages[0].Title()) + require.Equal(t, "new_en_1", enSite.RegularPages[1].Title()) rendered := readDestination(t, fs, "public/en/new1/index.html") require.True(t, strings.Contains(rendered, "new_en_1"), rendered) @@ -540,7 +539,7 @@ func TestMultiSitesRebuild(t *testing.T) { }, func(t *testing.T) { assert.Len(enSite.RegularPages, 6, "Rename") - require.Equal(t, "new_en_1", enSite.RegularPages[1].title) + require.Equal(t, "new_en_1", enSite.RegularPages[1].Title()) rendered := readDestination(t, fs, "public/en/new1renamed/index.html") require.True(t, strings.Contains(rendered, "new_en_1"), rendered) }}, @@ -582,7 +581,7 @@ func TestMultiSitesRebuild(t *testing.T) { homeEn := enSite.getPage(KindHome) require.NotNil(t, homeEn) assert.Len(homeEn.Translations(), 3) - require.Equal(t, "fr", homeEn.Translations()[0].Lang()) + require.Equal(t, "fr", homeEn.Translations()[0].(*Page).Lang()) }, }, @@ -626,12 +625,13 @@ func assertShouldNotBuild(t *testing.T, sites *HugoSites) { s := sites.Sites[0] for _, p := range s.rawAllPages { + pp := p.(*Page) // No HTML when not processed - require.Equal(t, p.shouldBuild(), bytes.Contains(p.workContent, []byte("")), p.BaseFileName()+": "+string(p.workContent)) + require.Equal(t, pp.shouldBuild(), bytes.Contains(pp.workContent, []byte("")), pp.BaseFileName()+": "+string(pp.workContent)) - require.Equal(t, p.shouldBuild(), p.content() != "", 
fmt.Sprintf("%v:%v", p.content(), p.shouldBuild())) + require.Equal(t, pp.shouldBuild(), pp.content() != "", fmt.Sprintf("%v:%v", pp.content(), pp.shouldBuild())) - require.Equal(t, p.shouldBuild(), p.content() != "", p.BaseFileName()) + require.Equal(t, pp.shouldBuild(), pp.content() != "", pp.BaseFileName()) } } @@ -678,20 +678,21 @@ title = "Svenska" homeEn := enSite.getPage(KindHome) require.NotNil(t, homeEn) require.Len(t, homeEn.Translations(), 4) - require.Equal(t, "sv", homeEn.Translations()[0].Lang()) + + require.Equal(t, "sv", homeEn.Translations()[0].(*Page).Lang()) require.Len(t, enSite.RegularPages, 5) require.Len(t, frSite.RegularPages, 4) // Veriy Swedish site require.Len(t, svSite.RegularPages, 1) - svPage := svSite.RegularPages[0] + svPage := svSite.RegularPages[0].(*Page) - require.Equal(t, "Swedish Contentfile", svPage.title) + require.Equal(t, "Swedish Contentfile", svPage.Title()) require.Equal(t, "sv", svPage.Lang()) require.Len(t, svPage.Translations(), 2) require.Len(t, svPage.AllTranslations(), 3) - require.Equal(t, "en", svPage.Translations()[0].Lang()) + require.Equal(t, "en", svPage.Translations()[0].(*Page).Lang()) // Regular pages have no children require.Len(t, svPage.Pages, 0) diff --git a/hugolib/hugo_sites_multihost_test.go b/hugolib/hugo_sites_multihost_test.go index 83d6bfc9e9a..2b88224cb36 100644 --- a/hugolib/hugo_sites_multihost_test.go +++ b/hugolib/hugo_sites_multihost_test.go @@ -97,8 +97,8 @@ languageName = "Nynorsk" bundleEn := s1.getPage(KindPage, "bundles/b1/index.en.md") require.NotNil(t, bundleEn) require.Equal(t, "/docs/bundles/b1/", bundleEn.RelPermalink()) - require.Equal(t, 1, len(bundleEn.Resources)) - logoEn := bundleEn.Resources.GetMatch("logo*") + require.Equal(t, 1, len(bundleEn.Resources())) + logoEn := bundleEn.Resources().GetMatch("logo*") require.NotNil(t, logoEn) require.Equal(t, "/docs/bundles/b1/logo.png", logoEn.RelPermalink()) b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data") @@ -106,8 
+106,8 @@ languageName = "Nynorsk" bundleFr := s2.getPage(KindPage, "bundles/b1/index.md") require.NotNil(t, bundleFr) require.Equal(t, "/bundles/b1/", bundleFr.RelPermalink()) - require.Equal(t, 1, len(bundleFr.Resources)) - logoFr := bundleFr.Resources.GetMatch("logo*") + require.Equal(t, 1, len(bundleFr.Resources())) + logoFr := bundleFr.Resources().GetMatch("logo*") require.NotNil(t, logoFr) require.Equal(t, "/bundles/b1/logo.png", logoFr.RelPermalink()) b.AssertFileContent("public/fr/bundles/b1/logo.png", "PNG Data") diff --git a/hugolib/language_content_dir_test.go b/hugolib/language_content_dir_test.go index 577fdfaeb77..45299c87cec 100644 --- a/hugolib/language_content_dir_test.go +++ b/hugolib/language_content_dir_test.go @@ -244,8 +244,9 @@ Content. for i, p := range enSite.RegularPages { j := i + 1 msg := fmt.Sprintf("Test %d", j) - assert.Equal("en", p.Lang(), msg) - assert.Equal("sect", p.Section()) + pp := p.(*Page) + assert.Equal("en", pp.Lang(), msg) + assert.Equal("sect", pp.Section()) if j < 9 { if j%4 == 0 { assert.Contains(p.Title(), fmt.Sprintf("p-sv-%d.en", i+1), msg) @@ -263,13 +264,13 @@ Content. 
assert.Equal("/en/sect/mybundle/", bundleEn.RelPermalink()) assert.Equal("/sv/sect/mybundle/", bundleSv.RelPermalink()) - assert.Equal(4, len(bundleEn.Resources)) - assert.Equal(4, len(bundleNn.Resources)) - assert.Equal(4, len(bundleSv.Resources)) + assert.Equal(4, len(bundleEn.Resources())) + assert.Equal(4, len(bundleNn.Resources())) + assert.Equal(4, len(bundleSv.Resources())) - assert.Equal("/en/sect/mybundle/logo.png", bundleEn.Resources.GetMatch("logo*").RelPermalink()) - assert.Equal("/nn/sect/mybundle/logo.png", bundleNn.Resources.GetMatch("logo*").RelPermalink()) - assert.Equal("/sv/sect/mybundle/logo.png", bundleSv.Resources.GetMatch("logo*").RelPermalink()) + assert.Equal("/en/sect/mybundle/logo.png", bundleEn.Resources().GetMatch("logo*").RelPermalink()) + assert.Equal("/nn/sect/mybundle/logo.png", bundleNn.Resources().GetMatch("logo*").RelPermalink()) + assert.Equal("/sv/sect/mybundle/logo.png", bundleSv.Resources().GetMatch("logo*").RelPermalink()) b.AssertFileContent("/my/project/public/sv/sect/mybundle/featured.png", "PNG Data for sv") b.AssertFileContent("/my/project/public/nn/sect/mybundle/featured.png", "PNG Data for nn") diff --git a/hugolib/menu_test.go b/hugolib/menu_test.go index 6a8c89b95ea..ffda4ead0ec 100644 --- a/hugolib/menu_test.go +++ b/hugolib/menu_test.go @@ -85,7 +85,7 @@ Menu Main: {{ partial "menu.html" (dict "page" . 
"menu" "main") }}`, require.Len(t, s.Menus, 2) - p1 := s.RegularPages[0].Menus() + p1 := s.RegularPages[0].(*Page).Menus() // There is only one menu in the page, but it is "member of" 2 require.Len(t, p1, 1) diff --git a/hugolib/page.go b/hugolib/page.go index 71070d1e8cd..e5c18555645 100644 --- a/hugolib/page.go +++ b/hugolib/page.go @@ -35,6 +35,7 @@ import ( "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugolib/pagemeta" + "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/resource" "github.com/gohugoio/hugo/output" @@ -107,14 +108,14 @@ type Page struct { *pageInit *pageContentInit - // Kind is the discriminator that identifies the different page types + // kind is the discriminator that identifies the different page types // in the different page collections. This can, as an example, be used // to to filter regular pages, find sections etc. // Kind will, for the pages available to the templates, be one of: // page, home, section, taxonomy and taxonomyTerm. // It is of string type to make it easy to reason about in // the templates. - Kind string + kind string // Since Hugo 0.18 we got rid of the Node type. So now all pages are ... // pages (regular pages, home page, sections etc.). @@ -127,7 +128,7 @@ type Page struct { // with itself. The resource will typically be placed relative to the Page, // but templates should use the links (Permalink and RelPermalink) // provided by the Resource object. - Resources resource.Resources + resources resource.Resources // This is the raw front matter metadata that is going to be assigned to // the Resources above. 
@@ -287,10 +288,18 @@ func stackTrace(length int) string { return string(trace) } +func (p *Page) Kind() string { + return p.kind +} + func (p *Page) Data() interface{} { return p.data } +func (p *Page) Resources() resource.Resources { + return p.resources +} + func (p *Page) initContent() { p.contentInit.Do(func() { @@ -357,6 +366,10 @@ func (p *Page) Truncated() bool { return p.truncated } +func (p *Page) Len() int { + return len(p.content()) +} + func (p *Page) content() template.HTML { p.initContent() return p.contentv @@ -383,17 +396,6 @@ func (p *Page) SearchKeywords(cfg related.IndexConfig) ([]related.Keyword, error return cfg.ToKeywords(v) } -// PubDate is when this page was or will be published. -// NOTE: This is currently used for search only and is not meant to be used -// directly in templates. We need to consolidate the dates in this struct. -// TODO(bep) see https://github.com/gohugoio/hugo/issues/3854 -func (p *Page) PubDate() time.Time { - if !p.PublishDate.IsZero() { - return p.PublishDate - } - return p.Date -} - func (*Page) ResourceType() string { return pageResourceType } @@ -409,7 +411,7 @@ func (p *Page) RSSLink() template.URL { func (p *Page) createLayoutDescriptor() output.LayoutDescriptor { var section string - switch p.Kind { + switch p.Kind() { case KindSection: // In Hugo 0.22 we introduce nested sections, but we still only // use the first level to pick the correct template. This may change in @@ -421,7 +423,7 @@ func (p *Page) createLayoutDescriptor() output.LayoutDescriptor { } return output.LayoutDescriptor{ - Kind: p.Kind, + Kind: p.Kind(), Type: p.Type(), Lang: p.Lang(), Layout: p.Layout, @@ -453,22 +455,22 @@ func (p *Page) resetContent() { // IsNode returns whether this is an item of one of the list types in Hugo, // i.e. not a regular content page. func (p *Page) IsNode() bool { - return p.Kind != KindPage + return p.Kind() != KindPage } // IsHome returns whether this is the home page. 
func (p *Page) IsHome() bool { - return p.Kind == KindHome + return p.Kind() == KindHome } // IsSection returns whether this is a section page. func (p *Page) IsSection() bool { - return p.Kind == KindSection + return p.Kind() == KindSection } // IsPage returns whether this is a regular content page. func (p *Page) IsPage() bool { - return p.Kind == KindPage + return p.Kind() == KindPage } // BundleType returns the bundle type: "leaf", "branch" or an empty string if it is none. @@ -499,17 +501,22 @@ type PageMeta struct { wordCount int fuzzyWordCount int readingTime int - Weight int + weight int +} + +func (p PageMeta) Weight() int { + return p.weight } type Position struct { - PrevPage *Page - NextPage *Page - PrevInSection *Page - NextInSection *Page + PrevPage page.Page + NextPage page.Page + PrevInSection page.Page + NextInSection page.Page } -type Pages []*Page +// TODO(bep) page move +type Pages []page.Page func (ps Pages) String() string { return fmt.Sprintf("Pages(%d)", len(ps)) @@ -525,7 +532,7 @@ func (ps Pages) shuffle() { func (ps Pages) findPagePosByFilename(filename string) int { for i, x := range ps { - if x.Filename() == filename { + if x.(*Page).Filename() == filename { return i } } @@ -558,8 +565,8 @@ func (ps Pages) findPagePosByFilnamePrefix(prefix string) int { // Find the closest match for i, x := range ps { - if strings.HasPrefix(x.Filename(), prefix) { - diff := len(x.Filename()) - prefixLen + if strings.HasPrefix(x.(*Page).Filename(), prefix) { + diff := len(x.(*Page).Filename()) - prefixLen if lenDiff == -1 || diff < lenDiff { lenDiff = diff currPos = i @@ -573,7 +580,7 @@ func (ps Pages) findPagePosByFilnamePrefix(prefix string) int { // will return -1 if not found func (ps Pages) findPagePos(page *Page) int { for i, x := range ps { - if x.Filename() == page.Filename() { + if x.(*Page).Filename() == page.Filename() { return i } } @@ -873,7 +880,7 @@ func (s *Site) newPageFromFile(fi *fileInfo) *Page { return &Page{ pageInit: &pageInit{}, 
pageContentInit: &pageContentInit{}, - Kind: kindFromFileInfo(fi), + kind: kindFromFileInfo(fi), contentType: "", File: fi, Keywords: []string{}, Sitemap: Sitemap{Priority: -1}, @@ -905,7 +912,7 @@ func (p *Page) Type() string { // since Hugo 0.22 we support nested sections, but this will always be the first // element of any nested path. func (p *Page) Section() string { - if p.Kind == KindSection || p.Kind == KindTaxonomy || p.Kind == KindTaxonomyTerm { + if p.Kind() == KindSection || p.Kind() == KindTaxonomy || p.Kind() == KindTaxonomyTerm { return p.sections[0] } return p.File.Section() @@ -1033,7 +1040,7 @@ func (p *Page) IsTranslated() bool { func (p *Page) Translations() Pages { translations := make(Pages, 0) for _, t := range p.translations { - if t.Lang() != p.Lang() { + if t.(*Page).Lang() != p.Lang() { translations = append(translations, t) } } @@ -1046,14 +1053,14 @@ func (p *Page) Translations() Pages { // The Page Kind is always prepended. func (p *Page) TranslationKey() string { if p.translationKey != "" { - return p.Kind + "/" + p.translationKey + return p.Kind() + "/" + p.translationKey } if p.IsNode() { - return path.Join(p.Kind, path.Join(p.sections...), p.TranslationBaseName()) + return path.Join(p.Kind(), path.Join(p.sections...), p.TranslationBaseName()) } - return path.Join(p.Kind, filepath.ToSlash(p.Dir()), p.TranslationBaseName()) + return path.Join(p.Kind(), filepath.ToSlash(p.Dir()), p.TranslationBaseName()) } func (p *Page) LinkTitle() string { @@ -1065,7 +1072,7 @@ func (p *Page) LinkTitle() string { func (p *Page) shouldBuild() bool { return shouldBuild(p.s.BuildFuture, p.s.BuildExpired, - p.s.BuildDrafts, p.Draft, p.PublishDate, p.ExpiryDate) + p.s.BuildDrafts, p.Draft, p.PublishDate(), p.ExpiryDate()) } func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bool, @@ -1086,20 +1093,6 @@ func (p *Page) IsDraft() bool { return p.Draft } -func (p *Page) IsFuture() bool { - if p.PublishDate.IsZero() { - return false 
- } - return p.PublishDate.After(time.Now()) -} - -func (p *Page) IsExpired() bool { - if p.ExpiryDate.IsZero() { - return false - } - return p.ExpiryDate.Before(time.Now()) -} - func (p *Page) URL() string { if p.IsPage() && p.URLPath.URL != "" { @@ -1187,7 +1180,7 @@ func (p *Page) setContentInit(start bool) error { p.resetContent() } - for _, r := range p.Resources.ByType(pageResourceType) { + for _, r := range p.Resources().ByType(pageResourceType) { p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Pages) bp := r.(*Page) if start { @@ -1364,8 +1357,8 @@ func (p *Page) updateMetaData(frontmatter map[string]interface{}) error { p.Markup = cast.ToString(v) p.params[loki] = p.Markup case "weight": - p.Weight = cast.ToInt(v) - p.params[loki] = p.Weight + p.weight = cast.ToInt(v) + p.params[loki] = p.weight case "aliases": p.Aliases = cast.ToStringSlice(v) for _, alias := range p.Aliases { @@ -1655,7 +1648,7 @@ func (p *Page) Menus() PageMenus { if ok { link := p.RelPermalink() - me := MenuEntry{Page: p, Name: p.LinkTitle(), Weight: p.Weight, URL: link} + me := MenuEntry{Page: p, Name: p.LinkTitle(), Weight: p.weight, URL: link} // Could be the name of the menu to attach it to mname, err := cast.ToStringE(ms) @@ -1685,7 +1678,7 @@ func (p *Page) Menus() PageMenus { } for name, menu := range menus { - menuEntry := MenuEntry{Page: p, Name: p.LinkTitle(), URL: link, Weight: p.Weight, Menu: name} + menuEntry := MenuEntry{Page: p, Name: p.LinkTitle(), URL: link, Weight: p.weight, Menu: name} if menu != nil { p.s.Log.DEBUG.Printf("found menu: %q, in %q\n", name, p.title) ime, err := cast.ToStringMapE(menu) @@ -1750,7 +1743,7 @@ func (p *Page) absoluteSourceRef() string { func (p *Page) prepareLayouts() error { // TODO(bep): Check the IsRenderable logic. 
- if p.Kind == KindPage { + if p.Kind() == KindPage { if !p.IsRenderable() { self := "__" + p.UniqueID() err := p.s.TemplateHandler().AddLateTemplate(self, string(p.content())) @@ -1765,11 +1758,11 @@ func (p *Page) prepareLayouts() error { } func (p *Page) prepareData(s *Site) error { - if p.Kind != KindSection { + if p.Kind() != KindSection { var pages Pages p.data = make(map[string]interface{}) - switch p.Kind { + switch p.Kind() { case KindPage: case KindHome: pages = s.RegularPages @@ -1804,7 +1797,7 @@ func (p *Page) prepareData(s *Site) error { // A list of all KindTaxonomy pages with matching plural for _, p := range s.findPagesByKind(KindTaxonomy) { - if p.sections[0] == plural { + if p.(*Page).sections[0] == plural { pages = append(pages, p) } } @@ -1828,35 +1821,39 @@ func (p *Page) updatePageDates() { return } - if !p.Date.IsZero() { - if p.Lastmod.IsZero() { - p.Lastmod = p.Date - } - return - } else if !p.Lastmod.IsZero() { - if p.Date.IsZero() { - p.Date = p.Lastmod + // TODO(bep) page + + /* + if !p.Date.IsZero() { + if p.Lastmod.IsZero() { + p.Lastmod = p.Date + } + return + } else if !p.Lastmod().IsZero() { + if p.Date().IsZero() { + p.Date = p.Lastmod + } + return } - return - } - // Set it to the first non Zero date in children - var foundDate, foundLastMod bool + // Set it to the first non Zero date in children + var foundDate, foundLastMod bool - for _, child := range p.Pages { - if !child.Date.IsZero() { - p.Date = child.Date - foundDate = true - } - if !child.Lastmod.IsZero() { - p.Lastmod = child.Lastmod - foundLastMod = true - } + for _, child := range p.Pages { + childp := child.(*Page) + if !childp.Date.IsZero() { + p.Date = childp.Date + foundDate = true + } + if !childp.Lastmod.IsZero() { + p.Lastmod = childp.Lastmod + foundLastMod = true + } - if foundDate && foundLastMod { - break - } - } + if foundDate && foundLastMod { + break + } + }*/ } // copy creates a copy of this page with the lazy sync.Once vars reset @@ -1920,11 +1917,11 @@ 
func (p *Page) Lang() string { func (p *Page) isNewTranslation(candidate *Page) bool { - if p.Kind != candidate.Kind { + if p.Kind() != candidate.Kind() { return false } - if p.Kind == KindPage || p.Kind == kindUnknown { + if p.Kind() == KindPage || p.Kind() == kindUnknown { panic("Node type not currently supported for this op") } @@ -2080,7 +2077,7 @@ func (p *Page) kindFromSections() string { } func (p *Page) setValuesForKind(s *Site) { - if p.Kind == kindUnknown { + if p.Kind() == kindUnknown { // This is either a taxonomy list, taxonomy term or a section nodeType := p.kindFromSections() @@ -2088,10 +2085,10 @@ func (p *Page) setValuesForKind(s *Site) { panic(fmt.Sprintf("Unable to determine page kind from %q", p.sections)) } - p.Kind = nodeType + p.kind = nodeType } - switch p.Kind { + switch p.Kind() { case KindHome: p.URLPath.URL = "/" case KindPage: @@ -2110,13 +2107,13 @@ func (p *Page) pathOrTitle() string { return p.title } -func (p *Page) Next() *Page { +func (p *Page) Next() page.Page { // TODO Remove the deprecation notice (but keep PrevPage as an alias) Hugo 0.52 helpers.Deprecated("Page", ".Next", "Use .PrevPage (yes, not .NextPage).", false) return p.PrevPage } -func (p *Page) Prev() *Page { +func (p *Page) Prev() page.Page { // TODO Remove the deprecation notice (but keep NextPage as an alias) Hugo 0.52 helpers.Deprecated("Page", ".Prev", "Use .NextPage (yes, not .PrevPage).", false) return p.NextPage diff --git a/hugolib/pageCache_test.go b/hugolib/pageCache_test.go index 48f595f8690..988b265c320 100644 --- a/hugolib/pageCache_test.go +++ b/hugolib/pageCache_test.go @@ -27,7 +27,7 @@ func TestPageCache(t *testing.T) { c1 := newPageCache() changeFirst := func(p Pages) { - p[0].Description = "changed" + p[0].(*Page).Description = "changed" } var o1 uint64 @@ -66,7 +66,7 @@ func TestPageCache(t *testing.T) { assert.Equal(t, !atomic.CompareAndSwapUint64(&o2, uint64(k), uint64(k+1)), c3) l2.Unlock() assert.NotNil(t, p3) - assert.Equal(t, 
p3[0].Description, "changed") + assert.Equal(t, p3[0].(*Page).Description, "changed") } }() } diff --git a/hugolib/pageGroup.go b/hugolib/pageGroup.go index 8aaa1018c94..b7426608d6d 100644 --- a/hugolib/pageGroup.go +++ b/hugolib/pageGroup.go @@ -19,6 +19,8 @@ import ( "sort" "strings" "time" + + "github.com/gohugoio/hugo/resources/page" ) // PageGroup represents a group of pages, grouped by the key. @@ -81,6 +83,9 @@ func (p PagesGroup) Reverse() PagesGroup { var ( errorType = reflect.TypeOf((*error)(nil)).Elem() pagePtrType = reflect.TypeOf((*Page)(nil)) + + // TODO(bep) page + pagesType = reflect.TypeOf(Pages{}) ) // GroupBy groups by the value in the given field or method name and with the given order. @@ -119,9 +124,9 @@ func (p Pages) GroupBy(key string, order ...string) (PagesGroup, error) { var tmp reflect.Value switch e := ft.(type) { case reflect.StructField: - tmp = reflect.MakeMap(reflect.MapOf(e.Type, reflect.SliceOf(pagePtrType))) + tmp = reflect.MakeMap(reflect.MapOf(e.Type, pagesType)) case reflect.Method: - tmp = reflect.MakeMap(reflect.MapOf(e.Type.Out(0), reflect.SliceOf(pagePtrType))) + tmp = reflect.MakeMap(reflect.MapOf(e.Type.Out(0), pagesType)) } for _, e := range p { @@ -137,7 +142,7 @@ func (p Pages) GroupBy(key string, order ...string) (PagesGroup, error) { continue } if !tmp.MapIndex(fv).IsValid() { - tmp.SetMapIndex(fv, reflect.MakeSlice(reflect.SliceOf(pagePtrType), 0, 0)) + tmp.SetMapIndex(fv, reflect.MakeSlice(pagesType, 0, 0)) } tmp.SetMapIndex(fv, reflect.Append(tmp.MapIndex(fv), ppv)) } @@ -145,7 +150,7 @@ func (p Pages) GroupBy(key string, order ...string) (PagesGroup, error) { sortedKeys := sortKeys(tmp.MapKeys(), direction) r := make([]PageGroup, len(sortedKeys)) for i, k := range sortedKeys { - r[i] = PageGroup{Key: k.Interface(), Pages: tmp.MapIndex(k).Interface().([]*Page)} + r[i] = PageGroup{Key: k.Interface(), Pages: tmp.MapIndex(k).Interface().(Pages)} } return r, nil @@ -167,11 +172,12 @@ func (p Pages) GroupByParam(key 
string, order ...string) (PagesGroup, error) { var tmp reflect.Value var keyt reflect.Type for _, e := range p { - param := e.getParamToLower(key) + ep := e.(*Page) + param := ep.getParamToLower(key) if param != nil { if _, ok := param.([]string); !ok { keyt = reflect.TypeOf(param) - tmp = reflect.MakeMap(reflect.MapOf(keyt, reflect.SliceOf(pagePtrType))) + tmp = reflect.MakeMap(reflect.MapOf(keyt, pagesType)) break } } @@ -181,20 +187,21 @@ func (p Pages) GroupByParam(key string, order ...string) (PagesGroup, error) { } for _, e := range p { - param := e.getParam(key, false) + ep := e.(*Page) + param := ep.getParam(key, false) if param == nil || reflect.TypeOf(param) != keyt { continue } v := reflect.ValueOf(param) if !tmp.MapIndex(v).IsValid() { - tmp.SetMapIndex(v, reflect.MakeSlice(reflect.SliceOf(pagePtrType), 0, 0)) + tmp.SetMapIndex(v, reflect.MakeSlice(pagesType, 0, 0)) } tmp.SetMapIndex(v, reflect.Append(tmp.MapIndex(v), reflect.ValueOf(e))) } var r []PageGroup for _, k := range sortKeys(tmp.MapKeys(), direction) { - r = append(r, PageGroup{Key: k.Interface(), Pages: tmp.MapIndex(k).Interface().([]*Page)}) + r = append(r, PageGroup{Key: k.Interface(), Pages: tmp.MapIndex(k).Interface().(Pages)}) } return r, nil @@ -211,14 +218,14 @@ func (p Pages) groupByDateField(sorter func(p Pages) Pages, formatter func(p *Pa sp = sp.Reverse() } - date := formatter(sp[0]) + date := formatter(sp[0].(*Page)) var r []PageGroup r = append(r, PageGroup{Key: date, Pages: make(Pages, 0)}) r[0].Pages = append(r[0].Pages, sp[0]) i := 0 for _, e := range sp[1:] { - date = formatter(e) + date = formatter(e.(*Page)) if r[i].Key.(string) != date { r = append(r, PageGroup{Key: date}) i++ @@ -237,7 +244,7 @@ func (p Pages) GroupByDate(format string, order ...string) (PagesGroup, error) { return p.ByDate() } formatter := func(p *Page) string { - return p.Date.Format(format) + return p.Date().Format(format) } return p.groupByDateField(sorter, formatter, order...) 
} @@ -251,7 +258,7 @@ func (p Pages) GroupByPublishDate(format string, order ...string) (PagesGroup, e return p.ByPublishDate() } formatter := func(p *Page) string { - return p.PublishDate.Format(format) + return p.PublishDate().Format(format) } return p.groupByDateField(sorter, formatter, order...) } @@ -265,7 +272,7 @@ func (p Pages) GroupByExpiryDate(format string, order ...string) (PagesGroup, er return p.ByExpiryDate() } formatter := func(p *Page) string { - return p.ExpiryDate.Format(format) + return p.ExpiryDate().Format(format) } return p.groupByDateField(sorter, formatter, order...) } @@ -278,15 +285,17 @@ func (p Pages) GroupByParamDate(key string, format string, order ...string) (Pag sorter := func(p Pages) Pages { var r Pages for _, e := range p { - param := e.getParamToLower(key) + ep := e.(*Page) + param := ep.getParamToLower(key) if param != nil { if _, ok := param.(time.Time); ok { r = append(r, e) } } } - pdate := func(p1, p2 *Page) bool { - return p1.getParamToLower(key).(time.Time).Unix() < p2.getParamToLower(key).(time.Time).Unix() + pdate := func(p1, p2 page.Page) bool { + p1p, p2p := p1.(*Page), p2.(*Page) + return p1p.getParamToLower(key).(time.Time).Unix() < p2p.getParamToLower(key).(time.Time).Unix() } pageBy(pdate).Sort(r) return r diff --git a/hugolib/pageGroup_test.go b/hugolib/pageGroup_test.go index febcb3c1c0d..3a06efcbe3e 100644 --- a/hugolib/pageGroup_test.go +++ b/hugolib/pageGroup_test.go @@ -45,10 +45,10 @@ func preparePageGroupTestPages(t *testing.T) Pages { if err != nil { t.Fatalf("failed to prepare test page %s", src.path) } - p.Weight = src.weight - p.Date = cast.ToTime(src.date) - p.PublishDate = cast.ToTime(src.date) - p.ExpiryDate = cast.ToTime(src.date) + p.weight = src.weight + p.DDate = cast.ToTime(src.date) + p.DPublishDate = cast.ToTime(src.date) + p.DExpiryDate = cast.ToTime(src.date) p.params["custom_param"] = src.param p.params["custom_date"] = cast.ToTime(src.date) pages = append(pages, p) @@ -268,9 +268,9 @@ 
func TestGroupByParamCalledWithCapitalLetterString(t *testing.T) { func TestGroupByParamCalledWithSomeUnavailableParams(t *testing.T) { t.Parallel() pages := preparePageGroupTestPages(t) - delete(pages[1].params, "custom_param") - delete(pages[3].params, "custom_param") - delete(pages[4].params, "custom_param") + delete(pages[1].(*Page).params, "custom_param") + delete(pages[3].(*Page).params, "custom_param") + delete(pages[4].(*Page).params, "custom_param") expect := PagesGroup{ {Key: "foo", Pages: Pages{pages[0], pages[2]}}, diff --git a/hugolib/pageSort.go b/hugolib/pageSort.go deleted file mode 100644 index 454beb473e9..00000000000 --- a/hugolib/pageSort.go +++ /dev/null @@ -1,332 +0,0 @@ -// Copyright 2018 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package hugolib - -import ( - "github.com/gohugoio/hugo/helpers" - - "sort" - - "github.com/spf13/cast" -) - -var spc = newPageCache() - -/* - * Implementation of a custom sorter for Pages - */ - -// A pageSorter implements the sort interface for Pages -type pageSorter struct { - pages Pages - by pageBy -} - -// pageBy is a closure used in the Sort.Less method. -type pageBy func(p1, p2 *Page) bool - -// Sort stable sorts the pages given the receiver's sort order. -func (by pageBy) Sort(pages Pages) { - ps := &pageSorter{ - pages: pages, - by: by, // The Sort method's receiver is the function (closure) that defines the sort order. 
- } - sort.Stable(ps) -} - -// defaultPageSort is the default sort for pages in Hugo: -// Order by Weight, Date, LinkTitle and then full file path. -var defaultPageSort = func(p1, p2 *Page) bool { - if p1.Weight == p2.Weight { - if p1.Date.Unix() == p2.Date.Unix() { - if p1.LinkTitle() == p2.LinkTitle() { - return (p1.FullFilePath() < p2.FullFilePath()) - } - return (p1.LinkTitle() < p2.LinkTitle()) - } - return p1.Date.Unix() > p2.Date.Unix() - } - - if p2.Weight == 0 { - return true - } - - if p1.Weight == 0 { - return false - } - - return p1.Weight < p2.Weight -} - -var languagePageSort = func(p1, p2 *Page) bool { - if p1.Language().Weight == p2.Language().Weight { - if p1.Date.Unix() == p2.Date.Unix() { - if p1.LinkTitle() == p2.LinkTitle() { - return (p1.FullFilePath() < p2.FullFilePath()) - } - return (p1.LinkTitle() < p2.LinkTitle()) - } - return p1.Date.Unix() > p2.Date.Unix() - } - - if p2.Language().Weight == 0 { - return true - } - - if p1.Language().Weight == 0 { - return false - } - - return p1.Language().Weight < p2.Language().Weight -} - -func (ps *pageSorter) Len() int { return len(ps.pages) } -func (ps *pageSorter) Swap(i, j int) { ps.pages[i], ps.pages[j] = ps.pages[j], ps.pages[i] } - -// Less is part of sort.Interface. It is implemented by calling the "by" closure in the sorter. -func (ps *pageSorter) Less(i, j int) bool { return ps.by(ps.pages[i], ps.pages[j]) } - -// Sort sorts the pages by the default sort order defined: -// Order by Weight, Date, LinkTitle and then full file path. -func (p Pages) Sort() { - // Remove in Hugo 0.51 - helpers.Deprecated("Pages", "Sort", "Use .ByWeight", true) - p.sort() -} - -func (p Pages) sort() { - pageBy(defaultPageSort).Sort(p) -} - -// Limit limits the number of pages returned to n. -func (p Pages) Limit(n int) Pages { - if len(p) > n { - return p[0:n] - } - return p -} - -// ByWeight sorts the Pages by weight and returns a copy. 
-// -// Adjacent invocations on the same receiver will return a cached result. -// -// This may safely be executed in parallel. -func (p Pages) ByWeight() Pages { - const key = "pageSort.ByWeight" - pages, _ := spc.get(key, pageBy(defaultPageSort).Sort, p) - return pages -} - -// ByTitle sorts the Pages by title and returns a copy. -// -// Adjacent invocations on the same receiver will return a cached result. -// -// This may safely be executed in parallel. -func (p Pages) ByTitle() Pages { - - const key = "pageSort.ByTitle" - - title := func(p1, p2 *Page) bool { - return p1.title < p2.title - } - - pages, _ := spc.get(key, pageBy(title).Sort, p) - return pages -} - -// ByLinkTitle sorts the Pages by link title and returns a copy. -// -// Adjacent invocations on the same receiver will return a cached result. -// -// This may safely be executed in parallel. -func (p Pages) ByLinkTitle() Pages { - - const key = "pageSort.ByLinkTitle" - - linkTitle := func(p1, p2 *Page) bool { - return p1.LinkTitle() < p2.LinkTitle() - } - - pages, _ := spc.get(key, pageBy(linkTitle).Sort, p) - - return pages -} - -// ByDate sorts the Pages by date and returns a copy. -// -// Adjacent invocations on the same receiver will return a cached result. -// -// This may safely be executed in parallel. -func (p Pages) ByDate() Pages { - - const key = "pageSort.ByDate" - - date := func(p1, p2 *Page) bool { - return p1.Date.Unix() < p2.Date.Unix() - } - - pages, _ := spc.get(key, pageBy(date).Sort, p) - - return pages -} - -// ByPublishDate sorts the Pages by publish date and returns a copy. -// -// Adjacent invocations on the same receiver will return a cached result. -// -// This may safely be executed in parallel. 
-func (p Pages) ByPublishDate() Pages { - - const key = "pageSort.ByPublishDate" - - pubDate := func(p1, p2 *Page) bool { - return p1.PublishDate.Unix() < p2.PublishDate.Unix() - } - - pages, _ := spc.get(key, pageBy(pubDate).Sort, p) - - return pages -} - -// ByExpiryDate sorts the Pages by publish date and returns a copy. -// -// Adjacent invocations on the same receiver will return a cached result. -// -// This may safely be executed in parallel. -func (p Pages) ByExpiryDate() Pages { - - const key = "pageSort.ByExpiryDate" - - expDate := func(p1, p2 *Page) bool { - return p1.ExpiryDate.Unix() < p2.ExpiryDate.Unix() - } - - pages, _ := spc.get(key, pageBy(expDate).Sort, p) - - return pages -} - -// ByLastmod sorts the Pages by the last modification date and returns a copy. -// -// Adjacent invocations on the same receiver will return a cached result. -// -// This may safely be executed in parallel. -func (p Pages) ByLastmod() Pages { - - const key = "pageSort.ByLastmod" - - date := func(p1, p2 *Page) bool { - return p1.Lastmod.Unix() < p2.Lastmod.Unix() - } - - pages, _ := spc.get(key, pageBy(date).Sort, p) - - return pages -} - -// ByLength sorts the Pages by length and returns a copy. -// -// Adjacent invocations on the same receiver will return a cached result. -// -// This may safely be executed in parallel. -func (p Pages) ByLength() Pages { - - const key = "pageSort.ByLength" - - length := func(p1, p2 *Page) bool { - return len(p1.content()) < len(p2.content()) - } - - pages, _ := spc.get(key, pageBy(length).Sort, p) - - return pages -} - -// ByLanguage sorts the Pages by the language's Weight. -// -// Adjacent invocations on the same receiver will return a cached result. -// -// This may safely be executed in parallel. -func (p Pages) ByLanguage() Pages { - - const key = "pageSort.ByLanguage" - - pages, _ := spc.get(key, pageBy(languagePageSort).Sort, p) - - return pages -} - -// Reverse reverses the order in Pages and returns a copy. 
-// -// Adjacent invocations on the same receiver will return a cached result. -// -// This may safely be executed in parallel. -func (p Pages) Reverse() Pages { - const key = "pageSort.Reverse" - - reverseFunc := func(pages Pages) { - for i, j := 0, len(pages)-1; i < j; i, j = i+1, j-1 { - pages[i], pages[j] = pages[j], pages[i] - } - } - - pages, _ := spc.get(key, reverseFunc, p) - - return pages -} - -// ByParam sorts the pages according to the given page Params key. -// -// Adjacent invocations on the same receiver with the same paramsKey will return a cached result. -// -// This may safely be executed in parallel. -func (p Pages) ByParam(paramsKey interface{}) Pages { - paramsKeyStr := cast.ToString(paramsKey) - key := "pageSort.ByParam." + paramsKeyStr - - paramsKeyComparator := func(p1, p2 *Page) bool { - v1, _ := p1.Param(paramsKeyStr) - v2, _ := p2.Param(paramsKeyStr) - - if v1 == nil { - return false - } - - if v2 == nil { - return true - } - - isNumeric := func(v interface{}) bool { - switch v.(type) { - case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64, float32, float64: - return true - default: - return false - } - } - - if isNumeric(v1) && isNumeric(v2) { - return cast.ToFloat64(v1) < cast.ToFloat64(v2) - } - - s1 := cast.ToString(v1) - s2 := cast.ToString(v2) - - return s1 < s2 - } - - pages, _ := spc.get(key, pageBy(paramsKeyComparator).Sort, p) - - return pages -} diff --git a/hugolib/pageSort_test.go b/hugolib/pageSort_test.go index 915947fd3af..2f321e6e812 100644 --- a/hugolib/pageSort_test.go +++ b/hugolib/pageSort_test.go @@ -19,6 +19,8 @@ import ( "testing" "time" + "github.com/gohugoio/hugo/resources/resource" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -38,18 +40,18 @@ func TestDefaultSort(t *testing.T) { setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "a", "c", "d"}, [4]int{4, 3, 2, 1}, p) p.sort() - assert.Equal(t, 1, p[0].Weight) + assert.Equal(t, 1, p[0].Weight()) // 
Consider zero weight, issue #2673 setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "a", "d", "c"}, [4]int{0, 0, 0, 1}, p) p.sort() - assert.Equal(t, 1, p[0].Weight) + assert.Equal(t, 1, p[0].Weight()) // next by date setSortVals([4]time.Time{d3, d4, d1, d2}, [4]string{"a", "b", "c", "d"}, [4]int{1, 1, 1, 1}, p) p.sort() - assert.Equal(t, d1, p[0].Date) + assert.Equal(t, d1, p[0].Date()) // finally by link title setSortVals([4]time.Time{d3, d3, d3, d3}, [4]string{"b", "c", "a", "d"}, [4]int{1, 1, 1, 1}, p) @@ -67,12 +69,13 @@ func TestSortByLinkTitle(t *testing.T) { pages := createSortTestPages(s, 6) for i, p := range pages { + pp := p.(*Page) if i < 5 { - p.title = fmt.Sprintf("title%d", i) + pp.title = fmt.Sprintf("title%d", i) } if i > 2 { - p.linkTitle = fmt.Sprintf("linkTitle%d", i) + pp.linkTitle = fmt.Sprintf("linkTitle%d", i) } } @@ -104,14 +107,14 @@ func TestSortByN(t *testing.T) { sortFunc func(p Pages) Pages assertFunc func(p Pages) bool }{ - {(Pages).ByWeight, func(p Pages) bool { return p[0].Weight == 1 }}, - {(Pages).ByTitle, func(p Pages) bool { return p[0].title == "ab" }}, + {(Pages).ByWeight, func(p Pages) bool { return p[0].Weight() == 1 }}, + {(Pages).ByTitle, func(p Pages) bool { return p[0].Title() == "ab" }}, {(Pages).ByLinkTitle, func(p Pages) bool { return p[0].LinkTitle() == "abl" }}, - {(Pages).ByDate, func(p Pages) bool { return p[0].Date == d4 }}, - {(Pages).ByPublishDate, func(p Pages) bool { return p[0].PublishDate == d4 }}, - {(Pages).ByExpiryDate, func(p Pages) bool { return p[0].ExpiryDate == d4 }}, - {(Pages).ByLastmod, func(p Pages) bool { return p[1].Lastmod == d3 }}, - {(Pages).ByLength, func(p Pages) bool { return p[0].content() == "b_content" }}, + {(Pages).ByDate, func(p Pages) bool { return p[0].Date() == d4 }}, + {(Pages).ByPublishDate, func(p Pages) bool { return p[0].PublishDate() == d4 }}, + {(Pages).ByExpiryDate, func(p Pages) bool { return p[0].ExpiryDate() == d4 }}, + {(Pages).ByLastmod, func(p Pages) bool { 
return p[1].Lastmod() == d3 }}, + {(Pages).ByLength, func(p Pages) bool { return p[0].(resource.LengthProvider).Len() == len("b_content") }}, } { setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "ab", "cde", "fg"}, [4]int{0, 3, 2, 1}, p) @@ -140,11 +143,11 @@ func TestPageSortReverse(t *testing.T) { t.Parallel() s := newTestSite(t) p1 := createSortTestPages(s, 10) - assert.Equal(t, 0, p1[0].fuzzyWordCount) - assert.Equal(t, 9, p1[9].fuzzyWordCount) + assert.Equal(t, 0, p1[0].(*Page).fuzzyWordCount) + assert.Equal(t, 9, p1[9].(*Page).fuzzyWordCount) p2 := p1.Reverse() - assert.Equal(t, 9, p2[0].fuzzyWordCount) - assert.Equal(t, 0, p2[9].fuzzyWordCount) + assert.Equal(t, 9, p2[0].(*Page).fuzzyWordCount) + assert.Equal(t, 0, p2[9].(*Page).fuzzyWordCount) // cached assert.True(t, pagesEqual(p2, p1.Reverse())) } @@ -155,7 +158,7 @@ func TestPageSortByParam(t *testing.T) { s := newTestSite(t) unsorted := createSortTestPages(s, 10) - delete(unsorted[9].params, "arbitrarily") + delete(unsorted[9].Params(), "arbitrarily") firstSetValue, _ := unsorted[0].Param(k) secondSetValue, _ := unsorted[1].Param(k) @@ -234,22 +237,25 @@ func BenchmarkSortByWeightAndReverse(b *testing.B) { func setSortVals(dates [4]time.Time, titles [4]string, weights [4]int, pages Pages) { for i := range dates { - pages[i].Date = dates[i] - pages[i].Lastmod = dates[i] - pages[i].Weight = weights[i] - pages[i].title = titles[i] + this := pages[i].(*Page) + other := pages[len(dates)-1-i].(*Page) + + this.DDate = dates[i] + this.DLastMod = dates[i] + this.weight = weights[i] + this.title = titles[i] // make sure we compare apples and ... apples ... 
- pages[len(dates)-1-i].linkTitle = pages[i].title + "l" - pages[len(dates)-1-i].PublishDate = dates[i] - pages[len(dates)-1-i].ExpiryDate = dates[i] - pages[len(dates)-1-i].workContent = []byte(titles[i] + "_content") + other.linkTitle = this.Title() + "l" + other.DPublishDate = dates[i] + other.DExpiryDate = dates[i] + other.workContent = []byte(titles[i] + "_content") } - lastLastMod := pages[2].Lastmod - pages[2].Lastmod = pages[1].Lastmod - pages[1].Lastmod = lastLastMod + lastLastMod := pages[2].Lastmod() + pages[2].(*Page).DLastMod = pages[1].Lastmod() + pages[1].(*Page).DLastMod = lastLastMod for _, p := range pages { - p.resetContent() + p.(*Page).resetContent() } } @@ -271,7 +277,7 @@ func createSortTestPages(s *Site, num int) Pages { w = 10 } p.fuzzyWordCount = i - p.Weight = w + p.weight = w p.Description = "initial" pages[i] = p diff --git a/hugolib/page_output.go b/hugolib/page_output.go index 0a3eef9a6a5..0506a041081 100644 --- a/hugolib/page_output.go +++ b/hugolib/page_output.go @@ -230,7 +230,7 @@ func (p *PageOutput) AlternativeOutputFormats() (OutputFormats, error) { // always be of the same length, but may contain different elements. func (p *PageOutput) deleteResource(i int) { p.resources = append(p.resources[:i], p.resources[i+1:]...) - p.Page.Resources = append(p.Page.Resources[:i], p.Page.Resources[i+1:]...) + p.Page.resources = append(p.Page.resources[:i], p.Page.resources[i+1:]...) } @@ -241,14 +241,14 @@ func (p *PageOutput) Resources() resource.Resources { // base folder. ff := p.outputFormats[0] if p.outputFormat.Path == ff.Path { - p.resources = p.Page.Resources + p.resources = p.Page.resources return } // Clone it with new base. - resources := make(resource.Resources, len(p.Page.Resources)) + resources := make(resource.Resources, len(p.Page.Resources())) - for i, r := range p.Page.Resources { + for i, r := range p.Page.Resources() { if c, ok := r.(resource.Cloner); ok { // Clone the same resource with a new target. 
resources[i] = c.WithNewBase(p.outputFormat.Path) diff --git a/hugolib/page_paths.go b/hugolib/page_paths.go index 9de7b0764fc..a115ccf57e2 100644 --- a/hugolib/page_paths.go +++ b/hugolib/page_paths.go @@ -74,7 +74,7 @@ type targetPathDescriptor struct { // and URLs for this Page. func (p *Page) createTargetPathDescriptor(t output.Format) (targetPathDescriptor, error) { if p.targetPathDescriptorPrototype == nil { - panic(fmt.Sprintf("Must run initTargetPathDescriptor() for page %q, kind %q", p.title, p.Kind)) + panic(fmt.Sprintf("Must run initTargetPathDescriptor() for page %q, kind %q", p.Title(), p.Kind())) } d := *p.targetPathDescriptorPrototype d.Type = t @@ -84,7 +84,7 @@ func (p *Page) createTargetPathDescriptor(t output.Format) (targetPathDescriptor func (p *Page) initTargetPathDescriptor() error { d := &targetPathDescriptor{ PathSpec: p.s.PathSpec, - Kind: p.Kind, + Kind: p.Kind(), Sections: p.sections, UglyURLs: p.s.Info.uglyURLs(p), Dir: filepath.ToSlash(p.Dir()), @@ -107,7 +107,7 @@ func (p *Page) initTargetPathDescriptor() error { // the permalink configuration values are likely to be redundant, e.g. // naively expanding /category/:slug/ would give /category/categories/ for // the "categories" KindTaxonomyTerm. 
- if p.Kind == KindPage || p.Kind == KindTaxonomy { + if p.Kind() == KindPage || p.Kind() == KindTaxonomy { if override, ok := p.Site.Permalinks[p.Section()]; ok { opath, err := override.Expand(p) if err != nil { @@ -127,7 +127,7 @@ func (p *Page) initTargetPathDescriptor() error { func (p *Page) initURLs() error { if len(p.outputFormats) == 0 { - p.outputFormats = p.s.outputFormats[p.Kind] + p.outputFormats = p.s.outputFormats[p.Kind()] } target := filepath.ToSlash(p.createRelativeTargetPath()) rel := p.s.PathSpec.URLizeFilename(target) @@ -278,7 +278,7 @@ func createTargetPath(d targetPathDescriptor) string { func (p *Page) createRelativeTargetPath() string { if len(p.outputFormats) == 0 { - if p.Kind == kindUnknown { + if p.Kind() == kindUnknown { panic(fmt.Sprintf("Page %q has unknown kind", p.title)) } panic(fmt.Sprintf("Page %q missing output format(s)", p.title)) diff --git a/hugolib/page_resource.go b/hugolib/page_resource.go index 201076e8b0b..de5045ae01d 100644 --- a/hugolib/page_resource.go +++ b/hugolib/page_resource.go @@ -14,10 +14,14 @@ package hugolib import ( + "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/resource" ) var ( - _ resource.Resource = (*Page)(nil) - _ resource.Resource = (*PageOutput)(nil) + _ resource.Resource = (*Page)(nil) + _ page.Page = (*Page)(nil) + _ resource.Resource = (*PageOutput)(nil) + _ page.Page = (*PageOutput)(nil) + _ resource.LengthProvider = (*Page)(nil) ) diff --git a/hugolib/page_test.go b/hugolib/page_test.go index 1db1d3522f2..30c05771e83 100644 --- a/hugolib/page_test.go +++ b/hugolib/page_test.go @@ -520,8 +520,8 @@ func checkPageType(t *testing.T, page *Page, pageType string) { } func checkPageDate(t *testing.T, page *Page, time time.Time) { - if page.Date != time { - t.Fatalf("Page date is: %s. Expected: %s", page.Date, time) + if page.Date() != time { + t.Fatalf("Page date is: %s. 
Expected: %s", page.Date(), time) } } @@ -624,7 +624,7 @@ func testAllMarkdownEnginesForPages(t *testing.T, func TestCreateNewPage(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) // issue #2290: Path is relative to the content dir and will continue to be so. require.Equal(t, filepath.FromSlash(fmt.Sprintf("p0.%s", ext)), p.Path()) @@ -646,7 +646,7 @@ func TestCreateNewPage(t *testing.T) { func TestPageWithDelimiter(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) checkPageTitle(t, p, "Simple") checkPageContent(t, p, normalizeExpected(ext, "
Summary Next Line
\n\nSome more text
\n"), ext) checkPageSummary(t, p, normalizeExpected(ext, "Summary Next Line
"), ext) @@ -668,7 +668,7 @@ func TestPageWithDelimiterForMarkdownThatCrossesBorder(t *testing.T) { require.Len(t, s.RegularPages, 1) - p := s.RegularPages[0] + p := s.RegularPages[0].(*Page) if p.Summary() != template.HTML( "The best static site generator.1
") { @@ -694,7 +694,7 @@ weight: %d Simple Page With Some Date` hasDate := func(p *Page) bool { - return p.Date.Year() == 2017 + return p.Date().Year() == 2017 } datePage := func(field string, weight int) string { @@ -705,7 +705,7 @@ Simple Page With Some Date` assertFunc := func(t *testing.T, ext string, pages Pages) { assert.True(len(pages) > 0) for _, p := range pages { - assert.True(hasDate(p)) + assert.True(hasDate(p.(*Page))) } } @@ -734,7 +734,7 @@ title: Raw s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true}) require.Len(t, s.RegularPages, 1) - p := s.RegularPages[0] + p := s.RegularPages[0].(*Page) require.Equal(t, p.RawContent(), "**Raw**") @@ -743,7 +743,7 @@ title: Raw func TestPageWithShortCodeInSummary(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) checkPageTitle(t, p, "Simple") checkPageContent(t, p, normalizeExpected(ext, "Summary Next Line. . More text here.
Some more text
")) checkPageSummary(t, p, "Summary Next Line. . More text here. Some more text") @@ -756,7 +756,7 @@ func TestPageWithShortCodeInSummary(t *testing.T) { func TestPageWithEmbeddedScriptTag(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) if ext == "ad" || ext == "rst" { // TOD(bep) return @@ -777,7 +777,7 @@ func TestPageWithAdditionalExtension(t *testing.T) { require.Len(t, s.RegularPages, 1) - p := s.RegularPages[0] + p := s.RegularPages[0].(*Page) checkPageContent(t, p, "first line.
\nsecond line.
fourth line.
\n") } @@ -792,7 +792,7 @@ func TestTableOfContents(t *testing.T) { require.Len(t, s.RegularPages, 1) - p := s.RegularPages[0] + p := s.RegularPages[0].(*Page) checkPageContent(t, p, "\n\nFor some moments the old man did not reply. He stood with bowed head, buried in deep thought. But at last he spoke.
\n\nI have no idea, of course, how long it took me to reach the limit of the plain,\nbut at last I entered the foothills, following a pretty little canyon upward\ntoward the mountains. Beside me frolicked a laughing brooklet, hurrying upon\nits noisy way down to the silent sea. In its quieter pools I discovered many\nsmall fish, of four-or five-pound weight I should imagine. In appearance,\nexcept as to size and color, they were not unlike the whale of our own seas. As\nI watched them playing about I discovered, not only that they suckled their\nyoung, but that at intervals they rose to the surface to breathe as well as to\nfeed upon certain grasses and a strange, scarlet lichen which grew upon the\nrocks just above the water line.
\n\nI remember I felt an extraordinary persuasion that I was being played with,\nthat presently, when I was upon the very verge of safety, this mysterious\ndeath–as swift as the passage of light–would leap after me from the pit about\nthe cylinder and strike me down. ## BB
\n\n“You’re a great Granser,” he cried delightedly, “always making believe them little marks mean something.”
\n") checkPageTOC(t, p, "") @@ -801,7 +801,7 @@ func TestTableOfContents(t *testing.T) { func TestPageWithMoreTag(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) checkPageTitle(t, p, "Simple") checkPageContent(t, p, normalizeExpected(ext, "Summary Same Line
\n\nSome more text
\n")) checkPageSummary(t, p, normalizeExpected(ext, "Summary Same Line
")) @@ -815,7 +815,7 @@ func TestPageWithMoreTag(t *testing.T) { func TestPageWithMoreTagOnlySummary(t *testing.T) { assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) checkTruncation(t, p, false, "page with summary delimiter at end") } @@ -826,7 +826,7 @@ func TestPageWithMoreTagOnlySummary(t *testing.T) { func TestSummaryWithHTMLTagsOnNextLine(t *testing.T) { assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) require.Contains(t, p.Summary(), "Happy new year everyone!") require.NotContains(t, p.Summary(), "User interface") } @@ -855,7 +855,7 @@ func TestPageWithDate(t *testing.T) { require.Len(t, s.RegularPages, 1) - p := s.RegularPages[0] + p := s.RegularPages[0].(*Page) d, _ := time.Parse(time.RFC3339, "2013-05-17T16:59:30Z") checkPageDate(t, p, d) @@ -908,13 +908,13 @@ func TestPageWithLastmodFromGitInfo(t *testing.T) { assrt.Len(enSite.RegularPages, 1) // 2018-03-11 is the Git author date for testsite/content/first-post.md - assrt.Equal("2018-03-11", enSite.RegularPages[0].Lastmod.Format("2006-01-02")) + assrt.Equal("2018-03-11", enSite.RegularPages[0].Lastmod().Format("2006-01-02")) nnSite := h.Sites[1] assrt.Len(nnSite.RegularPages, 1) // 2018-08-11 is the Git author date for testsite/content_nn/first-post.md - assrt.Equal("2018-08-11", nnSite.RegularPages[0].Lastmod.Format("2006-01-02")) + assrt.Equal("2018-08-11", nnSite.RegularPages[0].Lastmod().Format("2006-01-02")) } @@ -955,22 +955,22 @@ Content assrt.Len(s.RegularPages, 2) - noSlug := s.RegularPages[0] - slug := s.RegularPages[1] + noSlug := s.RegularPages[0].(*Page) + slug := s.RegularPages[1].(*Page) - assrt.Equal(28, noSlug.Lastmod.Day()) + assrt.Equal(28, noSlug.Lastmod().Day()) switch strings.ToLower(dateHandler) { case ":filename": - assrt.False(noSlug.Date.IsZero()) - assrt.False(slug.Date.IsZero()) - assrt.Equal(2012, noSlug.Date.Year()) - assrt.Equal(2012, slug.Date.Year()) + 
assrt.False(noSlug.Date().IsZero()) + assrt.False(slug.Date().IsZero()) + assrt.Equal(2012, noSlug.Date().Year()) + assrt.Equal(2012, slug.Date().Year()) assrt.Equal("noslug", noSlug.Slug) assrt.Equal("aslug", slug.Slug) case ":filemodtime": - assrt.Equal(c1fi.ModTime().Year(), noSlug.Date.Year()) - assrt.Equal(c2fi.ModTime().Year(), slug.Date.Year()) + assrt.Equal(c1fi.ModTime().Year(), noSlug.Date().Year()) + assrt.Equal(c2fi.ModTime().Year(), slug.Date().Year()) fallthrough default: assrt.Equal("", noSlug.Slug) @@ -985,7 +985,7 @@ Content func TestWordCountWithAllCJKRunesWithoutHasCJKLanguage(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) if p.WordCount() != 8 { t.Fatalf("[%s] incorrect word count for content '%s'. expected %v, got %v", ext, p.plain, 8, p.WordCount()) } @@ -999,7 +999,7 @@ func TestWordCountWithAllCJKRunesHasCJKLanguage(t *testing.T) { settings := map[string]interface{}{"hasCJKLanguage": true} assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) if p.WordCount() != 15 { t.Fatalf("[%s] incorrect word count for content '%s'. expected %v, got %v", ext, p.plain, 15, p.WordCount()) } @@ -1012,7 +1012,7 @@ func TestWordCountWithMainEnglishWithCJKRunes(t *testing.T) { settings := map[string]interface{}{"hasCJKLanguage": true} assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) if p.WordCount() != 74 { t.Fatalf("[%s] incorrect word count for content '%s'. expected %v, got %v", ext, p.plain, 74, p.WordCount()) } @@ -1033,7 +1033,7 @@ func TestWordCountWithIsCJKLanguageFalse(t *testing.T) { } assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) if p.WordCount() != 75 { t.Fatalf("[%s] incorrect word count for content '%s'. 
expected %v, got %v", ext, p.plain, 74, p.WordCount()) } @@ -1051,7 +1051,7 @@ func TestWordCountWithIsCJKLanguageFalse(t *testing.T) { func TestWordCount(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) if p.WordCount() != 483 { t.Fatalf("[%s] incorrect word count. expected %v, got %v", ext, 483, p.WordCount()) } @@ -1473,8 +1473,8 @@ func TestTranslationKey(t *testing.T) { home, _ := s.Info.Home() assert.NotNil(home) assert.Equal("home", home.TranslationKey()) - assert.Equal("page/k1", s.RegularPages[0].TranslationKey()) - p2 := s.RegularPages[1] + assert.Equal("page/k1", s.RegularPages[0].(*Page).TranslationKey()) + p2 := s.RegularPages[1].(*Page) assert.Equal("page/sect/simple", p2.TranslationKey()) @@ -1492,7 +1492,7 @@ func TestChompBOM(t *testing.T) { require.Len(t, s.RegularPages, 1) - p := s.RegularPages[0] + p := s.RegularPages[0].(*Page) checkPageTitle(t, p, "Simple") } @@ -1804,7 +1804,7 @@ tags: } - p := s.RegularPages[0] + p := s.RegularPages[0].(*Page) if uglyURLs { require.Equal(t, "/post/test0.dot.html", p.RelPermalink()) } else { diff --git a/hugolib/page_time_integration_test.go b/hugolib/page_time_integration_test.go index f180afa5e2e..5e489373287 100644 --- a/hugolib/page_time_integration_test.go +++ b/hugolib/page_time_integration_test.go @@ -25,11 +25,6 @@ import ( ) const ( - pageWithInvalidDate = `--- -date: 2010-05-02_15:29:31+08:00 ---- -Page With Invalid Date (replace T with _ for RFC 3339)` - pageWithDateRFC3339 = `--- date: 2010-05-02T15:29:31+08:00 --- @@ -91,15 +86,6 @@ date: 02 May 2010 15:29 PST Page With Date HugoLong` ) -func TestDegenerateDateFrontMatter(t *testing.T) { - t.Parallel() - s := newTestSite(t) - p, _ := s.newPageFrom(strings.NewReader(pageWithInvalidDate), "page/with/invalid/date") - if p.Date != *new(time.Time) { - t.Fatalf("Date should be set to time.Time zero value. 
Got: %s", p.Date) - } -} - func TestParsingDateInFrontMatter(t *testing.T) { t.Parallel() s := newTestSite(t) @@ -142,8 +128,8 @@ func TestParsingDateInFrontMatter(t *testing.T) { if err != nil { t.Fatalf("Expected to be able to parse page.") } - if !dt.Equal(p.Date) { - t.Errorf("Date does not equal frontmatter:\n%s\nExpecting: %s\n Got: %s. Diff: %s\n internal: %#v\n %#v", test.buf, dt, p.Date, dt.Sub(p.Date), dt, p.Date) + if !dt.Equal(p.Date()) { + t.Errorf("Date does not equal frontmatter:\n%s\nExpecting: %s\n Got: %s. Diff: %s\n internal: %#v\n %#v", test.buf, dt, p.Date(), dt.Sub(p.Date()), dt, p.Date()) } } } diff --git a/hugolib/pagebundler_handlers.go b/hugolib/pagebundler_handlers.go index 2df1f87656f..b12ec8a3d73 100644 --- a/hugolib/pagebundler_handlers.go +++ b/hugolib/pagebundler_handlers.go @@ -231,17 +231,17 @@ func (c *contentHandlers) parsePage(h contentHandler) contentHandler { pageResource.resourcePath = filepath.ToSlash(childCtx.target) pageResource.parent = p } - p.Resources = append(p.Resources, res.resource) + p.resources = append(p.resources, res.resource) } } - sort.SliceStable(p.Resources, func(i, j int) bool { - if p.Resources[i].ResourceType() < p.Resources[j].ResourceType() { + sort.SliceStable(p.Resources(), func(i, j int) bool { + if p.resources[i].ResourceType() < p.resources[j].ResourceType() { return true } - p1, ok1 := p.Resources[i].(*Page) - p2, ok2 := p.Resources[j].(*Page) + p1, ok1 := p.resources[i].(*Page) + p2, ok2 := p.resources[j].(*Page) if ok1 != ok2 { return ok2 @@ -251,12 +251,12 @@ func (c *contentHandlers) parsePage(h contentHandler) contentHandler { return defaultPageSort(p1, p2) } - return p.Resources[i].RelPermalink() < p.Resources[j].RelPermalink() + return p.resources[i].RelPermalink() < p.resources[j].RelPermalink() }) // Assign metadata from front matter if set if len(p.resourcesMetadata) > 0 { - resources.AssignMetadata(p.resourcesMetadata, p.Resources...) 
+ resources.AssignMetadata(p.resourcesMetadata, p.Resources()...) } } diff --git a/hugolib/pagebundler_test.go b/hugolib/pagebundler_test.go index ab047205911..78edc57fe8d 100644 --- a/hugolib/pagebundler_test.go +++ b/hugolib/pagebundler_test.go @@ -138,13 +138,13 @@ func TestPageBundlerSiteRegular(t *testing.T) { unicodeBundle := s.getPage(KindPage, "c/bundle/index.md") assert.NotNil(unicodeBundle) - pageResources := leafBundle1.Resources.ByType(pageResourceType) + pageResources := leafBundle1.Resources().ByType(pageResourceType) assert.Len(pageResources, 2) firstPage := pageResources[0].(*Page) secondPage := pageResources[1].(*Page) assert.Equal(filepath.FromSlash("/work/base/b/my-bundle/1.md"), firstPage.pathOrTitle(), secondPage.pathOrTitle()) assert.Contains(firstPage.content(), "TheContent") - assert.Equal(6, len(leafBundle1.Resources)) + assert.Equal(6, len(leafBundle1.Resources())) // Verify shortcode in bundled page assert.Contains(secondPage.content(), filepath.FromSlash("MyShort in b/my-bundle/2.md")) @@ -157,7 +157,7 @@ func TestPageBundlerSiteRegular(t *testing.T) { assert.Equal(secondPage, pageResources.GetMatch("2*")) assert.Nil(pageResources.GetMatch("doesnotexist*")) - imageResources := leafBundle1.Resources.ByType("image") + imageResources := leafBundle1.Resources().ByType("image") assert.Equal(3, len(imageResources)) image := imageResources[0] @@ -170,7 +170,7 @@ func TestPageBundlerSiteRegular(t *testing.T) { th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/c/logo.png"), "content") // Custom media type defined in site config. - assert.Len(leafBundle1.Resources.ByType("bepsays"), 1) + assert.Len(leafBundle1.Resources().ByType("bepsays"), 1) relPermalinker := func(s string) string { return fmt.Sprintf(s, relURLBase) @@ -286,9 +286,9 @@ func TestPageBundlerSiteMultilingual(t *testing.T) { // See https://github.com/gohugoio/hugo/issues/4295 // Every resource should have its Name prefixed with its base folder. 
- cBundleResources := bundleWithSubPath.Resources.Match("c/**") + cBundleResources := bundleWithSubPath.Resources().Match("c/**") assert.Equal(4, len(cBundleResources)) - bundlePage := bundleWithSubPath.Resources.GetMatch("c/page*") + bundlePage := bundleWithSubPath.Resources().GetMatch("c/page*") assert.NotNil(bundlePage) assert.IsType(&Page{}, bundlePage) @@ -334,10 +334,10 @@ func TestMultilingualDisableLanguage(t *testing.T) { // No nn pages assert.Equal(16, len(s.AllPages)) for _, p := range s.rawAllPages { - assert.True(p.Lang() != "nn") + assert.True(p.(*Page).Lang() != "nn") } for _, p := range s.AllPages { - assert.True(p.Lang() != "nn") + assert.True(p.(*Page).Lang() != "nn") } } @@ -361,8 +361,8 @@ func TestPageBundlerSiteWitSymbolicLinksInContent(t *testing.T) { assert.Equal(7, len(s.RegularPages)) a1Bundle := s.getPage(KindPage, "symbolic2/a1/index.md") assert.NotNil(a1Bundle) - assert.Equal(2, len(a1Bundle.Resources)) - assert.Equal(1, len(a1Bundle.Resources.ByType(pageResourceType))) + assert.Equal(2, len(a1Bundle.Resources())) + assert.Equal(1, len(a1Bundle.Resources().ByType(pageResourceType))) th.assertFileContent(filepath.FromSlash(workDir+"/public/a/page/index.html"), "TheContent") th.assertFileContent(filepath.FromSlash(workDir+"/public/symbolic1/s1/index.html"), "TheContent") @@ -430,7 +430,7 @@ HEADLESS {{< myShort >}} assert.Equal("", headless.Permalink()) assert.Contains(headless.content(), "HEADLESS SHORTCODE") - headlessResources := headless.Resources + headlessResources := headless.Resources() assert.Equal(3, len(headlessResources)) assert.Equal(2, len(headlessResources.Match("l*"))) pageResource := headlessResources.GetMatch("p*") diff --git a/hugolib/pagecollections.go b/hugolib/pagecollections.go index 78325344b60..e055140c067 100644 --- a/hugolib/pagecollections.go +++ b/hugolib/pagecollections.go @@ -21,6 +21,7 @@ import ( "github.com/gohugoio/hugo/cache" "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/resources/page" 
) // PageCollections contains the page collections for a site. @@ -71,7 +72,7 @@ func (c *PageCollections) getFromCache(ref string) (*Page, error) { return nil, fmt.Errorf("page reference %q is ambiguous", ref) } -var ambiguityFlag = &Page{Kind: kindUnknown, title: "ambiguity flag"} +var ambiguityFlag = &Page{kind: kindUnknown, title: "ambiguity flag"} func (c *PageCollections) refreshPageCaches() { c.indexPages = c.findPagesByKindNotIn(KindPage, c.Pages) @@ -81,7 +82,7 @@ func (c *PageCollections) refreshPageCaches() { indexLoader := func() (map[string]interface{}, error) { index := make(map[string]interface{}) - add := func(ref string, p *Page) { + add := func(ref string, p page.Page) { existing := index[ref] if existing == nil { index[ref] = p @@ -92,7 +93,8 @@ func (c *PageCollections) refreshPageCaches() { for _, pageCollection := range []Pages{c.RegularPages, c.headlessPages} { for _, p := range pageCollection { - sourceRef := p.absoluteSourceRef() + pp := p.(*Page) + sourceRef := pp.absoluteSourceRef() if sourceRef != "" { // index the canonical ref @@ -101,9 +103,9 @@ func (c *PageCollections) refreshPageCaches() { } // Ref/Relref supports this potentially ambiguous lookup. - add(p.LogicalName(), p) + add(pp.LogicalName(), p) - translationBaseName := p.TranslationBaseName() + translationBaseName := pp.TranslationBaseName() dir, _ := path.Split(sourceRef) dir = strings.TrimSuffix(dir, "/") @@ -124,12 +126,13 @@ func (c *PageCollections) refreshPageCaches() { for _, p := range c.indexPages { // index the canonical, unambiguous ref for any backing file // e.g. /section/_index.md - sourceRef := p.absoluteSourceRef() + pp := p.(*Page) + sourceRef := pp.absoluteSourceRef() if sourceRef != "" { add(sourceRef, p) } - ref := path.Join(p.sections...) + ref := path.Join(pp.sections...) // index the canonical, unambiguous virtual ref // e.g. 
/section @@ -265,7 +268,7 @@ func (c *PageCollections) getPageNew(context *Page, ref string) (*Page, error) { func (*PageCollections) findPagesByKindIn(kind string, inPages Pages) Pages { var pages Pages for _, p := range inPages { - if p.Kind == kind { + if p.Kind() == kind { pages = append(pages, p) } } @@ -274,8 +277,8 @@ func (*PageCollections) findPagesByKindIn(kind string, inPages Pages) Pages { func (*PageCollections) findFirstPageByKindIn(kind string, inPages Pages) *Page { for _, p := range inPages { - if p.Kind == kind { - return p + if p.Kind() == kind { + return p.(*Page) } } return nil @@ -284,7 +287,7 @@ func (*PageCollections) findFirstPageByKindIn(kind string, inPages Pages) *Page func (*PageCollections) findPagesByKindNotIn(kind string, inPages Pages) Pages { var pages Pages for _, p := range inPages { - if p.Kind != kind { + if p.Kind() != kind { pages = append(pages, p) } } @@ -301,7 +304,7 @@ func (c *PageCollections) addPage(page *Page) { func (c *PageCollections) removePageFilename(filename string) { if i := c.rawAllPages.findPagePosByFilename(filename); i >= 0 { - c.clearResourceCacheForPage(c.rawAllPages[i]) + c.clearResourceCacheForPage(c.rawAllPages[i].(*Page)) c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...) } @@ -309,7 +312,7 @@ func (c *PageCollections) removePageFilename(filename string) { func (c *PageCollections) removePage(page *Page) { if i := c.rawAllPages.findPagePos(page); i >= 0 { - c.clearResourceCacheForPage(c.rawAllPages[i]) + c.clearResourceCacheForPage(c.rawAllPages[i].(*Page)) c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...) 
} @@ -319,8 +322,9 @@ func (c *PageCollections) findPagesByShortcode(shortcode string) Pages { var pages Pages for _, p := range c.rawAllPages { - if p.shortcodeState != nil { - if _, ok := p.shortcodeState.nameSet[shortcode]; ok { + pp := p.(*Page) + if pp.shortcodeState != nil { + if _, ok := pp.shortcodeState.nameSet[shortcode]; ok { pages = append(pages, p) } } @@ -335,7 +339,7 @@ func (c *PageCollections) replacePage(page *Page) { } func (c *PageCollections) clearResourceCacheForPage(page *Page) { - if len(page.Resources) > 0 { + if len(page.Resources()) > 0 { page.s.ResourceSpec.DeleteCacheByPrefix(page.relTargetPathBase) } } diff --git a/hugolib/pagecollections_test.go b/hugolib/pagecollections_test.go index 2f8b3149044..d2796d3a466 100644 --- a/hugolib/pagecollections_test.go +++ b/hugolib/pagecollections_test.go @@ -114,7 +114,7 @@ func (t *testCase) check(p *Page, err error, errorMsg string, assert *require.As default: assert.NoError(err, errorMsg) assert.NotNil(p, errorMsg) - assert.Equal(t.kind, p.Kind, errorMsg) + assert.Equal(t.kind, p.Kind(), errorMsg) assert.Equal(t.expectedTitle, p.title, errorMsg) } } diff --git a/hugolib/pagemeta/page_frontmatter.go b/hugolib/pagemeta/page_frontmatter.go index b67ffbc05a0..6a303906abe 100644 --- a/hugolib/pagemeta/page_frontmatter.go +++ b/hugolib/pagemeta/page_frontmatter.go @@ -300,7 +300,7 @@ func (f *FrontMatterHandler) createHandlers() error { if f.dateHandler, err = f.createDateHandler(f.fmConfig.date, func(d *FrontMatterDescriptor, t time.Time) { - d.Dates.Date = t + d.Dates.DDate = t setParamIfNotSet(fmDate, t, d) }); err != nil { return err @@ -309,7 +309,7 @@ func (f *FrontMatterHandler) createHandlers() error { if f.lastModHandler, err = f.createDateHandler(f.fmConfig.lastmod, func(d *FrontMatterDescriptor, t time.Time) { setParamIfNotSet(fmLastmod, t, d) - d.Dates.Lastmod = t + d.Dates.DLastMod = t }); err != nil { return err } @@ -317,7 +317,7 @@ func (f *FrontMatterHandler) createHandlers() error { 
if f.publishDateHandler, err = f.createDateHandler(f.fmConfig.publishDate, func(d *FrontMatterDescriptor, t time.Time) { setParamIfNotSet(fmPubDate, t, d) - d.Dates.PublishDate = t + d.Dates.DPublishDate = t }); err != nil { return err } @@ -325,7 +325,7 @@ func (f *FrontMatterHandler) createHandlers() error { if f.expiryDateHandler, err = f.createDateHandler(f.fmConfig.expiryDate, func(d *FrontMatterDescriptor, t time.Time) { setParamIfNotSet(fmExpiryDate, t, d) - d.Dates.ExpiryDate = t + d.Dates.DExpiryDate = t }); err != nil { return err } diff --git a/hugolib/pagemeta/page_frontmatter_test.go b/hugolib/pagemeta/page_frontmatter_test.go index 03f4c2f84a4..c4f7d40038f 100644 --- a/hugolib/pagemeta/page_frontmatter_test.go +++ b/hugolib/pagemeta/page_frontmatter_test.go @@ -143,13 +143,13 @@ func TestFrontMatterDatesHandlers(t *testing.T) { } d.Frontmatter["date"] = d2 assert.NoError(handler.HandleDates(d)) - assert.Equal(d1, d.Dates.Date) + assert.Equal(d1, d.Dates.DDate) assert.Equal(d2, d.Params["date"]) d = newTestFd() d.Frontmatter["date"] = d2 assert.NoError(handler.HandleDates(d)) - assert.Equal(d2, d.Dates.Date) + assert.Equal(d2, d.Dates.DDate) assert.Equal(d2, d.Params["date"]) } @@ -186,15 +186,15 @@ func TestFrontMatterDatesCustomConfig(t *testing.T) { assert.NoError(handler.HandleDates(d)) - assert.Equal(1, d.Dates.Date.Day()) - assert.Equal(4, d.Dates.Lastmod.Day()) - assert.Equal(4, d.Dates.PublishDate.Day()) - assert.Equal(5, d.Dates.ExpiryDate.Day()) + assert.Equal(1, d.Dates.DDate.Day()) + assert.Equal(4, d.Dates.DLastMod.Day()) + assert.Equal(4, d.Dates.DPublishDate.Day()) + assert.Equal(5, d.Dates.DExpiryDate.Day()) - assert.Equal(d.Dates.Date, d.Params["date"]) - assert.Equal(d.Dates.Date, d.Params["mydate"]) - assert.Equal(d.Dates.PublishDate, d.Params["publishdate"]) - assert.Equal(d.Dates.ExpiryDate, d.Params["expirydate"]) + assert.Equal(d.Dates.DDate, d.Params["date"]) + assert.Equal(d.Dates.DDate, d.Params["mydate"]) + 
assert.Equal(d.Dates.DPublishDate, d.Params["publishdate"]) + assert.Equal(d.Dates.DExpiryDate, d.Params["expirydate"]) assert.False(handler.IsDateKey("date")) // This looks odd, but is configured like this. assert.True(handler.IsDateKey("mydate")) @@ -227,10 +227,10 @@ func TestFrontMatterDatesDefaultKeyword(t *testing.T) { assert.NoError(handler.HandleDates(d)) - assert.Equal(1, d.Dates.Date.Day()) - assert.Equal(2, d.Dates.Lastmod.Day()) - assert.Equal(4, d.Dates.PublishDate.Day()) - assert.True(d.Dates.ExpiryDate.IsZero()) + assert.Equal(1, d.Dates.DDate.Day()) + assert.Equal(2, d.Dates.DLastMod.Day()) + assert.Equal(4, d.Dates.DPublishDate.Day()) + assert.True(d.Dates.DExpiryDate.IsZero()) } @@ -252,10 +252,10 @@ func TestFrontMatterDateFieldHandler(t *testing.T) { fd := newTestFd() d, _ := time.Parse("2006-01-02", "2018-02-01") fd.Frontmatter["date"] = d - h := handlers.newDateFieldHandler("date", func(d *FrontMatterDescriptor, t time.Time) { d.Dates.Date = t }) + h := handlers.newDateFieldHandler("date", func(d *FrontMatterDescriptor, t time.Time) { d.Dates.DDate = t }) handled, err := h(fd) assert.True(handled) assert.NoError(err) - assert.Equal(d, fd.Dates.Date) + assert.Equal(d, fd.Dates.DDate) } diff --git a/hugolib/pagemeta/pagemeta.go b/hugolib/pagemeta/pagemeta.go index 93dc9a12f0b..6c92e02e465 100644 --- a/hugolib/pagemeta/pagemeta.go +++ b/hugolib/pagemeta/pagemeta.go @@ -24,9 +24,26 @@ type URLPath struct { Section string } +// TODO(bep) page type PageDates struct { - Date time.Time - Lastmod time.Time - PublishDate time.Time - ExpiryDate time.Time + DDate time.Time + DLastMod time.Time + DPublishDate time.Time + DExpiryDate time.Time +} + +func (p PageDates) Date() time.Time { + return p.DDate +} + +func (p PageDates) Lastmod() time.Time { + return p.DLastMod +} + +func (p PageDates) PublishDate() time.Time { + return p.DPublishDate +} + +func (p PageDates) ExpiryDate() time.Time { + return p.DExpiryDate } diff --git a/hugolib/pagesPrevNext.go 
b/hugolib/pagesPrevNext.go index 947a49b8581..1f52b3395ea 100644 --- a/hugolib/pagesPrevNext.go +++ b/hugolib/pagesPrevNext.go @@ -13,10 +13,14 @@ package hugolib +import ( + "github.com/gohugoio/hugo/resources/page" +) + // Prev returns the previous page reletive to the given page. -func (p Pages) Prev(cur *Page) *Page { +func (p Pages) Prev(cur page.Page) page.Page { for x, c := range p { - if c.Eq(cur) { + if c.(*Page).Eq(cur) { if x == 0 { // TODO(bep) consider return nil here to get it line with the other Prevs return p[len(p)-1] @@ -28,9 +32,9 @@ func (p Pages) Prev(cur *Page) *Page { } // Next returns the next page reletive to the given page. -func (p Pages) Next(cur *Page) *Page { +func (p Pages) Next(cur page.Page) page.Page { for x, c := range p { - if c.Eq(cur) { + if c.(*Page).Eq(cur) { if x < len(p)-1 { return p[x+1] } diff --git a/hugolib/pagesPrevNext_test.go b/hugolib/pagesPrevNext_test.go index 5945d8fe50b..0aa251e9831 100644 --- a/hugolib/pagesPrevNext_test.go +++ b/hugolib/pagesPrevNext_test.go @@ -59,10 +59,10 @@ func prepareWeightedPagesPrevNext(t *testing.T) WeightedPages { if err != nil { t.Fatalf("failed to prepare test page %s", src.path) } - p.Weight = src.weight - p.Date = cast.ToTime(src.date) - p.PublishDate = cast.ToTime(src.date) - w = append(w, WeightedPage{p.Weight, p}) + p.weight = src.weight + p.DDate = cast.ToTime(src.date) + p.DPublishDate = cast.ToTime(src.date) + w = append(w, WeightedPage{p.weight, p}) } w.Sort() diff --git a/hugolib/pages_language_merge.go b/hugolib/pages_language_merge.go index 8bbae9a1271..8dbaef7648f 100644 --- a/hugolib/pages_language_merge.go +++ b/hugolib/pages_language_merge.go @@ -33,11 +33,13 @@ func (p1 Pages) MergeByLanguage(p2 Pages) Pages { merge := func(pages *Pages) { m := make(map[string]bool) for _, p := range *pages { - m[p.TranslationKey()] = true + pp := p.(*Page) + m[pp.TranslationKey()] = true } for _, p := range p2 { - if _, found := m[p.TranslationKey()]; !found { + pp := p.(*Page) + 
if _, found := m[pp.TranslationKey()]; !found { *pages = append(*pages, p) } } diff --git a/hugolib/pages_language_merge_test.go b/hugolib/pages_language_merge_test.go index efcfbf04b34..e190859823f 100644 --- a/hugolib/pages_language_merge_test.go +++ b/hugolib/pages_language_merge_test.go @@ -48,7 +48,7 @@ func TestMergeLanguages(t *testing.T) { if i == 2 || i%3 == 0 || i == 31 { expectedLang = "nn" } - p := mergedNN[i-1] + p := mergedNN[i-1].(*Page) assert.Equal(expectedLang, p.Lang(), fmt.Sprintf("Test %d", i)) } } @@ -60,24 +60,24 @@ func TestMergeLanguages(t *testing.T) { if i%5 == 0 { expectedLang = "fr" } - p := mergedFR[i-1] + p := mergedFR[i-1].(*Page) assert.Equal(expectedLang, p.Lang(), fmt.Sprintf("Test %d", i)) } - firstNN := nnSite.RegularPages[0] + firstNN := nnSite.RegularPages[0].(*Page) assert.Equal(4, len(firstNN.Sites())) assert.Equal("en", firstNN.Sites().First().Language().Lang) nnBundle := nnSite.getPage("page", "bundle") enBundle := enSite.getPage("page", "bundle") - assert.Equal(6, len(enBundle.Resources)) - assert.Equal(2, len(nnBundle.Resources)) + assert.Equal(6, len(enBundle.Resources())) + assert.Equal(2, len(nnBundle.Resources())) - var ri interface{} = nnBundle.Resources + var ri interface{} = nnBundle.Resources() // This looks less ugly in the templates ... 
- mergedNNResources := ri.(resource.ResourcesLanguageMerger).MergeByLanguage(enBundle.Resources) + mergedNNResources := ri.(resource.ResourcesLanguageMerger).MergeByLanguage(enBundle.Resources()) assert.Equal(6, len(mergedNNResources)) unchanged, err := nnSite.RegularPages.MergeByLanguageInterface(nil) diff --git a/hugolib/pages_related.go b/hugolib/pages_related.go index 2881a45e6e3..7bd4765e214 100644 --- a/hugolib/pages_related.go +++ b/hugolib/pages_related.go @@ -110,7 +110,7 @@ func (p Pages) withInvertedIndex(search func(idx *related.InvertedIndex) ([]rela return nil, nil } - cache := p[0].s.relatedDocsHandler + cache := p[0].(*Page).s.relatedDocsHandler searchIndex, err := cache.getOrCreateIndex(p) if err != nil { diff --git a/hugolib/pages_related_test.go b/hugolib/pages_related_test.go index ed8d9df9d6d..cfb2abab894 100644 --- a/hugolib/pages_related_test.go +++ b/hugolib/pages_related_test.go @@ -54,22 +54,22 @@ Content assert.NoError(err) assert.Len(result, 2) - assert.Equal("Page 2", result[0].title) - assert.Equal("Page 1", result[1].title) + assert.Equal("Page 2", result[0].Title()) + assert.Equal("Page 1", result[1].Title()) result, err = s.RegularPages.Related(s.RegularPages[0]) assert.Len(result, 2) - assert.Equal("Page 2", result[0].title) - assert.Equal("Page 3", result[1].title) + assert.Equal("Page 2", result[0].Title()) + assert.Equal("Page 3", result[1].Title()) result, err = s.RegularPages.RelatedIndices(s.RegularPages[0], "keywords") assert.Len(result, 2) - assert.Equal("Page 2", result[0].title) - assert.Equal("Page 3", result[1].title) + assert.Equal("Page 2", result[0].Title()) + assert.Equal("Page 3", result[1].Title()) result, err = s.RegularPages.RelatedTo(types.NewKeyValuesStrings("keywords", "bep", "rocks")) assert.NoError(err) assert.Len(result, 2) - assert.Equal("Page 2", result[0].title) - assert.Equal("Page 3", result[1].title) + assert.Equal("Page 2", result[0].Title()) + assert.Equal("Page 3", result[1].Title()) } diff --git 
a/hugolib/pagination.go b/hugolib/pagination.go index 05846a6bb35..fde2e0b9910 100644 --- a/hugolib/pagination.go +++ b/hugolib/pagination.go @@ -21,6 +21,8 @@ import ( "reflect" "strings" + "github.com/gohugoio/hugo/resources/page" + "github.com/gohugoio/hugo/config" "github.com/spf13/cast" @@ -120,7 +122,7 @@ func (p *Pager) element() paginatedElement { } // page returns the Page with the given index -func (p *Pager) page(index int) (*Page, error) { +func (p *Pager) page(index int) (page.Page, error) { if pages, ok := p.element().(Pages); ok { if pages != nil && len(pages) > index { @@ -221,7 +223,7 @@ func splitPageGroups(pageGroups PagesGroup, size int) []paginatedElement { type keyPage struct { key interface{} - page *Page + page page.Page } var ( @@ -270,7 +272,7 @@ func (p *Page) Paginator(options ...interface{}) (*Pager, error) { // If it's not, one will be created with all pages in Data["Pages"]. func (p *PageOutput) Paginator(options ...interface{}) (*Pager, error) { if !p.IsNode() { - return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind, p.title) + return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind(), p.Title()) } pagerSize, err := resolvePagerSize(p.s.Cfg, options...) @@ -321,7 +323,7 @@ func (p *Page) Paginate(seq interface{}, options ...interface{}) (*Pager, error) // Note that repeated calls will return the same result, even if the sequence is different. func (p *PageOutput) Paginate(seq interface{}, options ...interface{}) (*Pager, error) { if !p.IsNode() { - return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind, p.title) + return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind(), p.Title()) } pagerSize, err := resolvePagerSize(p.s.Cfg, options...) 
@@ -458,8 +460,6 @@ func toPages(seq interface{}) (Pages, error) { return v, nil case *Pages: return *(v), nil - case []*Page: - return Pages(v), nil case WeightedPages: return v.Pages(), nil case PageGroup: diff --git a/hugolib/pagination_test.go b/hugolib/pagination_test.go index 5dbef609bdc..473d5d4a1fa 100644 --- a/hugolib/pagination_test.go +++ b/hugolib/pagination_test.go @@ -59,7 +59,7 @@ func TestSplitPageGroups(t *testing.T) { // first group 10 in weight require.Equal(t, 10, pg.Key) for _, p := range pg.Pages { - require.True(t, p.fuzzyWordCount%2 == 0) // magic test + require.True(t, p.(*Page).fuzzyWordCount%2 == 0) // magic test } } } else { @@ -74,7 +74,7 @@ func TestSplitPageGroups(t *testing.T) { // last should have 5 in weight require.Equal(t, 5, pg.Key) for _, p := range pg.Pages { - require.True(t, p.fuzzyWordCount%2 != 0) // magic test + require.True(t, p.(*Page).fuzzyWordCount%2 != 0) // magic test } } } else { @@ -553,10 +553,10 @@ func TestPage(t *testing.T) { page21, _ := f2.page(1) page2Nil, _ := f2.page(3) - require.Equal(t, 3, page11.fuzzyWordCount) + require.Equal(t, 3, page11.(*Page).fuzzyWordCount) require.Nil(t, page1Nil) - require.Equal(t, 3, page21.fuzzyWordCount) + require.Equal(t, 3, page21.(*Page).fuzzyWordCount) require.Nil(t, page2Nil) } @@ -570,7 +570,7 @@ func createTestPages(s *Site, num int) Pages { w = 10 } p.fuzzyWordCount = i + 2 - p.Weight = w + p.weight = w pages[i] = p } diff --git a/hugolib/permalinks.go b/hugolib/permalinks.go index 3d261a113e3..1ad9dd0dc26 100644 --- a/hugolib/permalinks.go +++ b/hugolib/permalinks.go @@ -131,19 +131,19 @@ func pageToPermalinkDate(p *Page, dateField string) (string, error) { // a Page contains a Node which provides a field Date, time.Time switch dateField { case "year": - return strconv.Itoa(p.Date.Year()), nil + return strconv.Itoa(p.Date().Year()), nil case "month": - return fmt.Sprintf("%02d", int(p.Date.Month())), nil + return fmt.Sprintf("%02d", int(p.Date().Month())), nil case 
"monthname": - return p.Date.Month().String(), nil + return p.Date().Month().String(), nil case "day": - return fmt.Sprintf("%02d", p.Date.Day()), nil + return fmt.Sprintf("%02d", p.Date().Day()), nil case "weekday": - return strconv.Itoa(int(p.Date.Weekday())), nil + return strconv.Itoa(int(p.Date().Weekday())), nil case "weekdayname": - return p.Date.Weekday().String(), nil + return p.Date().Weekday().String(), nil case "yearday": - return strconv.Itoa(p.Date.YearDay()), nil + return strconv.Itoa(p.Date().YearDay()), nil } //TODO: support classic strftime escapes too // (and pass those through despite not being in the map) diff --git a/hugolib/shortcode_test.go b/hugolib/shortcode_test.go index 16ff0b7806b..17bbd780de9 100644 --- a/hugolib/shortcode_test.go +++ b/hugolib/shortcode_test.go @@ -87,7 +87,7 @@ title: "Title" require.Len(t, h.Sites[0].RegularPages, 1) - output := strings.TrimSpace(string(h.Sites[0].RegularPages[0].content())) + output := strings.TrimSpace(string(h.Sites[0].RegularPages[0].(*Page).content())) output = strings.TrimPrefix(output, "") output = strings.TrimSuffix(output, "
") diff --git a/hugolib/site.go b/hugolib/site.go index 43b398b7059..910ca89398f 100644 --- a/hugolib/site.go +++ b/hugolib/site.go @@ -174,7 +174,8 @@ func (s *Site) initRenderFormats() { formatSet := make(map[string]bool) formats := output.Formats{} for _, p := range s.Pages { - for _, f := range p.outputFormats { + pp := p.(*Page) + for _, f := range pp.outputFormats { if !formatSet[f.Name] { formats = append(formats, f) formatSet[f.Name] = true @@ -860,7 +861,7 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) { // pages that keeps a reference to the changed shortcode. pagesWithShortcode := h.findPagesByShortcode(shortcode) for _, p := range pagesWithShortcode { - contentFilesChanged = append(contentFilesChanged, p.File.Filename()) + contentFilesChanged = append(contentFilesChanged, p.(*Page).File.Filename()) } } @@ -1047,12 +1048,13 @@ func (s *Site) setupSitePages() { var siteLastChange time.Time for i, page := range s.RegularPages { + pagep := page.(*Page) if i > 0 { - page.NextPage = s.RegularPages[i-1] + pagep.NextPage = s.RegularPages[i-1] } if i < len(s.RegularPages)-1 { - page.PrevPage = s.RegularPages[i+1] + pagep.PrevPage = s.RegularPages[i+1] } // Determine Site.Info.LastChange @@ -1060,8 +1062,8 @@ func (s *Site) setupSitePages() { // is already applied, so this is *the* date to use. // We cannot just pick the last page in the default sort, because // that may not be ordered by date. 
- if page.Lastmod.After(siteLastChange) { - siteLastChange = page.Lastmod + if pagep.Lastmod().After(siteLastChange) { + siteLastChange = pagep.Lastmod() } } @@ -1360,7 +1362,7 @@ func (s *Site) buildSiteMeta() (err error) { for _, p := range s.AllPages { // this depends on taxonomies - p.setValuesForKind(s) + p.(*Page).setValuesForKind(s) } return @@ -1438,18 +1440,18 @@ func (s *Site) assembleMenus() { if sectionPagesMenu != "" { for _, p := range pages { - if p.Kind == KindSection { + if p.Kind() == KindSection { // From Hugo 0.22 we have nested sections, but until we get a // feel of how that would work in this setting, let us keep // this menu for the top level only. - id := p.Section() + id := p.(*Page).Section() if _, ok := flat[twoD{sectionPagesMenu, id}]; ok { continue } me := MenuEntry{Identifier: id, Name: p.LinkTitle(), - Weight: p.Weight, + Weight: p.Weight(), URL: p.RelPermalink()} flat[twoD{sectionPagesMenu, me.KeyName()}] = &me } @@ -1458,9 +1460,10 @@ func (s *Site) assembleMenus() { // Add menu entries provided by pages for _, p := range pages { - for name, me := range p.Menus() { + pp := p.(*Page) + for name, me := range pp.Menus() { if _, ok := flat[twoD{name, me.KeyName()}]; ok { - s.SendError(p.errWithFileContext(errors.Errorf("duplicate menu entry with identifier %q in menu %q", me.KeyName(), name))) + s.SendError(p.(*Page).errWithFileContext(errors.Errorf("duplicate menu entry with identifier %q in menu %q", me.KeyName(), name))) continue } flat[twoD{name, me.KeyName()}] = me @@ -1526,12 +1529,13 @@ func (s *Site) assembleTaxonomies() { s.taxonomiesPluralSingular[plural] = singular for _, p := range s.Pages { - vals := p.getParam(plural, !s.Info.preserveTaxonomyNames) + pp := p.(*Page) + vals := pp.getParam(plural, !s.Info.preserveTaxonomyNames) - w := p.getParamToLower(plural + "_weight") + w := pp.getParamToLower(plural + "_weight") weight, err := cast.ToIntE(w) if err != nil { - s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to 
int for %s", w, p.File.Path()) + s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %s", w, pp.File.Path()) // weight will equal zero, so let the flow continue } @@ -1553,7 +1557,7 @@ func (s *Site) assembleTaxonomies() { s.taxonomiesOrigKey[fmt.Sprintf("%s-%s", plural, s.PathSpec.MakePathSanitized(v))] = v } } else { - s.Log.ERROR.Printf("Invalid %s in %s\n", plural, p.File.Path()) + s.Log.ERROR.Printf("Invalid %s in %s\n", plural, pp.File.Path()) } } } @@ -1579,10 +1583,11 @@ func (s *Site) resetBuildState() { s.expiredCount = 0 for _, p := range s.rawAllPages { - p.subSections = Pages{} - p.parent = nil - p.scratch = maps.NewScratch() - p.mainPageOutput = nil + pp := p.(*Page) + pp.subSections = Pages{} + pp.parent = nil + pp.scratch = maps.NewScratch() + pp.mainPageOutput = nil } } @@ -1594,10 +1599,11 @@ func (s *Site) preparePages() error { var errors []error for _, p := range s.Pages { - if err := p.prepareLayouts(); err != nil { + pp := p.(*Page) + if err := pp.prepareLayouts(); err != nil { errors = append(errors, err) } - if err := p.prepareData(s); err != nil { + if err := pp.prepareData(s); err != nil { errors = append(errors, err) } } @@ -1688,7 +1694,7 @@ func (s *Site) renderAndWritePage(statCounter *uint64, name string, targetPath s renderBuffer := bp.GetBuffer() defer bp.PutBuffer(renderBuffer) - if err := s.renderForLayouts(p.Kind, p, renderBuffer, layouts...); err != nil { + if err := s.renderForLayouts(p.Kind(), p, renderBuffer, layouts...); err != nil { return err } @@ -1809,14 +1815,14 @@ func (s *Site) newNodePage(typ string, sections ...string) *Page { language: s.Language, pageInit: &pageInit{}, pageContentInit: &pageContentInit{}, - Kind: typ, + kind: typ, File: &source.FileInfo{}, data: make(map[string]interface{}), Site: &s.Info, sections: sections, s: s} - p.outputFormats = p.s.outputFormats[p.Kind] + p.outputFormats = p.s.outputFormats[p.Kind()] return p diff --git a/hugolib/site_render.go b/hugolib/site_render.go 
index 4ce2b4c53d1..7e4cfefcf31 100644 --- a/hugolib/site_render.go +++ b/hugolib/site_render.go @@ -49,8 +49,9 @@ func (s *Site) renderPages(cfg *BuildCfg) error { } for _, page := range s.Pages { - if cfg.shouldRender(page) { - pages <- page + pagep := page.(*Page) + if cfg.shouldRender(pagep) { + pages <- pagep } } @@ -70,14 +71,15 @@ func (s *Site) renderPages(cfg *BuildCfg) error { func headlessPagesPublisher(s *Site, wg *sync.WaitGroup) { defer wg.Done() for _, page := range s.headlessPages { - outFormat := page.outputFormats[0] // There is only one + pagep := page.(*Page) + outFormat := pagep.outputFormats[0] // There is only one if outFormat.Name != s.rc.Format.Name { // Avoid double work. continue } - pageOutput, err := newPageOutput(page, false, false, outFormat) + pageOutput, err := newPageOutput(pagep, false, false, outFormat) if err == nil { - page.mainPageOutput = pageOutput + page.(*Page).mainPageOutput = pageOutput err = pageOutput.renderResources() } @@ -164,7 +166,7 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa continue } - s.Log.DEBUG.Printf("Render %s to %q with layouts %q", pageOutput.Kind, targetPath, layouts) + s.Log.DEBUG.Printf("Render %s to %q with layouts %q", pageOutput.Kind(), targetPath, layouts) if err := s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "page "+pageOutput.FullFilePath(), targetPath, pageOutput, layouts...); err != nil { results <- err @@ -219,8 +221,8 @@ func (s *Site) renderPaginator(p *PageOutput) error { pagerNode.paginator = pager if pager.TotalPages() > 0 { first, _ := pager.page(0) - pagerNode.Date = first.Date - pagerNode.Lastmod = first.Lastmod + pagerNode.DDate = first.Date() + pagerNode.DLastMod = first.Lastmod() } pageNumber := i + 1 @@ -337,16 +339,17 @@ func (s *Site) renderSitemap() error { // TODO(bep) this should be done somewhere else for _, page := range pages { - if page.Sitemap.ChangeFreq == "" { - page.Sitemap.ChangeFreq = sitemapDefault.ChangeFreq + 
pagep := page.(*Page) + if pagep.Sitemap.ChangeFreq == "" { + pagep.Sitemap.ChangeFreq = sitemapDefault.ChangeFreq } - if page.Sitemap.Priority == -1 { - page.Sitemap.Priority = sitemapDefault.Priority + if pagep.Sitemap.Priority == -1 { + pagep.Sitemap.Priority = sitemapDefault.Priority } - if page.Sitemap.Filename == "" { - page.Sitemap.Filename = sitemapDefault.Filename + if pagep.Sitemap.Filename == "" { + pagep.Sitemap.Filename = sitemapDefault.Filename } } @@ -392,32 +395,34 @@ func (s *Site) renderRobotsTXT() error { // renderAliases renders shell pages that simply have a redirect in the header. func (s *Site) renderAliases() error { for _, p := range s.Pages { - if len(p.Aliases) == 0 { + pp := p.(*Page) + + if len(pp.Aliases) == 0 { continue } - for _, f := range p.outputFormats { + for _, f := range pp.outputFormats { if !f.IsHTML { continue } - o := newOutputFormat(p, f) + o := newOutputFormat(pp, f) plink := o.Permalink() - for _, a := range p.Aliases { + for _, a := range pp.Aliases { if f.Path != "" { // Make sure AMP and similar doesn't clash with regular aliases. a = path.Join(a, f.Path) } - lang := p.Lang() + lang := pp.Lang() if s.owner.multihost && !strings.HasPrefix(a, "/"+lang) { // These need to be in its language root. 
a = path.Join(lang, a) } - if err := s.writeDestAlias(a, plink, f, p); err != nil { + if err := s.writeDestAlias(a, plink, f, pp); err != nil { return err } } diff --git a/hugolib/site_sections.go b/hugolib/site_sections.go index 38f6a3b6fce..1a6d1943788 100644 --- a/hugolib/site_sections.go +++ b/hugolib/site_sections.go @@ -19,6 +19,8 @@ import ( "strconv" "strings" + "github.com/gohugoio/hugo/resources/page" + "github.com/gohugoio/hugo/helpers" radix "github.com/hashicorp/go-immutable-radix" @@ -112,7 +114,7 @@ func (p *Page) IsDescendant(other interface{}) (bool, error) { return false, err } - if pp.Kind == KindPage && len(p.sections) == len(pp.sections) { + if pp.Kind() == KindPage && len(p.sections) == len(pp.sections) { // A regular page is never its section's descendant. return false, nil } @@ -131,7 +133,7 @@ func (p *Page) IsAncestor(other interface{}) (bool, error) { return false, err } - if p.Kind == KindPage && len(p.sections) == len(pp.sections) { + if p.Kind() == KindPage && len(p.sections) == len(pp.sections) { // A regular page is never its section's ancestor. return false, nil } @@ -180,11 +182,13 @@ func (s *Site) assembleSections() Pages { } // Maps section kind pages to their path, i.e. "my/section" - sectionPages := make(map[string]*Page) + sectionPages := make(map[string]page.Page) // The sections with content files will already have been created. for _, sect := range s.findPagesByKind(KindSection) { - sectionPages[path.Join(sect.sections...)] = sect + sectp := sect.(*Page) + sectionPages[path.Join(sectp.sections...)] = sect + } const ( @@ -202,33 +206,35 @@ func (s *Site) assembleSections() Pages { home := s.findFirstPageByKindIn(KindHome, s.Pages) for i, p := range s.Pages { - if p.Kind != KindPage { + if p.Kind() != KindPage { continue } - if len(p.sections) == 0 { + pp := p.(*Page) + + if len(pp.sections) == 0 { // Root level pages. These will have the home page as their Parent. 
- p.parent = home + pp.parent = home continue } - sectionKey := path.Join(p.sections...) + sectionKey := path.Join(pp.sections...) sect, found := sectionPages[sectionKey] - if !found && len(p.sections) == 1 { + if !found && len(pp.sections) == 1 { // We only create content-file-less sections for the root sections. - sect = s.newSectionPage(p.sections[0]) + sect = s.newSectionPage(pp.sections[0]) sectionPages[sectionKey] = sect newPages = append(newPages, sect) found = true } - if len(p.sections) > 1 { + if len(pp.sections) > 1 { // Create the root section if not found. - _, rootFound := sectionPages[p.sections[0]] + _, rootFound := sectionPages[pp.sections[0]] if !rootFound { - sect = s.newSectionPage(p.sections[0]) - sectionPages[p.sections[0]] = sect + sect = s.newSectionPage(pp.sections[0]) + sectionPages[pp.sections[0]] = sect newPages = append(newPages, sect) } } @@ -246,15 +252,16 @@ func (s *Site) assembleSections() Pages { // given a content file in /content/a/b/c/_index.md, we cannot create just // the c section. for _, sect := range sectionPages { - for i := len(sect.sections); i > 0; i-- { - sectionPath := sect.sections[:i] + sectp := sect.(*Page) + for i := len(sectp.sections); i > 0; i-- { + sectionPath := sectp.sections[:i] sectionKey := path.Join(sectionPath...) - sect, found := sectionPages[sectionKey] + _, found := sectionPages[sectionKey] if !found { - sect = s.newSectionPage(sectionPath[len(sectionPath)-1]) - sect.sections = sectionPath - sectionPages[sectionKey] = sect - newPages = append(newPages, sect) + sectp = s.newSectionPage(sectionPath[len(sectionPath)-1]) + sectp.sections = sectionPath + sectionPages[sectionKey] = sectp + newPages = append(newPages, sectp) } } } @@ -271,8 +278,10 @@ func (s *Site) assembleSections() Pages { ) for i, p := range undecided { + pp := p.(*Page) // Now we can decide where to put this page into the tree. - sectionKey := path.Join(p.sections...) + sectionKey := path.Join(pp.sections...) 
+ _, v, _ := rootSections.LongestPrefix([]byte(sectionKey)) sect := v.(*Page) pagePath := path.Join(path.Join(sect.sections...), sectSectKey, "u", strconv.Itoa(i)) @@ -284,7 +293,7 @@ func (s *Site) assembleSections() Pages { rootPages.Walk(func(path []byte, v interface{}) bool { p := v.(*Page) - if p.Kind == KindSection { + if p.Kind() == KindSection { if currentSection != nil { // A new section currentSection.setPagePages(children) @@ -309,17 +318,18 @@ func (s *Site) assembleSections() Pages { // Build the sections hierarchy for _, sect := range sectionPages { - if len(sect.sections) == 1 { - sect.parent = home + sectp := sect.(*Page) + if len(sectp.sections) == 1 { + sectp.parent = home } else { - parentSearchKey := path.Join(sect.sections[:len(sect.sections)-1]...) + parentSearchKey := path.Join(sectp.sections[:len(sectp.sections)-1]...) _, v, _ := rootSections.LongestPrefix([]byte(parentSearchKey)) p := v.(*Page) - sect.parent = p + sectp.parent = p } - if sect.parent != nil { - sect.parent.subSections = append(sect.parent.subSections, sect) + if sectp.parent != nil { + sectp.parent.subSections = append(sectp.parent.subSections, sect) } } @@ -334,23 +344,25 @@ func (s *Site) assembleSections() Pages { mainSections, mainSectionsFound = s.Info.Params[sectionsParamIdLower] for _, sect := range sectionPages { - if sect.parent != nil { - sect.parent.subSections.sort() + sectp := sect.(*Page) + if sectp.parent != nil { + sectp.parent.subSections.sort() } - for i, p := range sect.Pages { + for i, p := range sectp.Pages { + pp := p.(*Page) if i > 0 { - p.NextInSection = sect.Pages[i-1] + pp.NextInSection = sectp.Pages[i-1] } - if i < len(sect.Pages)-1 { - p.PrevInSection = sect.Pages[i+1] + if i < len(sectp.Pages)-1 { + pp.PrevInSection = sectp.Pages[i+1] } } if !mainSectionsFound { - weight := len(sect.Pages) + (len(sect.Sections()) * 5) + weight := len(sectp.Pages) + (len(sectp.Sections()) * 5) if weight >= maxSectionWeight { - mainSections = 
[]string{sect.Section()} + mainSections = []string{sectp.Section()} maxSectionWeight = weight } } diff --git a/hugolib/site_sections_test.go b/hugolib/site_sections_test.go index 1987d2bcb1e..acdcc00b193 100644 --- a/hugolib/site_sections_test.go +++ b/hugolib/site_sections_test.go @@ -126,13 +126,13 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} {"elsewhere", func(p *Page) { assert.Len(p.Pages, 1) for _, p := range p.Pages { - assert.Equal([]string{"elsewhere"}, p.sections) + assert.Equal([]string{"elsewhere"}, p.(*Page).sections) } }}, {"post", func(p *Page) { assert.Len(p.Pages, 2) for _, p := range p.Pages { - assert.Equal("post", p.Section()) + assert.Equal("post", p.(*Page).Section()) } }}, {"empty1", func(p *Page) { @@ -163,7 +163,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} b := p.s.getPage(KindSection, "empty3", "b") assert.NotNil(b) assert.Len(b.Pages, 1) - assert.Equal("empty3.md", b.Pages[0].File.LogicalName()) + assert.Equal("empty3.md", b.Pages[0].(*Page).File.LogicalName()) }}, {"empty3", func(p *Page) { @@ -174,8 +174,8 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} {"top", func(p *Page) { assert.Equal("Tops", p.title) assert.Len(p.Pages, 2) - assert.Equal("mypage2.md", p.Pages[0].LogicalName()) - assert.Equal("mypage3.md", p.Pages[1].LogicalName()) + assert.Equal("mypage2.md", p.Pages[0].(*Page).LogicalName()) + assert.Equal("mypage3.md", p.Pages[1].(*Page).LogicalName()) home := p.Parent() assert.True(home.IsHome()) assert.Len(p.Sections(), 0) @@ -194,15 +194,16 @@ PAG|{{ .Title }}|{{ $sect.InSection . 
}} {"l1,l2", func(p *Page) { assert.Equal("T2_-1", p.title) assert.Len(p.Pages, 3) - assert.Equal(p, p.Pages[0].Parent()) + assert.Equal(p, p.Pages[0].(*Page).Parent()) assert.Equal("L1s", p.Parent().title) assert.Equal("/l1/l2/", p.URLPath.URL) assert.Equal("/l1/l2/", p.RelPermalink()) assert.Len(p.Sections(), 1) for _, child := range p.Pages { - assert.Equal(p, child.CurrentSection()) - active, err := child.InSection(p) + childp := child.(*Page) + assert.Equal(p, childp.CurrentSection()) + active, err := childp.InSection(p) assert.NoError(err) assert.True(active) active, err = p.InSection(child) @@ -215,14 +216,14 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} isAncestor, err := p.IsAncestor(child) assert.NoError(err) assert.True(isAncestor) - isAncestor, err = child.IsAncestor(p) + isAncestor, err = childp.IsAncestor(p) assert.NoError(err) assert.False(isAncestor) isDescendant, err := p.IsDescendant(child) assert.NoError(err) assert.False(isDescendant) - isDescendant, err = child.IsDescendant(p) + isDescendant, err = childp.IsDescendant(p) assert.NoError(err) assert.True(isDescendant) } @@ -233,7 +234,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . 
}} {"l1,l2_2", func(p *Page) { assert.Equal("T22_-1", p.title) assert.Len(p.Pages, 2) - assert.Equal(filepath.FromSlash("l1/l2_2/page_2_2_1.md"), p.Pages[0].Path()) + assert.Equal(filepath.FromSlash("l1/l2_2/page_2_2_1.md"), p.Pages[0].(*Page).Path()) assert.Equal("L1s", p.Parent().title) assert.Len(p.Sections(), 0) }}, diff --git a/hugolib/site_test.go b/hugolib/site_test.go index bf46c313abe..aeaadc49bd9 100644 --- a/hugolib/site_test.go +++ b/hugolib/site_test.go @@ -138,7 +138,7 @@ func TestFutureExpirationRender(t *testing.T) { } } - if s.AllPages[0].title == "doc2" { + if s.AllPages[0].Title() == "doc2" { t.Fatal("Expired content published unexpectedly") } } @@ -335,7 +335,7 @@ func doTestShouldAlwaysHaveUglyURLs(t *testing.T, uglyURLs bool) { } for _, p := range s.RegularPages { - assert.False(t, p.IsHome()) + assert.False(t, p.(*Page).IsHome()) } for _, test := range tests { @@ -610,40 +610,40 @@ func TestOrderedPages(t *testing.T) { s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true}) - if s.getPage(KindSection, "sect").Pages[1].title != "Three" || s.getPage(KindSection, "sect").Pages[2].title != "Four" { + if s.getPage(KindSection, "sect").Pages[1].Title() != "Three" || s.getPage(KindSection, "sect").Pages[2].Title() != "Four" { t.Error("Pages in unexpected order.") } bydate := s.RegularPages.ByDate() - if bydate[0].title != "One" { - t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bydate[0].title) + if bydate[0].Title() != "One" { + t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bydate[0].Title()) } rev := bydate.Reverse() - if rev[0].title != "Three" { - t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rev[0].title) + if rev[0].Title() != "Three" { + t.Errorf("Pages in unexpected order. 
First should be '%s', got '%s'", "Three", rev[0].Title()) } bypubdate := s.RegularPages.ByPublishDate() - if bypubdate[0].title != "One" { - t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bypubdate[0].title) + if bypubdate[0].Title() != "One" { + t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bypubdate[0].Title()) } rbypubdate := bypubdate.Reverse() - if rbypubdate[0].title != "Three" { - t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rbypubdate[0].title) + if rbypubdate[0].Title() != "Three" { + t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rbypubdate[0].Title()) } bylength := s.RegularPages.ByLength() - if bylength[0].title != "One" { - t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bylength[0].title) + if bylength[0].Title() != "One" { + t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bylength[0].Title()) } rbylength := bylength.Reverse() - if rbylength[0].title != "Four" { - t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Four", rbylength[0].title) + if rbylength[0].Title() != "Four" { + t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Four", rbylength[0].Title()) } } @@ -682,8 +682,8 @@ func TestGroupedPages(t *testing.T) { if rbysection[2].Key != "sect1" { t.Errorf("PageGroup array in unexpected order. Third group key should be '%s', got '%s'", "sect1", rbysection[2].Key) } - if rbysection[0].Pages[0].title != "Four" { - t.Errorf("PageGroup has an unexpected page. First group's pages should have '%s', got '%s'", "Four", rbysection[0].Pages[0].title) + if rbysection[0].Pages[0].Title() != "Four" { + t.Errorf("PageGroup has an unexpected page. First group's pages should have '%s', got '%s'", "Four", rbysection[0].Pages[0].Title()) } if len(rbysection[2].Pages) != 2 { t.Errorf("PageGroup has unexpected number of pages. 
Third group should have '%d' pages, got '%d' pages", 2, len(rbysection[2].Pages)) @@ -702,8 +702,8 @@ func TestGroupedPages(t *testing.T) { if bytype[2].Key != "sect3" { t.Errorf("PageGroup array in unexpected order. Third group key should be '%s', got '%s'", "sect3", bytype[2].Key) } - if bytype[2].Pages[0].title != "Four" { - t.Errorf("PageGroup has an unexpected page. Third group's data should have '%s', got '%s'", "Four", bytype[0].Pages[0].title) + if bytype[2].Pages[0].Title() != "Four" { + t.Errorf("PageGroup has an unexpected page. Third group's data should have '%s', got '%s'", "Four", bytype[0].Pages[0].Title()) } if len(bytype[0].Pages) != 2 { t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(bytype[2].Pages)) @@ -730,8 +730,8 @@ func TestGroupedPages(t *testing.T) { if bypubdate[1].Key != "0001" { t.Errorf("PageGroup array in unexpected order. Second group key should be '%s', got '%s'", "0001", bypubdate[1].Key) } - if bypubdate[0].Pages[0].title != "Three" { - t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", bypubdate[0].Pages[0].title) + if bypubdate[0].Pages[0].Title() != "Three" { + t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", bypubdate[0].Pages[0].Title()) } if len(bypubdate[0].Pages) != 3 { t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 3, len(bypubdate[0].Pages)) @@ -750,8 +750,8 @@ func TestGroupedPages(t *testing.T) { if byparam[2].Key != "bar" { t.Errorf("PageGroup array in unexpected order. Third group key should be '%s', got '%s'", "bar", byparam[2].Key) } - if byparam[2].Pages[0].title != "Three" { - t.Errorf("PageGroup has an unexpected page. 
Third group's pages should have '%s', got '%s'", "Three", byparam[2].Pages[0].title) + if byparam[2].Pages[0].Title() != "Three" { + t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", byparam[2].Pages[0].Title()) } if len(byparam[0].Pages) != 2 { t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(byparam[0].Pages)) @@ -783,8 +783,8 @@ func TestGroupedPages(t *testing.T) { if byParamDate[1].Key != "1979-05" { t.Errorf("PageGroup array in unexpected order. Second group key should be '%s', got '%s'", "1979-05", byParamDate[1].Key) } - if byParamDate[1].Pages[0].title != "One" { - t.Errorf("PageGroup has an unexpected page. Second group's pages should have '%s', got '%s'", "One", byParamDate[1].Pages[0].title) + if byParamDate[1].Pages[0].Title() != "One" { + t.Errorf("PageGroup has an unexpected page. Second group's pages should have '%s', got '%s'", "One", byParamDate[1].Pages[0].Title()) } if len(byParamDate[0].Pages) != 2 { t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(byParamDate[2].Pages)) @@ -840,16 +840,16 @@ func TestWeightedTaxonomies(t *testing.T) { writeSourcesToSource(t, "content", fs, sources...) 
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{}) - if s.Taxonomies["tags"]["a"][0].Page.title != "foo" { - t.Errorf("Pages in unexpected order, 'foo' expected first, got '%v'", s.Taxonomies["tags"]["a"][0].Page.title) + if s.Taxonomies["tags"]["a"][0].Page.Title() != "foo" { + t.Errorf("Pages in unexpected order, 'foo' expected first, got '%v'", s.Taxonomies["tags"]["a"][0].Page.Title()) } - if s.Taxonomies["categories"]["d"][0].Page.title != "bar" { - t.Errorf("Pages in unexpected order, 'bar' expected first, got '%v'", s.Taxonomies["categories"]["d"][0].Page.title) + if s.Taxonomies["categories"]["d"][0].Page.Title() != "bar" { + t.Errorf("Pages in unexpected order, 'bar' expected first, got '%v'", s.Taxonomies["categories"]["d"][0].Page.Title()) } - if s.Taxonomies["categories"]["e"][0].Page.title != "bza" { - t.Errorf("Pages in unexpected order, 'bza' expected first, got '%v'", s.Taxonomies["categories"]["e"][0].Page.title) + if s.Taxonomies["categories"]["e"][0].Page.Title() != "bza" { + t.Errorf("Pages in unexpected order, 'bza' expected first, got '%v'", s.Taxonomies["categories"]["e"][0].Page.Title()) } } diff --git a/hugolib/taxonomy.go b/hugolib/taxonomy.go index c8447d1bae0..92b1591c328 100644 --- a/hugolib/taxonomy.go +++ b/hugolib/taxonomy.go @@ -16,6 +16,8 @@ package hugolib import ( "fmt" "sort" + + "github.com/gohugoio/hugo/resources/page" ) // The TaxonomyList is a list of all taxonomies and their values @@ -39,11 +41,11 @@ type WeightedPages []WeightedPage // A WeightedPage is a Page with a weight. type WeightedPage struct { Weight int - *Page + page.Page } func (w WeightedPage) String() string { - return fmt.Sprintf("WeightedPage(%d,%q)", w.Weight, w.Page.title) + return fmt.Sprintf("WeightedPage(%d,%q)", w.Weight, w.Page.Title()) } // OrderedTaxonomy is another representation of an Taxonomy using an array rather than a map. 
@@ -176,9 +178,9 @@ func (wp WeightedPages) Pages() Pages { // Prev returns the previous Page relative to the given Page in // this weighted page set. -func (wp WeightedPages) Prev(cur *Page) *Page { +func (wp WeightedPages) Prev(cur page.Page) page.Page { for x, c := range wp { - if c.Page.UniqueID() == cur.UniqueID() { + if c.Page == cur { if x == 0 { return wp[len(wp)-1].Page } @@ -190,9 +192,9 @@ func (wp WeightedPages) Prev(cur *Page) *Page { // Next returns the next Page relative to the given Page in // this weighted page set. -func (wp WeightedPages) Next(cur *Page) *Page { +func (wp WeightedPages) Next(cur page.Page) page.Page { for x, c := range wp { - if c.Page.UniqueID() == cur.UniqueID() { + if c.Page == cur { if x < len(wp)-1 { return wp[x+1].Page } @@ -213,10 +215,10 @@ func (wp WeightedPages) Count() int { return len(wp) } func (wp WeightedPages) Less(i, j int) bool { if wp[i].Weight == wp[j].Weight { - if wp[i].Page.Date.Equal(wp[j].Page.Date) { - return wp[i].Page.title < wp[j].Page.title + if wp[i].Page.Date().Equal(wp[j].Page.Date()) { + return wp[i].Page.Title() < wp[j].Page.Title() } - return wp[i].Page.Date.After(wp[i].Page.Date) + return wp[i].Page.Date().After(wp[j].Page.Date()) } return wp[i].Weight < wp[j].Weight } diff --git a/hugolib/taxonomy_test.go b/hugolib/taxonomy_test.go index 1ae9fae228f..6578698f952 100644 --- a/hugolib/taxonomy_test.go +++ b/hugolib/taxonomy_test.go @@ -186,7 +186,7 @@ permalinkeds: require.Len(t, term.Pages, count) for _, page := range term.Pages { - require.Equal(t, KindTaxonomy, page.Kind) + require.Equal(t, KindTaxonomy, page.Kind()) } } diff --git a/hugolib/testhelpers_test.go b/hugolib/testhelpers_test.go index 64d1ff96a23..e761a26dec2 100644 --- a/hugolib/testhelpers_test.go +++ b/hugolib/testhelpers_test.go @@ -700,7 +700,7 @@ func dumpPages(pages ...*Page) { for i, p := range pages { fmt.Printf("%d: Kind: %s Title: %-10s RelPermalink: %-10s Path: %-10s sections: %s Len Sections(): %d\n", i+1, - p.Kind, 
p.title, p.RelPermalink(), p.Path(), p.sections, len(p.Sections())) + p.Kind(), p.title, p.RelPermalink(), p.Path(), p.sections, len(p.Sections())) } } diff --git a/hugolib/translations.go b/hugolib/translations.go index 2682363f003..01b6cf01738 100644 --- a/hugolib/translations.go +++ b/hugolib/translations.go @@ -13,23 +13,28 @@ package hugolib +import ( + "github.com/gohugoio/hugo/resources/page" +) + // Translations represent the other translations for a given page. The // string here is the language code, as affected by the `post.LANG.md` // filename. -type Translations map[string]*Page +type Translations map[string]page.Page -func pagesToTranslationsMap(pages []*Page) map[string]Translations { +func pagesToTranslationsMap(pages Pages) map[string]Translations { out := make(map[string]Translations) for _, page := range pages { - base := page.TranslationKey() + pagep := page.(*Page) + base := pagep.TranslationKey() pageTranslation, present := out[base] if !present { pageTranslation = make(Translations) } - pageLang := page.Lang() + pageLang := pagep.Lang() if pageLang == "" { continue } @@ -41,19 +46,20 @@ func pagesToTranslationsMap(pages []*Page) map[string]Translations { return out } -func assignTranslationsToPages(allTranslations map[string]Translations, pages []*Page) { +func assignTranslationsToPages(allTranslations map[string]Translations, pages Pages) { for _, page := range pages { - page.translations = page.translations[:0] - base := page.TranslationKey() + pagep := page.(*Page) + pagep.translations = pagep.translations[:0] + base := pagep.TranslationKey() trans, exist := allTranslations[base] if !exist { continue } for _, translatedPage := range trans { - page.translations = append(page.translations, translatedPage) + pagep.translations = append(pagep.translations, translatedPage) } - pageBy(languagePageSort).Sort(page.translations) + pageBy(languagePageSort).Sort(pagep.translations) } } diff --git a/related/inverted_index.go 
b/related/inverted_index.go index 309eb4097d0..7dcf50e4b33 100644 --- a/related/inverted_index.go +++ b/related/inverted_index.go @@ -110,7 +110,7 @@ type Document interface { SearchKeywords(cfg IndexConfig) ([]Keyword, error) // When this document was or will be published. - PubDate() time.Time + PublishDate() time.Time } // InvertedIndex holds an inverted index, also sometimes named posting list, which @@ -211,7 +211,7 @@ func (r ranks) Len() int { return len(r) } func (r ranks) Swap(i, j int) { r[i], r[j] = r[j], r[i] } func (r ranks) Less(i, j int) bool { if r[i].Weight == r[j].Weight { - return r[i].Doc.PubDate().After(r[j].Doc.PubDate()) + return r[i].Doc.PublishDate().After(r[j].Doc.PublishDate()) } return r[i].Weight > r[j].Weight } @@ -250,7 +250,7 @@ func (idx *InvertedIndex) SearchDoc(doc Document, indices ...string) ([]Document } - return idx.searchDate(doc.PubDate(), q...) + return idx.searchDate(doc.PublishDate(), q...) } // ToKeywords returns a Keyword slice of the given input. @@ -344,7 +344,7 @@ func (idx *InvertedIndex) searchDate(upperDate time.Time, query ...queryElement) for _, doc := range docs { if applyDateFilter { // Exclude newer than the limit given - if doc.PubDate().After(upperDate) { + if doc.PublishDate().After(upperDate) { continue } } diff --git a/related/inverted_index_test.go b/related/inverted_index_test.go index 2e6b90bbf0b..eeba8111d09 100644 --- a/related/inverted_index_test.go +++ b/related/inverted_index_test.go @@ -72,7 +72,7 @@ func (d *testDoc) SearchKeywords(cfg IndexConfig) ([]Keyword, error) { return d.keywords[cfg.Name], nil } -func (d *testDoc) PubDate() time.Time { +func (d *testDoc) PublishDate() time.Time { return d.date } diff --git a/resources/page/page.go b/resources/page/page.go new file mode 100644 index 00000000000..a4b5c09e2c1 --- /dev/null +++ b/resources/page/page.go @@ -0,0 +1,47 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package page contains the core interfaces and types for the Page resource, +// a core component in Hugo. +package page + +import ( + "github.com/gohugoio/hugo/related" + "github.com/gohugoio/hugo/resources/resource" +) + +// TODO(bep) page there is language and stuff going on. There will be +// page sources that does not care about that, so a "DefaultLanguagePage" wrapper... + +type Page interface { + resource.Resource + resource.ContentProvider + resource.LanguageProvider + resource.Dated + + Kind() string + + Param(key interface{}) (interface{}, error) + + Weight() int + LinkTitle() string + + Resources() resource.Resources + + // Make it indexable as a related.Document + SearchKeywords(cfg related.IndexConfig) ([]related.Keyword, error) +} + +// TranslationProvider provides translated versions of a Page. +type TranslationProvider interface { +} diff --git a/resources/resource/dates.go b/resources/resource/dates.go new file mode 100644 index 00000000000..fcbdac0ed27 --- /dev/null +++ b/resources/resource/dates.go @@ -0,0 +1,41 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resource + +import "time" + +// Dated wraps a "dated resource". These are the 4 dates that makes +// the date logic in Hugo. +type Dated interface { + Date() time.Time + Lastmod() time.Time + PublishDate() time.Time + ExpiryDate() time.Time +} + +// IsFuture returns whether the argument represents the future. +func IsFuture(d Dated) bool { + if d.PublishDate().IsZero() { + return false + } + return d.PublishDate().After(time.Now()) +} + +// IsExpired returns whether the argument is expired. +func IsExpired(d Dated) bool { + if d.ExpiryDate().IsZero() { + return false + } + return d.ExpiryDate().Before(time.Now()) +} diff --git a/resources/resource/resourcetypes.go b/resources/resource/resourcetypes.go index 120d753e4f7..5d2ac8018eb 100644 --- a/resources/resource/resourcetypes.go +++ b/resources/resource/resourcetypes.go @@ -14,6 +14,7 @@ package resource import ( + "github.com/gohugoio/hugo/langs" "github.com/gohugoio/hugo/media" "github.com/gohugoio/hugo/common/hugio" @@ -81,11 +82,15 @@ type Identifier interface { // ContentResource represents a Resource that provides a way to get to its content. // Most Resource types in Hugo implements this interface, including Page. -// This should be used with care, as it will read the file content into memory, but it -// should be cached as effectively as possible by the implementation. type ContentResource interface { resourceBase + ContentProvider +} +// ContentProvider provides Content. 
+// This should be used with care, as it will read the file content into memory, but it +// should be cached as effectively as possible by the implementation. +type ContentProvider interface { // Content returns this resource's content. It will be equivalent to reading the content // that RelPermalink points to in the published folder. // The return type will be contextual, and should be what you would expect: @@ -104,3 +109,14 @@ type ReadSeekCloserResource interface { resourceBase ReadSeekCloser() (hugio.ReadSeekCloser, error) } + +// LengthProvider is a Resource that provides a length +// (typically the length of the content). +type LengthProvider interface { + Len() int +} + +// LanguageProvider is a Resource in a language. +type LanguageProvider interface { + Language() *langs.Language +} diff --git a/tpl/collections/collections.go b/tpl/collections/collections.go index bad65369fab..2353c206c86 100644 --- a/tpl/collections/collections.go +++ b/tpl/collections/collections.go @@ -329,13 +329,18 @@ func (ns *Namespace) Group(key interface{}, items interface{}) (interface{}, err return nil, errors.New("nil is not a valid key to group by") } + if g, ok := items.(collections.Grouper); ok { + return g.Group(key, items) + } + + // TODO(bep) page need below? in := newSliceElement(items) if g, ok := in.(collections.Grouper); ok { return g.Group(key, items) } - return nil, fmt.Errorf("grouping not supported for type %T", items) + return nil, fmt.Errorf("grouping not supported for type %T %T", items, in) } // IsSet returns whether a given array, channel, slice, or map has a key