New pageState, markup
bep committed Jan 11, 2019
1 parent 5b01424 commit 8065d24
Showing 10 changed files with 200 additions and 79 deletions.
24 changes: 12 additions & 12 deletions hugolib/hugo_sites.go
@@ -140,22 +140,23 @@ func (h *HugoSites) langSite() map[string]*Site {
 // Returns nil if none found.
 func (h *HugoSites) GetContentPage(filename string) page.Page {
     for _, s := range h.Sites {
-        pos := findPagePosByFilename(s.rawAllPages, filename)
+        // TODO(bep) page remove the non-receiver variant in this and others
+        pos := s.rawAllPages.findPagePosByFilename(filename)
         if pos == -1 {
             continue
         }
-        return s.rawAllPages[pos]
+        return s.rawAllPages[pos].p
     }

     // If not found already, this may be bundled in another content file.
     dir := filepath.Dir(filename)

     for _, s := range h.Sites {
-        pos := findPagePosByFilnamePrefix(s.rawAllPages, dir)
+        pos := s.rawAllPages.findPagePosByFilnamePrefix(dir)
         if pos == -1 {
             continue
         }
-        return s.rawAllPages[pos]
+        return s.rawAllPages[pos].p
     }
     return nil
 }
@@ -639,23 +640,22 @@ func (h *HugoSites) setupTranslations() {
     for _, s := range h.Sites {
         for _, p := range s.rawAllPages {
-            // TODO(bep) page .(*Page) and all others
-            pp := p.(*Page)
-            if p.Kind() == kindUnknown {
+            pp := p.p
+            if pp.Kind() == kindUnknown {
                 pp.kind = pp.kindFromSections()

             }

-            if !pp.s.isEnabled(p.Kind()) {
+            if !pp.s.isEnabled(pp.Kind()) {
                 continue
             }

-            shouldBuild := s.shouldBuild(p)
-            s.updateBuildStats(p)
+            shouldBuild := s.shouldBuild(pp)
+            s.updateBuildStats(pp)
             if shouldBuild {
                 if pp.headless {
-                    s.headlessPages = append(s.headlessPages, p)
+                    s.headlessPages = append(s.headlessPages, p.p)
                 } else {
-                    s.Pages = append(s.Pages, p)
+                    s.Pages = append(s.Pages, p.p)
                 }
             }
         }
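
The loop above no longer needs the pp := p.(*Page) type assertion: rawAllPages now holds pageState wrappers, and the concrete *Page is reached through the p field. A minimal standalone sketch of that difference, using stand-in types rather than Hugo's own:

package main

import "fmt"

// Stand-in types for illustration only, not Hugo's real definitions.
type Page struct{ title string }

func (p *Page) Kind() string { return "page" }

// pageState-style wrapper: the concrete *Page is a plain field,
// so call sites use ps.p instead of a p.(*Page) type assertion.
type pageState struct {
    p *Page
}

func main() {
    pages := []*pageState{
        {p: &Page{title: "hello"}},
    }

    for _, ps := range pages {
        pp := ps.p // field access replaces the old type assertion
        fmt.Println(pp.title, pp.Kind())
    }
}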
43 changes: 28 additions & 15 deletions hugolib/page.go
@@ -169,8 +169,8 @@ type Page struct {
     // PageMeta contains page stats such as word count etc.
     PageMeta

-    // Markup contains the markup type for the content.
-    Markup string
+    // markup contains the markup type for the content.
+    markup string

     extension string
     contentType string
@@ -648,7 +648,7 @@ func (p *Page) Authors() AuthorList {
 // Returns the page as summary and main.
 func (p *Page) setUserDefinedSummary(rawContentCopy []byte) (*summaryContent, error) {

-    sc, err := splitUserDefinedSummaryAndContent(p.Markup, rawContentCopy)
+    sc, err := splitUserDefinedSummaryAndContent(p.markup, rawContentCopy)

     if err != nil {
         return nil, err
@@ -760,7 +760,7 @@ func (p *Page) setAutoSummary() error {

 func (p *Page) renderContent(content []byte) []byte {
     return p.s.ContentSpec.RenderBytes(&helpers.RenderingContext{
-        Content: content, RenderTOC: true, PageFmt: p.Markup,
+        Content: content, RenderTOC: true, PageFmt: p.markup,
         Cfg: p.Language(),
         DocumentID: p.File().UniqueID(), DocumentName: p.File().Path(),
         Config: p.getRenderingConfig()})
@@ -791,19 +791,21 @@ func (p *Page) getRenderingConfig() *helpers.BlackFriday {
     return p.renderingConfig
 }

-func (s *Site) newPage(filename string) *Page {
+func (s *Site) newPage(filename string) (*Page, error) {
     fi := newFileInfo(
         s.SourceSpec,
         s.absContentDir(),
         filename,
         nil,
         bundleNot,
     )
-    return s.newPageFromFile(fi)
+
+    // TODO(bep) page unify
+    return s.newPageFromFile(fi, nil)
 }

-func (s *Site) newPageFromFile(fi *fileInfo) *Page {
-    return &Page{
+func (s *Site) newPageFromFile(fi *fileInfo, r io.Reader) (*Page, error) {
+    p := &Page{
         pageInit: &pageInit{},
         pageContentInit: &pageContentInit{},
         kind: kindFromFileInfo(fi),
@@ -816,6 +818,14 @@ func (s *Site) newPageFromFile(fi *fileInfo) *Page {
         Site: &s.Info,
         s: s,
     }
+
+    if r != nil {
+        if _, err := p.ReadFrom(r); err != nil {
+            return nil, err
+        }
+    }
+
+    return p, nil
 }

 func (p *Page) IsRenderable() bool {
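
For context, newPage and newPageFromFile now return an error, and newPageFromFile takes an optional io.Reader that, when non-nil, is read into the page via ReadFrom. A rough sketch of that constructor shape in isolation — the doc type and newDoc name below are placeholders, not Hugo's API:

package main

import (
    "fmt"
    "io"
    "strings"
)

// doc stands in for the object being constructed.
type doc struct {
    content string
}

// newDoc mirrors the newPageFromFile shape: the reader is optional,
// and a read failure is returned to the caller instead of being dropped.
func newDoc(r io.Reader) (*doc, error) {
    d := &doc{}

    if r != nil {
        b, err := io.ReadAll(r)
        if err != nil {
            return nil, err
        }
        d.content = string(b)
    }

    return d, nil
}

func main() {
    d, err := newDoc(strings.NewReader("---\ntitle: Hello\n---\nBody"))
    if err != nil {
        panic(err)
    }
    fmt.Println(len(d.content))
}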
@@ -863,7 +873,10 @@ func (s *Site) NewPage(name string) (*Page, error) {
     }

     // Create new page
-    p := s.newPage(name)
+    p, err := s.newPage(name)
+    if err != nil {
+        return nil, err
+    }
     p.s = s
     p.Site = &s.Info

@@ -1132,7 +1145,7 @@ func (p *Page) prepareContent() error {
         return err
     }

-    if p.Markup != "html" && p.source.hasSummaryDivider {
+    if p.markup != "html" && p.source.hasSummaryDivider {

         // Now we know enough to create a summary of the page and count some words
         summaryContent, err := p.setUserDefinedSummary(workContentCopy)
@@ -1261,8 +1274,8 @@ func (p *Page) updateMetaData(frontmatter map[string]interface{}) error {
         p.Layout = cast.ToString(v)
         p.params[loki] = p.Layout
     case "markup":
-        p.Markup = cast.ToString(v)
-        p.params[loki] = p.Markup
+        p.markup = cast.ToString(v)
+        p.params[loki] = p.markup
     case "weight":
         p.weight = cast.ToInt(v)
         p.params[loki] = p.weight
@@ -1360,10 +1373,10 @@ func (p *Page) updateMetaData(frontmatter map[string]interface{}) error {
     }

     // Try markup explicitly set in the frontmatter
-    p.Markup = helpers.GuessType(p.Markup)
-    if p.Markup == "unknown" {
+    p.markup = helpers.GuessType(p.markup)
+    if p.markup == "unknown" {
         // Fall back to file extension (might also return "unknown")
-        p.Markup = helpers.GuessType(p.File().Ext())
+        p.markup = helpers.GuessType(p.File().Ext())
     }

     if draft != nil && published != nil {
82 changes: 82 additions & 0 deletions hugolib/page_buildstate.go
@@ -0,0 +1,82 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+    "strings"
+
+    "github.com/gohugoio/hugo/resources/page"
+)
+
+// TODO(bep) page name etc.
+type pageState struct {
+    s *Site
+    p *Page
+
+    workContent []byte
+
+    forceRender bool
+}
+
+func (p *pageState) contentMarkupType() string {
+    if p.p.markup != "" {
+        return p.p.markup
+
+    }
+    return p.p.File().Ext()
+}
+
+type pageStatePages []*pageState
+
+func (ps pageStatePages) findPagePosByFilename(filename string) int {
+    for i, x := range ps {
+        if x.p.File().Filename() == filename {
+            return i
+        }
+    }
+    return -1
+}
+
+func (ps pageStatePages) findPagePosByFilnamePrefix(prefix string) int {
+    if prefix == "" {
+        return -1
+    }
+
+    lenDiff := -1
+    currPos := -1
+    prefixLen := len(prefix)
+
+    // Find the closest match
+    for i, x := range ps {
+        if strings.HasPrefix(x.p.File().Filename(), prefix) {
+            diff := len(x.p.File().Filename()) - prefixLen
+            if lenDiff == -1 || diff < lenDiff {
+                lenDiff = diff
+                currPos = i
+            }
+        }
+    }
+    return currPos
+}
+
+// findPagePos Given a page, it will find the position in Pages
+// will return -1 if not found
+func (ps pageStatePages) findPagePos(page page.Page) int {
+    for i, x := range ps {
+        if x.p.File().Filename() == page.File().Filename() {
+            return i
+        }
+    }
+    return -1
+}
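
findPagePosByFilnamePrefix picks, among all filenames that start with the given prefix, the one with the fewest extra characters — the closest match — which is how a bundle's index file wins over other files in the same directory. A small self-contained illustration of that selection rule over plain strings (the sample paths are made up):

package main

import (
    "fmt"
    "strings"
)

// findPosByPrefix mirrors the closest-match rule of
// findPagePosByFilnamePrefix, but over a plain string slice:
// among entries starting with prefix, keep the shortest one;
// return -1 if nothing matches.
func findPosByPrefix(filenames []string, prefix string) int {
    if prefix == "" {
        return -1
    }

    lenDiff := -1
    currPos := -1

    for i, name := range filenames {
        if strings.HasPrefix(name, prefix) {
            diff := len(name) - len(prefix)
            if lenDiff == -1 || diff < lenDiff {
                lenDiff = diff
                currPos = i
            }
        }
    }
    return currPos
}

func main() {
    files := []string{
        "/content/blog/post/images/a.jpg",
        "/content/blog/post/index.md",
    }
    fmt.Println(findPosByPrefix(files, "/content/blog/post")) // 1 (index.md)
}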
2 changes: 1 addition & 1 deletion hugolib/page_taxonomy_test.go
@@ -66,7 +66,7 @@ func TestParseTaxonomies(t *testing.T) {
     } {

         s := newTestSite(t)
-        p, _ := s.NewPage("page/with/taxonomy")
+        p, _ := s.newPage("page/with/taxonomy")
         _, err := p.ReadFrom(strings.NewReader(test))
         if err != nil {
             t.Fatalf("Failed parsing %q: %s", test, err)
24 changes: 12 additions & 12 deletions hugolib/page_test.go
@@ -458,7 +458,7 @@ func checkError(t *testing.T, err error, expected string) {
 func TestDegenerateEmptyPageZeroLengthName(t *testing.T) {
     t.Parallel()
     s := newTestSite(t)
-    _, err := s.NewPage("")
+    _, err := s.newPage("")
     if err == nil {
         t.Fatalf("A zero length page name must return an error")
     }
@@ -1083,7 +1083,7 @@ func TestCreatePage(t *testing.T) {

     for i, test := range tests {
         s := newTestSite(t)
-        p, _ := s.NewPage("page")
+        p, _ := s.newPage("page")
         if _, err := p.ReadFrom(strings.NewReader(test.r)); err != nil {
             t.Fatalf("[%d] Unable to parse page: %s", i, err)
         }
@@ -1100,7 +1100,7 @@ func TestDegenerateInvalidFrontMatterShortDelim(t *testing.T) {
     }
     for _, test := range tests {
         s := newTestSite(t)
-        p, _ := s.NewPage("invalid/front/matter/short/delim")
+        p, _ := s.newPage("invalid/front/matter/short/delim")
         _, err := p.ReadFrom(strings.NewReader(test.r))
         checkError(t, err, test.err)
     }
@@ -1124,7 +1124,7 @@ func TestShouldRenderContent(t *testing.T) {

     for i, test := range tests {
         s := newTestSite(t)
-        p, _ := s.NewPage("render/front/matter")
+        p, _ := s.newPage("render/front/matter")
         _, err := p.ReadFrom(strings.NewReader(test.text))
         msg := fmt.Sprintf("test %d", i)
         assert.NoError(err, msg)
@@ -1136,13 +1136,13 @@ func TestCalendarParamsVariants(t *testing.T) {
 func TestCalendarParamsVariants(t *testing.T) {
     t.Parallel()
     s := newTestSite(t)
-    pageJSON, _ := s.NewPage("test/fileJSON.md")
+    pageJSON, _ := s.newPage("test/fileJSON.md")
     _, _ = pageJSON.ReadFrom(strings.NewReader(pageWithCalendarJSONFrontmatter))

-    pageYAML, _ := s.NewPage("test/fileYAML.md")
+    pageYAML, _ := s.newPage("test/fileYAML.md")
     _, _ = pageYAML.ReadFrom(strings.NewReader(pageWithCalendarYAMLFrontmatter))

-    pageTOML, _ := s.NewPage("test/fileTOML.md")
+    pageTOML, _ := s.newPage("test/fileTOML.md")
     _, _ = pageTOML.ReadFrom(strings.NewReader(pageWithCalendarTOMLFrontmatter))

     assert.True(t, compareObjects(pageJSON.params, pageYAML.params))
@@ -1153,7 +1153,7 @@ func TestCalendarParamsVariants(t *testing.T) {
 func TestDifferentFrontMatterVarTypes(t *testing.T) {
     t.Parallel()
     s := newTestSite(t)
-    page, _ := s.NewPage("test/file1.md")
+    page, _ := s.newPage("test/file1.md")
     _, _ = page.ReadFrom(strings.NewReader(pageWithVariousFrontmatterTypes))

     dateval, _ := time.Parse(time.RFC3339, "1979-05-27T07:32:00Z")
@@ -1184,7 +1184,7 @@ func TestDifferentFrontMatterVarTypes(t *testing.T) {
 func TestDegenerateInvalidFrontMatterLeadingWhitespace(t *testing.T) {
     t.Parallel()
     s := newTestSite(t)
-    p, _ := s.NewPage("invalid/front/matter/leading/ws")
+    p, _ := s.newPage("invalid/front/matter/leading/ws")
     _, err := p.ReadFrom(strings.NewReader(invalidFrontmatterLadingWs))
     if err != nil {
         t.Fatalf("Unable to parse front matter given leading whitespace: %s", err)
@@ -1194,7 +1194,7 @@ func TestDegenerateInvalidFrontMatterLeadingWhitespace(t *testing.T) {
 func TestSectionEvaluation(t *testing.T) {
     t.Parallel()
     s := newTestSite(t)
-    page, _ := s.NewPage(filepath.FromSlash("blue/file1.md"))
+    page, _ := s.newPage(filepath.FromSlash("blue/file1.md"))
     page.ReadFrom(strings.NewReader(simplePage))
     if page.Section() != "blue" {
         t.Errorf("Section should be %s, got: %s", "blue", page.Section())
@@ -1419,7 +1419,7 @@ func TestPageSimpleMethods(t *testing.T) {
         {func(p *Page) bool { return strings.Join(p.PlainWords(), " ") == "Do Be Do Be Do" }},
     } {

-        p, _ := s.NewPage("Test")
+        p, _ := s.newPage("Test")
         p.workContent = []byte("<h1>Do Be Do Be Do</h1>")
         p.resetContent()
         if !this.assertFunc(p) {
@@ -1930,7 +1930,7 @@ func BenchmarkParsePage(b *testing.B) {
     buf.ReadFrom(f)
     b.ResetTimer()
     for i := 0; i < b.N; i++ {
-        page, _ := s.NewPage("bench")
+        page, _ := s.newPage("bench")
         page.ReadFrom(bytes.NewReader(buf.Bytes()))
     }
 }
4 changes: 2 additions & 2 deletions hugolib/pagebundler.go
@@ -45,7 +45,7 @@ type siteContentProcessor struct {
     numWorkers int

     // The output Pages
-    pagesChan chan *Page
+    pagesChan chan *pageState

     // Used for partial rebuilds (aka. live reload)
     // Will signal replacement of pages in the site collection.
@@ -90,7 +90,7 @@ func newSiteContentProcessor(ctx context.Context, partialBuild bool, s *Site) *s
         fileSinglesChan: make(chan *fileInfo, numWorkers),
         fileAssetsChan: make(chan []pathLangFile, numWorkers),
         numWorkers: numWorkers,
-        pagesChan: make(chan *Page, numWorkers),
+        pagesChan: make(chan *pageState, numWorkers),
     }
 }

