diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 3a76b768c56..1fd70492ef8 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -87,6 +87,12 @@ jobs:
curl -LJO "https://github.com/sass/dart-sass/releases/download/${env:SASS_VERSION}/dart-sass-${env:SASS_VERSION}-windows-x64.zip";
Expand-Archive -Path "dart-sass-${env:SASS_VERSION}-windows-x64.zip" -DestinationPath .;
echo "$env:GITHUB_WORKSPACE/dart-sass/" | Out-File -FilePath $Env:GITHUB_PATH -Encoding utf-8 -Append
+ - if: matrix.os == 'ubuntu-latest'
+ name: Install staticcheck
+ run: go install honnef.co/go/tools/cmd/staticcheck@latest
+ - if: matrix.os == 'ubuntu-latest'
+ name: Run staticcheck
+ run: staticcheck ./...
- if: matrix.os != 'windows-latest'
name: Check
run: |
diff --git a/cache/dynacache/dynacache.go b/cache/dynacache/dynacache.go
new file mode 100644
index 00000000000..bb3f7b09877
--- /dev/null
+++ b/cache/dynacache/dynacache.go
@@ -0,0 +1,550 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
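+// Package dynacache provides a dynamic, partitioned in-memory cache backed by
+// LRU partitions whose max sizes are adjusted at runtime based on memory usage.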
+package dynacache
+
+import (
+ "context"
+ "fmt"
+ "math"
+ "path"
+ "regexp"
+ "runtime"
+ "sync"
+ "time"
+
+ "github.com/bep/lazycache"
+ "github.com/bep/logg"
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/common/paths"
+ "github.com/gohugoio/hugo/common/rungroup"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
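+// minMaxSize is the lower bound for any partition's max size.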
+const minMaxSize = 10
+
+// New creates a new cache.
+func New(opts Options) *Cache {
+ if opts.CheckInterval == 0 {
+ opts.CheckInterval = time.Second * 2
+ }
+
+ if opts.MaxSize == 0 {
+ opts.MaxSize = 100000
+ }
+ if opts.Log == nil {
+ panic("nil Log")
+ }
+
+ if opts.MinMaxSize == 0 {
+ opts.MinMaxSize = 30
+ }
+
+ stats := &stats{
+ opts: opts,
+ adjustmentFactor: 1.0,
+ currentMaxSize: opts.MaxSize,
+ availableMemory: config.GetMemoryLimit(),
+ }
+
+ infol := opts.Log.InfoCommand("dynacache")
+
+ c := &Cache{
+ partitions: make(map[string]PartitionManager),
+ opts: opts,
+ stats: stats,
+ infol: infol,
+ }
+
+ c.stop = c.start()
+
+ return c
+}
+
+// Options for the cache.
+type Options struct {
+ Log loggers.Logger
+ CheckInterval time.Duration
+ MaxSize int
+ MinMaxSize int
+ Running bool
+}
+
+// Options for a partition.
+type OptionsPartition struct {
+	// When to clear this partition.
+ ClearWhen ClearWhen
+
+	// Weight is a number between 1 and 100 that indicates, in general, how big this partition may get.
+ Weight int
+}
+
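+// WeightFraction returns the Weight as a fraction of 100.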
+func (o OptionsPartition) WeightFraction() float64 {
+ return float64(o.Weight) / 100
+}
+
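+// CalculateMaxSize returns the max size for this partition given the per-partition budget maxSizePerPartition.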
+func (o OptionsPartition) CalculateMaxSize(maxSizePerPartition int) int {
+ return int(math.Floor(float64(maxSizePerPartition) * o.WeightFraction()))
+}
+
+// A dynamic partitioned cache.
+type Cache struct {
+ mu sync.RWMutex
+
+ partitions map[string]PartitionManager
+ opts Options
+ infol logg.LevelLogger
+
+ stats *stats
+ stopOnce sync.Once
+ stop func()
+}
+
+// ClearMatching clears all entries, in all partitions, for which the predicate returns true.
+func (c *Cache) ClearMatching(predicate func(k, v any) bool) {
+ g := rungroup.Run[PartitionManager](context.Background(), rungroup.Config[PartitionManager]{
+ NumWorkers: len(c.partitions),
+ Handle: func(ctx context.Context, partition PartitionManager) error {
+ partition.clearMatching(predicate)
+ return nil
+ },
+ })
+
+ for _, p := range c.partitions {
+ g.Enqueue(p)
+ }
+
+ g.Wait()
+}
+
+// ClearOnRebuild prepares the cache for a new rebuild taking the given changeset into account.
+func (c *Cache) ClearOnRebuild(changeset ...identity.Identity) {
+ g := rungroup.Run[PartitionManager](context.Background(), rungroup.Config[PartitionManager]{
+ NumWorkers: len(c.partitions),
+ Handle: func(ctx context.Context, partition PartitionManager) error {
+ partition.clearOnRebuild(changeset...)
+ return nil
+ },
+ })
+
+ for _, p := range c.partitions {
+ g.Enqueue(p)
+ }
+
+ g.Wait()
+
+ // Clear any entries marked as stale above.
+ g = rungroup.Run[PartitionManager](context.Background(), rungroup.Config[PartitionManager]{
+ NumWorkers: len(c.partitions),
+ Handle: func(ctx context.Context, partition PartitionManager) error {
+ partition.clearStale()
+ return nil
+ },
+ })
+
+ for _, p := range c.partitions {
+ g.Enqueue(p)
+ }
+
+ g.Wait()
+}
+
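+// keysProvider is implemented by partitions that can list their keys.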
+type keysProvider interface {
+ Keys() []string
+}
+
+// Keys returns a list of keys in all partitions.
+func (c *Cache) Keys(predicate func(s string) bool) []string {
+ if predicate == nil {
+ predicate = func(s string) bool { return true }
+ }
+ var keys []string
+ for pn, g := range c.partitions {
+ pkeys := g.(keysProvider).Keys()
+ for _, k := range pkeys {
+ p := path.Join(pn, k)
+ if predicate(p) {
+ keys = append(keys, p)
+ }
+ }
+
+ }
+ return keys
+}
+
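+// calculateMaxSizePerPartition calculates the per-partition size budget, normalized by the
+// average partition weight so that the weighted partition sizes sum to maxItemsTotal.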
+func calculateMaxSizePerPartition(maxItemsTotal, totalWeightQuantity, numPartitions int) int {
+ if numPartitions == 0 {
+ panic("numPartitions must be > 0")
+ }
+ if totalWeightQuantity == 0 {
+ panic("totalWeightQuantity must be > 0")
+ }
+
+ avgWeight := float64(totalWeightQuantity) / float64(numPartitions)
+ return int(math.Floor(float64(maxItemsTotal) / float64(numPartitions) * (100.0 / avgWeight)))
+}
+
+// Stop stops the cache.
+func (c *Cache) Stop() {
+ c.stopOnce.Do(func() {
+ c.stop()
+ })
+}
+
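+// adjustCurrentMaxSize adjusts the cache's total and per-partition max sizes based on the current memory usage.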
+func (c *Cache) adjustCurrentMaxSize() {
+ c.mu.RLock()
+ defer c.mu.RUnlock()
+
+ if len(c.partitions) == 0 {
+ return
+ }
+ var m runtime.MemStats
+ runtime.ReadMemStats(&m)
+ s := c.stats
+ s.memstatsCurrent = m
+ // fmt.Printf("\n\nAvailable = %v\nAlloc = %v\nTotalAlloc = %v\nSys = %v\nNumGC = %v\nMaxSize = %d\nAdjustmentFactor=%f\n\n", helpers.FormatByteCount(s.availableMemory), helpers.FormatByteCount(m.Alloc), helpers.FormatByteCount(m.TotalAlloc), helpers.FormatByteCount(m.Sys), m.NumGC, c.stats.currentMaxSize, s.adjustmentFactor)
+
+ if s.availableMemory >= s.memstatsCurrent.Alloc {
+ if s.adjustmentFactor <= 1.0 {
+ s.adjustmentFactor += 0.2
+ }
+ } else {
+ // We're low on memory.
+ s.adjustmentFactor -= 0.4
+ }
+
+ if s.adjustmentFactor <= 0 {
+ s.adjustmentFactor = 0.05
+ }
+
+ if !s.adjustCurrentMaxSize() {
+ return
+ }
+
+ totalWeight := 0
+ for _, pm := range c.partitions {
+ totalWeight += pm.getOptions().Weight
+ }
+
+ maxSizePerPartition := calculateMaxSizePerPartition(c.stats.currentMaxSize, totalWeight, len(c.partitions))
+
+ evicted := 0
+ for _, p := range c.partitions {
+ evicted += p.adjustMaxSize(p.getOptions().CalculateMaxSize(maxSizePerPartition))
+ }
+
+ if evicted > 0 {
+ c.infol.
+ WithFields(
+ logg.Fields{
+ {Name: "evicted", Value: evicted},
+ {Name: "numGC", Value: m.NumGC},
+ {Name: "limit", Value: helpers.FormatByteCount(c.stats.availableMemory)},
+ {Name: "alloc", Value: helpers.FormatByteCount(m.Alloc)},
+ {Name: "totalAlloc", Value: helpers.FormatByteCount(m.TotalAlloc)},
+ },
+ ).Logf("adjusted partitions' max size")
+ }
+}
+
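+// start starts a loop that periodically adjusts the cache size; the returned function stops it.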
+func (c *Cache) start() func() {
+ ticker := time.NewTicker(c.opts.CheckInterval)
+ quit := make(chan struct{})
+
+ go func() {
+ for {
+ select {
+ case <-ticker.C:
+ c.adjustCurrentMaxSize()
+ case <-quit:
+ ticker.Stop()
+ return
+ }
+ }
+ }()
+
+ return func() {
+ close(quit)
+ }
+}
+
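+// Partition names are Unix-styled paths with 1 to 3 elements, where the first element is exactly 4 characters long, e.g. "/page/global".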
+var partitionNameRe = regexp.MustCompile(`^\/[a-zA-Z0-9]{4}(\/[a-zA-Z0-9]+)?(\/[a-zA-Z0-9]+)?`)
+
+// GetOrCreatePartition gets or creates a partition with the given name.
+func GetOrCreatePartition[K comparable, V any](c *Cache, name string, opts OptionsPartition) *Partition[K, V] {
+ if c == nil {
+ panic("nil Cache")
+ }
+ if opts.Weight < 1 || opts.Weight > 100 {
+ panic("invalid Weight, must be between 1 and 100")
+ }
+
+ if partitionNameRe.FindString(name) != name {
+ panic(fmt.Sprintf("invalid partition name %q", name))
+ }
+
+ c.mu.RLock()
+ p, found := c.partitions[name]
+ c.mu.RUnlock()
+ if found {
+ return p.(*Partition[K, V])
+ }
+
+ c.mu.Lock()
+ defer c.mu.Unlock()
+
+ // Double check.
+ p, found = c.partitions[name]
+ if found {
+ return p.(*Partition[K, V])
+ }
+
+	// At this point, we don't know the number of partitions or their configuration, but
+ // this will be re-adjusted later.
+ const numberOfPartitionsEstimate = 10
+ maxSize := opts.CalculateMaxSize(c.opts.MaxSize / numberOfPartitionsEstimate)
+
+ // Create a new partition and cache it.
+ partition := &Partition[K, V]{
+ c: lazycache.New(lazycache.Options[K, V]{MaxEntries: maxSize}),
+ maxSize: maxSize,
+ trace: c.opts.Log.Logger().WithLevel(logg.LevelTrace).WithField("partition", name),
+ opts: opts,
+ }
+ c.partitions[name] = partition
+
+ return partition
+}
+
+// Partition is a partition in the cache.
+type Partition[K comparable, V any] struct {
+ c *lazycache.Cache[K, V]
+
+ zero V
+
+ trace logg.LevelLogger
+ opts OptionsPartition
+
+ maxSize int
+}
+
+// GetOrCreate gets or creates a value for the given key.
+func (p *Partition[K, V]) GetOrCreate(key K, create func(key K) (V, error)) (V, error) {
+ v, _, err := p.c.GetOrCreate(key, create)
+ return v, err
+}
+
+// GetOrCreateWitTimeout gets or creates a value for the given key and times out if the create function
+// takes too long.
+func (p *Partition[K, V]) GetOrCreateWitTimeout(key K, duration time.Duration, create func(key K) (V, error)) (V, error) {
+ resultch := make(chan V, 1)
+ errch := make(chan error, 1)
+
+ go func() {
+ v, _, err := p.c.GetOrCreate(key, create)
+ if err != nil {
+ errch <- err
+ return
+ }
+ resultch <- v
+ }()
+
+ select {
+ case v := <-resultch:
+ return v, nil
+ case err := <-errch:
+ return p.zero, err
+ case <-time.After(duration):
+ return p.zero, &herrors.TimeoutError{
+ Duration: duration,
+ }
+ }
+}
+
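+// clearMatching deletes all entries in this partition for which the predicate returns true.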
+func (p *Partition[K, V]) clearMatching(predicate func(k, v any) bool) {
+ p.c.DeleteFunc(func(key K, v V) bool {
+ if predicate(key, v) {
+ p.trace.Log(
+ logg.StringFunc(
+ func() string {
+ return fmt.Sprintf("clearing cache key %v", key)
+ },
+ ),
+ )
+ return true
+ }
+ return false
+ })
+}
+
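+// clearOnRebuild clears the partition on a rebuild, honoring its ClearWhen strategy and the given changeset.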
+func (p *Partition[K, V]) clearOnRebuild(changeset ...identity.Identity) {
+ opts := p.getOptions()
+ if opts.ClearWhen == ClearNever {
+ return
+ }
+
+ if opts.ClearWhen == ClearOnRebuild {
+ // Clear all.
+ p.Clear()
+ return
+ }
+
+ depsFinder := identity.NewFinder(identity.FinderConfig{})
+
+ shouldDelete := func(key K, v V) bool {
+ // We always clear elements marked as stale.
+ if resource.IsStaleAny(v) {
+ return true
+ }
+
+		// Now check if this entry has changed based on the changeset,
+		// which is built from filesystem events.
+ if len(changeset) == 0 {
+ // Nothing changed.
+ return false
+ }
+
+ var probablyDependent bool
+ identity.WalkIdentitiesShallow(v, func(level int, id2 identity.Identity) bool {
+ for _, id := range changeset {
+ if r := depsFinder.Contains(id, id2, -1); r > 0 {
+ // It's probably dependent, evict from cache.
+ probablyDependent = true
+ return true
+ }
+ }
+ return false
+ })
+
+ return probablyDependent
+ }
+
+ // First pass.
+ // Second pass needs to be done in a separate loop to catch any
+ // elements marked as stale in the other partitions.
+ p.c.DeleteFunc(func(key K, v V) bool {
+ if shouldDelete(key, v) {
+ p.trace.Log(
+ logg.StringFunc(
+ func() string {
+ return fmt.Sprintf("first pass: clearing cache key %v", key)
+ },
+ ),
+ )
+ resource.MarkStale(v)
+ return true
+ }
+ return false
+ })
+}
+
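+// Keys returns the keys in this partition.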
+func (p *Partition[K, V]) Keys() []K {
+ var keys []K
+ p.c.DeleteFunc(func(key K, v V) bool {
+ keys = append(keys, key)
+ return false
+ })
+ return keys
+}
+
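+// clearStale deletes all entries marked as stale.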
+func (p *Partition[K, V]) clearStale() {
+ p.c.DeleteFunc(func(key K, v V) bool {
+ isStale := resource.IsStaleAny(v)
+ if isStale {
+ p.trace.Log(
+ logg.StringFunc(
+ func() string {
+ return fmt.Sprintf("second pass: clearing cache key %v", key)
+ },
+ ),
+ )
+ }
+
+ return isStale
+ })
+}
+
+// adjustMaxSize adjusts the max size of the partition and returns the number of items evicted.
+func (p *Partition[K, V]) adjustMaxSize(newMaxSize int) int {
+ if newMaxSize < minMaxSize {
+ newMaxSize = minMaxSize
+ }
+ p.maxSize = newMaxSize
+ // fmt.Println("Adjusting max size of partition from", oldMaxSize, "to", newMaxSize)
+ return p.c.Resize(newMaxSize)
+}
+
+func (p *Partition[K, V]) getMaxSize() int {
+ return p.maxSize
+}
+
+func (p *Partition[K, V]) getOptions() OptionsPartition {
+ return p.opts
+}
+
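+// Clear deletes all entries in this partition.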
+func (p *Partition[K, V]) Clear() {
+ p.c.DeleteFunc(func(key K, v V) bool {
+ return true
+ })
+}
+
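+// Get returns the value for the given key, if found.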
+func (p *Partition[K, V]) Get(ctx context.Context, key K) (V, bool) {
+ return p.c.Get(key)
+}
+
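+// PartitionManager is the interface the Cache uses to manage its partitions.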
+type PartitionManager interface {
+ adjustMaxSize(addend int) int
+ getMaxSize() int
+ getOptions() OptionsPartition
+ clearOnRebuild(changeset ...identity.Identity)
+ clearMatching(predicate func(k, v any) bool)
+ clearStale()
+}
+
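+// When to clear a partition's entries.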
+const (
+ ClearOnRebuild ClearWhen = iota + 1
+ ClearOnChange
+ ClearNever
+)
+
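+// ClearWhen controls when a partition is cleared.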
+type ClearWhen int
+
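+// stats holds the memory stats and the dynamically adjusted max size of the cache.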
+type stats struct {
+ opts Options
+ memstatsCurrent runtime.MemStats
+ currentMaxSize int
+ availableMemory uint64
+
+ adjustmentFactor float64
+}
+
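+// adjustCurrentMaxSize recalculates the current max size from the adjustment factor and reports whether it changed.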
+func (s *stats) adjustCurrentMaxSize() bool {
+ newCurrentMaxSize := int(math.Floor(float64(s.opts.MaxSize) * s.adjustmentFactor))
+
+	if newCurrentMaxSize < s.opts.MinMaxSize {
+ newCurrentMaxSize = int(s.opts.MinMaxSize)
+ }
+ changed := newCurrentMaxSize != s.currentMaxSize
+ s.currentMaxSize = newCurrentMaxSize
+ return changed
+}
+
+// CleanKey turns s into a format suitable for a cache key for this package.
+// The key will be a Unix-styled path with a leading slash but no trailing slash.
+func CleanKey(s string) string {
+ return path.Clean(paths.ToSlashPreserveLeading(s))
+}
diff --git a/cache/dynacache/dynacache_test.go b/cache/dynacache/dynacache_test.go
new file mode 100644
index 00000000000..53de2385e84
--- /dev/null
+++ b/cache/dynacache/dynacache_test.go
@@ -0,0 +1,175 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package dynacache
+
+import (
+ "path/filepath"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+var (
+ _ resource.StaleInfo = (*testItem)(nil)
+ _ identity.Identity = (*testItem)(nil)
+)
+
+type testItem struct {
+ name string
+ isStale bool
+}
+
+func (t testItem) IsStale() bool {
+ return t.isStale
+}
+
+func (t testItem) IdentifierBase() string {
+ return t.name
+}
+
+func TestCache(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ cache := New(Options{
+ Log: loggers.NewDefault(),
+ })
+
+ c.Cleanup(func() {
+ cache.Stop()
+ })
+
+ opts := OptionsPartition{Weight: 30}
+
+ c.Assert(cache, qt.Not(qt.IsNil))
+
+ p1 := GetOrCreatePartition[string, testItem](cache, "/aaaa/bbbb", opts)
+ c.Assert(p1, qt.Not(qt.IsNil))
+
+ p2 := GetOrCreatePartition[string, testItem](cache, "/aaaa/bbbb", opts)
+
+ c.Assert(func() { GetOrCreatePartition[string, testItem](cache, "foo bar", opts) }, qt.PanicMatches, ".*invalid partition name.*")
+ c.Assert(func() { GetOrCreatePartition[string, testItem](cache, "/aaaa/cccc", OptionsPartition{Weight: 1234}) }, qt.PanicMatches, ".*invalid Weight.*")
+
+ c.Assert(p2, qt.Equals, p1)
+
+ p3 := GetOrCreatePartition[string, testItem](cache, "/aaaa/cccc", opts)
+ c.Assert(p3, qt.Not(qt.IsNil))
+ c.Assert(p3, qt.Not(qt.Equals), p1)
+
+ c.Assert(func() { New(Options{}) }, qt.PanicMatches, ".*nil Log.*")
+}
+
+func TestCalculateMaxSizePerPartition(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ c.Assert(calculateMaxSizePerPartition(1000, 500, 5), qt.Equals, 200)
+ c.Assert(calculateMaxSizePerPartition(1000, 250, 5), qt.Equals, 400)
+ c.Assert(func() { calculateMaxSizePerPartition(1000, 250, 0) }, qt.PanicMatches, ".*must be > 0.*")
+ c.Assert(func() { calculateMaxSizePerPartition(1000, 0, 1) }, qt.PanicMatches, ".*must be > 0.*")
+}
+
+func TestCleanKey(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(CleanKey("a/b/c"), qt.Equals, "/a/b/c")
+ c.Assert(CleanKey("/a/b/c"), qt.Equals, "/a/b/c")
+ c.Assert(CleanKey("a/b/c/"), qt.Equals, "/a/b/c")
+ c.Assert(CleanKey(filepath.FromSlash("/a/b/c/")), qt.Equals, "/a/b/c")
+}
+
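+// newTestCache creates a cache with two partitions preloaded with the entries used by the tests.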
+func newTestCache(t *testing.T) *Cache {
+ cache := New(
+ Options{
+ Log: loggers.NewDefault(),
+ },
+ )
+
+ p1 := GetOrCreatePartition[string, testItem](cache, "/aaaa/bbbb", OptionsPartition{Weight: 30, ClearWhen: ClearOnRebuild})
+ p2 := GetOrCreatePartition[string, testItem](cache, "/aaaa/cccc", OptionsPartition{Weight: 30, ClearWhen: ClearOnChange})
+
+ p1.GetOrCreate("clearOnRebuild", func(string) (testItem, error) {
+ return testItem{}, nil
+ })
+
+ p2.GetOrCreate("clearBecauseStale", func(string) (testItem, error) {
+ return testItem{
+ isStale: true,
+ }, nil
+ })
+
+ p2.GetOrCreate("clearBecauseIdentityChanged", func(string) (testItem, error) {
+ return testItem{
+ name: "changed",
+ }, nil
+ })
+
+ p2.GetOrCreate("clearNever", func(string) (testItem, error) {
+ return testItem{
+ isStale: false,
+ }, nil
+ })
+
+ t.Cleanup(func() {
+ cache.Stop()
+ })
+
+ return cache
+}
+
+func TestClear(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ predicateAll := func(string) bool {
+ return true
+ }
+
+ cache := newTestCache(t)
+
+ c.Assert(cache.Keys(predicateAll), qt.HasLen, 4)
+
+ cache.ClearOnRebuild()
+
+ // Stale items are always cleared.
+ c.Assert(cache.Keys(predicateAll), qt.HasLen, 2)
+
+ cache = newTestCache(t)
+ cache.ClearOnRebuild(identity.StringIdentity("changed"))
+
+ c.Assert(cache.Keys(nil), qt.HasLen, 1)
+
+ cache = newTestCache(t)
+
+ cache.ClearMatching(func(k, v any) bool {
+ return k.(string) == "clearOnRebuild"
+ })
+
+ c.Assert(cache.Keys(predicateAll), qt.HasLen, 3)
+
+ cache.adjustCurrentMaxSize()
+}
+
+func TestAdjustCurrentMaxSize(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ cache := newTestCache(t)
+ alloc := cache.stats.memstatsCurrent.Alloc
+ cache.adjustCurrentMaxSize()
+ c.Assert(cache.stats.memstatsCurrent.Alloc, qt.Not(qt.Equals), alloc)
+}
diff --git a/cache/filecache/filecache.go b/cache/filecache/filecache.go
index 414478ee2ec..093d2941c11 100644
--- a/cache/filecache/filecache.go
+++ b/cache/filecache/filecache.go
@@ -24,6 +24,7 @@ import (
"time"
"github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/helpers"
@@ -109,7 +110,7 @@ func (l *lockedFile) Close() error {
func (c *Cache) init() error {
c.initOnce.Do(func() {
// Create the base dir if it does not exist.
- if err := c.Fs.MkdirAll("", 0777); err != nil && !os.IsExist(err) {
+ if err := c.Fs.MkdirAll("", 0o777); err != nil && !os.IsExist(err) {
c.initErr = err
}
})
@@ -146,7 +147,8 @@ func (c *Cache) WriteCloser(id string) (ItemInfo, io.WriteCloser, error) {
// it when done.
func (c *Cache) ReadOrCreate(id string,
read func(info ItemInfo, r io.ReadSeeker) error,
- create func(info ItemInfo, w io.WriteCloser) error) (info ItemInfo, err error) {
+ create func(info ItemInfo, w io.WriteCloser) error,
+) (info ItemInfo, err error) {
if err := c.init(); err != nil {
return ItemInfo{}, err
}
@@ -380,7 +382,7 @@ func NewCaches(p *helpers.PathSpec) (Caches, error) {
baseDir := v.DirCompiled
- bfs := afero.NewBasePathFs(cfs, baseDir)
+ bfs := hugofs.NewBasePathFs(cfs, baseDir)
var pruneAllRootDir string
if k == CacheKeyModules {
diff --git a/cache/filecache/filecache_test.go b/cache/filecache/filecache_test.go
index 61f9eda6429..59fb0927678 100644
--- a/cache/filecache/filecache_test.go
+++ b/cache/filecache/filecache_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -17,7 +17,6 @@ import (
"errors"
"fmt"
"io"
- "path/filepath"
"strings"
"sync"
"testing"
@@ -86,17 +85,8 @@ dir = ":cacheDir/c"
cache := caches.Get("GetJSON")
c.Assert(cache, qt.Not(qt.IsNil))
- bfs, ok := cache.Fs.(*afero.BasePathFs)
- c.Assert(ok, qt.Equals, true)
- filename, err := bfs.RealPath("key")
- c.Assert(err, qt.IsNil)
-
cache = caches.Get("Images")
c.Assert(cache, qt.Not(qt.IsNil))
- bfs, ok = cache.Fs.(*afero.BasePathFs)
- c.Assert(ok, qt.Equals, true)
- filename, _ = bfs.RealPath("key")
- c.Assert(filename, qt.Equals, filepath.FromSlash("_gen/images/key"))
rf := func(s string) func() (io.ReadCloser, error) {
return func() (io.ReadCloser, error) {
diff --git a/cache/filecache/integration_test.go b/cache/filecache/integration_test.go
index a8a45988e02..1e920c29f28 100644
--- a/cache/filecache/integration_test.go
+++ b/cache/filecache/integration_test.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -15,7 +15,6 @@ package filecache_test
import (
"path/filepath"
-
"testing"
"time"
@@ -47,7 +46,6 @@ title: "Home"
_, err := b.H.BaseFs.ResourcesCache.Stat(filepath.Join("_gen", "images"))
b.Assert(err, qt.IsNil)
-
}
func TestPruneImages(t *testing.T) {
@@ -55,6 +53,7 @@ func TestPruneImages(t *testing.T) {
// TODO(bep)
t.Skip("skip flaky test on CI server")
}
+ t.Skip("skip flaky test")
files := `
-- hugo.toml --
baseURL = "https://example.com"
@@ -92,7 +91,7 @@ iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAA
// TODO(bep) we need a way to test full rebuilds.
// For now, just sleep a little so the cache elements expires.
- time.Sleep(300 * time.Millisecond)
+ time.Sleep(500 * time.Millisecond)
b.RenameFile("assets/a/pixel.png", "assets/b/pixel2.png").Build()
@@ -104,5 +103,4 @@ iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAA
b.Assert(err, qt.Not(qt.IsNil))
_, err = b.H.BaseFs.ResourcesCache.Stat(imagesCacheDir)
b.Assert(err, qt.IsNil)
-
}
diff --git a/cache/namedmemcache/named_cache.go b/cache/namedmemcache/named_cache.go
deleted file mode 100644
index 7fb4fe8edb8..00000000000
--- a/cache/namedmemcache/named_cache.go
+++ /dev/null
@@ -1,78 +0,0 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package namedmemcache provides a memory cache with a named lock. This is suitable
-// for situations where creating the cached resource can be time consuming or otherwise
-// resource hungry, or in situations where a "once only per key" is a requirement.
-package namedmemcache
-
-import (
- "sync"
-
- "github.com/BurntSushi/locker"
-)
-
-// Cache holds the cached values.
-type Cache struct {
- nlocker *locker.Locker
- cache map[string]cacheEntry
- mu sync.RWMutex
-}
-
-type cacheEntry struct {
- value any
- err error
-}
-
-// New creates a new cache.
-func New() *Cache {
- return &Cache{
- nlocker: locker.NewLocker(),
- cache: make(map[string]cacheEntry),
- }
-}
-
-// Clear clears the cache state.
-func (c *Cache) Clear() {
- c.mu.Lock()
- defer c.mu.Unlock()
-
- c.cache = make(map[string]cacheEntry)
- c.nlocker = locker.NewLocker()
-}
-
-// GetOrCreate tries to get the value with the given cache key, if not found
-// create will be called and cached.
-// This method is thread safe. It also guarantees that the create func for a given
-// key is invoked only once for this cache.
-func (c *Cache) GetOrCreate(key string, create func() (any, error)) (any, error) {
- c.mu.RLock()
- entry, found := c.cache[key]
- c.mu.RUnlock()
-
- if found {
- return entry.value, entry.err
- }
-
- c.nlocker.Lock(key)
- defer c.nlocker.Unlock(key)
-
- // Create it.
- value, err := create()
-
- c.mu.Lock()
- c.cache[key] = cacheEntry{value: value, err: err}
- c.mu.Unlock()
-
- return value, err
-}
diff --git a/cache/namedmemcache/named_cache_test.go b/cache/namedmemcache/named_cache_test.go
deleted file mode 100644
index 2db923d7659..00000000000
--- a/cache/namedmemcache/named_cache_test.go
+++ /dev/null
@@ -1,80 +0,0 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package namedmemcache
-
-import (
- "fmt"
- "sync"
- "testing"
-
- qt "github.com/frankban/quicktest"
-)
-
-func TestNamedCache(t *testing.T) {
- t.Parallel()
- c := qt.New(t)
-
- cache := New()
-
- counter := 0
- create := func() (any, error) {
- counter++
- return counter, nil
- }
-
- for i := 0; i < 5; i++ {
- v1, err := cache.GetOrCreate("a1", create)
- c.Assert(err, qt.IsNil)
- c.Assert(v1, qt.Equals, 1)
- v2, err := cache.GetOrCreate("a2", create)
- c.Assert(err, qt.IsNil)
- c.Assert(v2, qt.Equals, 2)
- }
-
- cache.Clear()
-
- v3, err := cache.GetOrCreate("a2", create)
- c.Assert(err, qt.IsNil)
- c.Assert(v3, qt.Equals, 3)
-}
-
-func TestNamedCacheConcurrent(t *testing.T) {
- t.Parallel()
-
- c := qt.New(t)
-
- var wg sync.WaitGroup
-
- cache := New()
-
- create := func(i int) func() (any, error) {
- return func() (any, error) {
- return i, nil
- }
- }
-
- for i := 0; i < 10; i++ {
- wg.Add(1)
- go func() {
- defer wg.Done()
- for j := 0; j < 100; j++ {
- id := fmt.Sprintf("id%d", j)
- v, err := cache.GetOrCreate(id, create(j))
- c.Assert(err, qt.IsNil)
- c.Assert(v, qt.Equals, j)
- }
- }()
- }
- wg.Wait()
-}
diff --git a/commands/commandeer.go b/commands/commandeer.go
index 5d414b04a29..1aac08c42f3 100644
--- a/commands/commandeer.go
+++ b/commands/commandeer.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -259,7 +259,7 @@ func (r *rootCommand) ConfigFromProvider(key int32, cfg config.Provider) (*commo
publishDirStatic := cfg.GetString("publishDirStatic")
workingDir := cfg.GetString("workingDir")
absPublishDirStatic := paths.AbsPathify(workingDir, publishDirStatic)
- staticFs := afero.NewBasePathFs(afero.NewOsFs(), absPublishDirStatic)
+ staticFs := hugofs.NewBasePathFs(afero.NewOsFs(), absPublishDirStatic)
// Serve from both the static and dynamic fs,
// the first will take priority.
@@ -405,8 +405,14 @@ func (r *rootCommand) PreRun(cd, runner *simplecobra.Commandeer) error {
return err
}
- r.commonConfigs = lazycache.New[int32, *commonConfig](lazycache.Options{MaxEntries: 5})
- r.hugoSites = lazycache.New[int32, *hugolib.HugoSites](lazycache.Options{MaxEntries: 5})
+ r.commonConfigs = lazycache.New(lazycache.Options[int32, *commonConfig]{MaxEntries: 5})
+ // We don't want to keep stale HugoSites in memory longer than needed.
+ r.hugoSites = lazycache.New(lazycache.Options[int32, *hugolib.HugoSites]{
+ MaxEntries: 1,
+ OnEvict: func(key int32, value *hugolib.HugoSites) {
+ value.Close()
+ },
+ })
return nil
}
diff --git a/commands/commands.go b/commands/commands.go
index 9d707b84189..e21d743ab90 100644
--- a/commands/commands.go
+++ b/commands/commands.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -37,5 +37,4 @@ func newExec() (*simplecobra.Exec, error) {
}
return simplecobra.New(rootCmd)
-
}
diff --git a/commands/config.go b/commands/config.go
index 63ee4f7c8d5..dfe54cba246 100644
--- a/commands/config.go
+++ b/commands/config.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -37,7 +37,6 @@ func newConfigCommand() *configCommand {
&configMountsCommand{},
},
}
-
}
type configCommand struct {
@@ -190,7 +189,6 @@ func (m *configModMounts) MarshalJSON() ([]byte, error) {
Dir: m.m.Dir(),
Mounts: mounts,
})
-
}
type configMountsCommand struct {
diff --git a/commands/convert.go b/commands/convert.go
index 702c9227faf..c81ec792abc 100644
--- a/commands/convert.go
+++ b/commands/convert.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -134,7 +134,7 @@ func (c *convertCommand) convertAndSavePage(p page.Page, site *hugolib.Site, tar
}
}
- if p.File().IsZero() {
+ if p.File() == nil {
// No content file.
return nil
}
@@ -209,7 +209,7 @@ func (c *convertCommand) convertContents(format metadecoders.Format) error {
var pagesBackedByFile page.Pages
for _, p := range site.AllPages() {
- if p.File().IsZero() {
+ if p.File() == nil {
continue
}
pagesBackedByFile = append(pagesBackedByFile, p)
diff --git a/commands/deploy.go b/commands/deploy.go
index ce1af95469c..ca6e4d60edb 100644
--- a/commands/deploy.go
+++ b/commands/deploy.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -14,7 +14,7 @@
//go:build !nodeploy
// +build !nodeploy
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -38,7 +38,6 @@ import (
)
func newDeployCommand() simplecobra.Commander {
-
return &simpleCommand{
name: "deploy",
short: "Deploy your site to a Cloud provider.",
diff --git a/commands/deploy_off.go b/commands/deploy_off.go
index 3150dba1626..8a481bd968a 100644
--- a/commands/deploy_off.go
+++ b/commands/deploy_off.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -14,7 +14,7 @@
//go:build nodeploy
// +build nodeploy
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/commands/env.go b/commands/env.go
index 0652deb8747..8e4f03c55b3 100644
--- a/commands/env.go
+++ b/commands/env.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/commands/gen.go b/commands/gen.go
index 534eb0df5a1..11c32d7781d 100644
--- a/commands/gen.go
+++ b/commands/gen.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -101,7 +101,7 @@ See https://xyproto.github.io/splash/docs/all.html for a preview of the availabl
}
if found, _ := helpers.Exists(genmandir, hugofs.Os); !found {
r.Println("Directory", genmandir, "does not exist, creating...")
- if err := hugofs.Os.MkdirAll(genmandir, 0777); err != nil {
+ if err := hugofs.Os.MkdirAll(genmandir, 0o777); err != nil {
return err
}
}
@@ -150,7 +150,7 @@ url: %s
}
if found, _ := helpers.Exists(gendocdir, hugofs.Os); !found {
r.Println("Directory", gendocdir, "does not exist, creating...")
- if err := hugofs.Os.MkdirAll(gendocdir, 0777); err != nil {
+ if err := hugofs.Os.MkdirAll(gendocdir, 0o777); err != nil {
return err
}
}
@@ -177,7 +177,6 @@ url: %s
cmd.PersistentFlags().SetAnnotation("dir", cobra.BashCompSubdirsInDir, []string{})
},
}
-
}
var docsHelperTarget string
@@ -241,7 +240,6 @@ url: %s
newDocsHelper(),
},
}
-
}
type genCommand struct {
diff --git a/commands/helpers.go b/commands/helpers.go
index 3b0c501592b..a13bdebc234 100644
--- a/commands/helpers.go
+++ b/commands/helpers.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -110,12 +110,11 @@ func flagsToCfgWithAdditionalConfigBase(cd *simplecobra.Commandeer, cfg config.P
})
return cfg
-
}
func mkdir(x ...string) {
p := filepath.Join(x...)
- err := os.MkdirAll(p, 0777) // before umask
+ err := os.MkdirAll(p, 0o777) // before umask
if err != nil {
log.Fatal(err)
}
diff --git a/commands/hugo_windows.go b/commands/hugo_windows.go
index 169c6288f07..c354e889d42 100644
--- a/commands/hugo_windows.go
+++ b/commands/hugo_windows.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/commands/hugobuilder.go b/commands/hugobuilder.go
index d2b43cc7707..41f42ae6d42 100644
--- a/commands/hugobuilder.go
+++ b/commands/hugobuilder.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -24,6 +24,7 @@ import (
"runtime/trace"
"strings"
"sync"
+ "sync/atomic"
"time"
"github.com/bep/logg"
@@ -34,6 +35,7 @@ import (
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/common/terminal"
"github.com/gohugoio/hugo/common/types"
"github.com/gohugoio/hugo/config"
@@ -83,7 +85,6 @@ func (c *hugoBuilder) withConf(fn func(conf *commonConfig)) {
c.confmu.Lock()
defer c.confmu.Unlock()
fn(c.conf)
-
}
type hugoBuilderErrState struct {
@@ -135,46 +136,12 @@ func (c *hugoBuilder) errCount() int {
// getDirList provides NewWatcher() with a list of directories to watch for changes.
func (c *hugoBuilder) getDirList() ([]string, error) {
- var filenames []string
-
- walkFn := func(path string, fi hugofs.FileMetaInfo, err error) error {
- if err != nil {
- c.r.logger.Errorln("walker: ", err)
- return nil
- }
-
- if fi.IsDir() {
- if fi.Name() == ".git" ||
- fi.Name() == "node_modules" || fi.Name() == "bower_components" {
- return filepath.SkipDir
- }
-
- filenames = append(filenames, fi.Meta().Filename)
- }
-
- return nil
- }
-
h, err := c.hugo()
if err != nil {
return nil, err
}
- watchFiles := h.PathSpec.BaseFs.WatchDirs()
- for _, fi := range watchFiles {
- if !fi.IsDir() {
- filenames = append(filenames, fi.Meta().Filename)
- continue
- }
-
- w := hugofs.NewWalkway(hugofs.WalkwayConfig{Logger: c.r.logger, Info: fi, WalkFn: walkFn})
- if err := w.Walk(); err != nil {
- c.r.logger.Errorln("walker: ", err)
- }
- }
- filenames = helpers.UniqueStringsSorted(filenames)
-
- return filenames, nil
+ return helpers.UniqueStringsSorted(h.PathSpec.BaseFs.WatchFilenames()), nil
}
func (c *hugoBuilder) initCPUProfile() (func(), error) {
@@ -441,7 +408,7 @@ func (c *hugoBuilder) copyStatic() (map[string]uint64, error) {
}
func (c *hugoBuilder) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint64, error) {
- infol := c.r.logger.InfoCommand("copy static")
+ infol := c.r.logger.InfoCommand("static")
publishDir := helpers.FilePathSeparator
if sourceFs.PublishFolder != "" {
@@ -467,11 +434,11 @@ func (c *hugoBuilder) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint
if syncer.Delete {
infol.Logf("removing all files from destination that don't exist in static dirs")
- syncer.DeleteFilter = func(f os.FileInfo) bool {
+ syncer.DeleteFilter = func(f fsync.FileInfo) bool {
return f.IsDir() && strings.HasPrefix(f.Name(), ".")
}
}
- infol.Logf("syncing static files to %s", publishDir)
+ start := time.Now()
// because we are using a baseFs (to get the union right).
// set sync src to root
@@ -479,9 +446,10 @@ func (c *hugoBuilder) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint
if err != nil {
return 0, err
}
+ loggers.TimeTrackf(infol, start, nil, "syncing static files to %s", publishDir)
- // Sync runs Stat 3 times for every source file (which sounds much)
- numFiles := fs.statCounter / 3
+ // Sync runs Stat 2 times for every source file.
+ numFiles := fs.statCounter / 2
return numFiles, err
}
@@ -652,13 +620,31 @@ func (c *hugoBuilder) handleBuildErr(err error, msg string) {
func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher,
staticSyncer *staticSyncer,
evs []fsnotify.Event,
- configSet map[string]bool) {
+ configSet map[string]bool,
+) {
defer func() {
c.errState.setWasErr(false)
}()
var isHandled bool
+ // Filter out ghost events (from deleted, renamed directories).
+	// This seems to be a bug in fsnotify, or possibly macOS.
+ var n int
+ for _, ev := range evs {
+ keep := true
+ if ev.Has(fsnotify.Create) || ev.Has(fsnotify.Write) {
+ if _, err := os.Stat(ev.Name); err != nil {
+ keep = false
+ }
+ }
+ if keep {
+ evs[n] = ev
+ n++
+ }
+ }
+ evs = evs[:n]
+
for _, ev := range evs {
isConfig := configSet[ev.Name]
configChangeType := configChangeConfig
@@ -726,48 +712,25 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher,
return
}
- c.r.logger.Infoln("Received System Events:", evs)
+ c.r.logger.Debugln("Received System Events:", evs)
staticEvents := []fsnotify.Event{}
dynamicEvents := []fsnotify.Event{}
- filtered := []fsnotify.Event{}
h, err := c.hugo()
if err != nil {
c.r.logger.Errorln("Error getting the Hugo object:", err)
return
}
+ n = 0
for _, ev := range evs {
if h.ShouldSkipFileChangeEvent(ev) {
continue
}
- // Check the most specific first, i.e. files.
- contentMapped := h.ContentChanges.GetSymbolicLinkMappings(ev.Name)
- if len(contentMapped) > 0 {
- for _, mapped := range contentMapped {
- filtered = append(filtered, fsnotify.Event{Name: mapped, Op: ev.Op})
- }
- continue
- }
-
- // Check for any symbolic directory mapping.
-
- dir, name := filepath.Split(ev.Name)
-
- contentMapped = h.ContentChanges.GetSymbolicLinkMappings(dir)
-
- if len(contentMapped) == 0 {
- filtered = append(filtered, ev)
- continue
- }
-
- for _, mapped := range contentMapped {
- mappedFilename := filepath.Join(mapped, name)
- filtered = append(filtered, fsnotify.Event{Name: mappedFilename, Op: ev.Op})
- }
+ evs[n] = ev
+ n++
}
-
- evs = filtered
+ evs = evs[:n]
for _, ev := range evs {
ext := filepath.Ext(ev.Name)
@@ -788,6 +751,7 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher,
if istemp {
continue
}
+
if h.Deps.SourceSpec.IgnoreFile(ev.Name) {
continue
}
@@ -811,7 +775,7 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher,
continue
}
- walkAdder := func(path string, f hugofs.FileMetaInfo, err error) error {
+ walkAdder := func(path string, f hugofs.FileMetaInfo) error {
if f.IsDir() {
c.r.logger.Println("adding created directory to watchlist", path)
if err := watcher.Add(path); err != nil {
@@ -827,11 +791,10 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher,
}
// recursively add new directories to watch list
- // When mkdir -p is used, only the top directory triggers an event (at least on OSX)
- if ev.Op&fsnotify.Create == fsnotify.Create {
+ if ev.Has(fsnotify.Create) || ev.Has(fsnotify.Rename) {
c.withConf(func(conf *commonConfig) {
if s, err := conf.fs.Source.Stat(ev.Name); err == nil && s.Mode().IsDir() {
- _ = helpers.SymbolicWalk(conf.fs.Source, ev.Name, walkAdder)
+ _ = helpers.Walk(conf.fs.Source, ev.Name, walkAdder)
}
})
}
@@ -872,7 +835,7 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher,
return
}
path := h.BaseFs.SourceFilesystems.MakeStaticPathRelative(ev.Name)
- path = h.RelURL(helpers.ToSlashTrimLeading(path), false)
+ path = h.RelURL(paths.ToSlashTrimLeading(path), false)
livereload.RefreshPath(path)
} else {
@@ -909,7 +872,7 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher,
// Nothing has changed.
return
} else if len(changed) == 1 {
- pathToRefresh := h.PathSpec.RelURL(helpers.ToSlashTrimLeading(changed[0]), false)
+ pathToRefresh := h.PathSpec.RelURL(paths.ToSlashTrimLeading(changed[0]), false)
livereload.RefreshPath(pathToRefresh)
} else {
livereload.ForceRefresh()
@@ -944,7 +907,6 @@ func (c *hugoBuilder) hugo() (*hugolib.HugoSites, error) {
var err error
h, err = c.r.HugFromConfig(conf)
return err
-
}); err != nil {
return nil, err
}
@@ -1000,6 +962,7 @@ func (c *hugoBuilder) loadConfig(cd *simplecobra.Commandeer, running bool) error
}
if len(conf.configs.LoadingInfo.ConfigFiles) == 0 {
+ //lint:ignore ST1005 end user message.
return errors.New("Unable to locate config file or config directory. Perhaps you need to create a new site.\nRun `hugo help new` for details.")
}
@@ -1011,15 +974,16 @@ func (c *hugoBuilder) loadConfig(cd *simplecobra.Commandeer, running bool) error
}
return nil
-
}
+var rebuildCounter atomic.Uint64
+
func (c *hugoBuilder) printChangeDetected(typ string) {
msg := "\nChange"
if typ != "" {
msg += " of " + typ
}
- msg += " detected, rebuilding site."
+ msg += fmt.Sprintf(" detected, rebuilding site (#%d).", rebuildCounter.Add(1))
c.r.logger.Println(msg)
const layout = "2006-01-02 15:04:05.000 -0700"
@@ -1034,25 +998,12 @@ func (c *hugoBuilder) rebuildSites(events []fsnotify.Event) error {
}
}
c.errState.setBuildErr(nil)
- visited := c.visitedURLs.PeekAllSet()
h, err := c.hugo()
if err != nil {
return err
}
- if c.fastRenderMode {
- c.withConf(func(conf *commonConfig) {
- // Make sure we always render the home pages
- for _, l := range conf.configs.ConfigLangs() {
- langPath := l.LanguagePrefix()
- if langPath != "" {
- langPath = langPath + "/"
- }
- home := h.PrependBasePath("/"+langPath, false)
- visited[home] = true
- }
- })
- }
- return h.Build(hugolib.BuildCfg{NoBuildLock: true, RecentlyVisited: visited, ErrRecovery: c.errState.wasErr()}, events...)
+
+ return h.Build(hugolib.BuildCfg{NoBuildLock: true, RecentlyVisited: c.visitedURLs, ErrRecovery: c.errState.wasErr()}, events...)
}
func (c *hugoBuilder) reloadConfig() error {
diff --git a/commands/import.go b/commands/import.go
index 18ed7b328a4..947b6d11f98 100644
--- a/commands/import.go
+++ b/commands/import.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -23,7 +23,6 @@ import (
"os"
"path/filepath"
"regexp"
-
"strconv"
"strings"
"time"
@@ -66,7 +65,6 @@ Import from Jekyll requires two paths, e.g. ` + "`hugo import jekyll jekyll_root
}
return c
-
}
type importCommand struct {
@@ -312,7 +310,7 @@ func (c *importCommand) convertJekyllPost(path, relPath, targetDir string, draft
targetFile := filepath.Join(targetDir, relPath)
targetParentDir := filepath.Dir(targetFile)
- os.MkdirAll(targetParentDir, 0777)
+ os.MkdirAll(targetParentDir, 0o777)
contentBytes, err := os.ReadFile(path)
if err != nil {
@@ -398,7 +396,6 @@ func (c *importCommand) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyl
}
func (c *importCommand) importFromJekyll(args []string) error {
-
jekyllRoot, err := filepath.Abs(filepath.Clean(args[0]))
if err != nil {
return newUserError("path error:", args[0])
@@ -429,11 +426,7 @@ func (c *importCommand) importFromJekyll(args []string) error {
c.r.Println("Importing...")
fileCount := 0
- callback := func(path string, fi hugofs.FileMetaInfo, err error) error {
- if err != nil {
- return err
- }
-
+ callback := func(path string, fi hugofs.FileMetaInfo) error {
if fi.IsDir() {
return nil
}
@@ -462,7 +455,7 @@ func (c *importCommand) importFromJekyll(args []string) error {
for jekyllPostDir, hasAnyPostInDir := range jekyllPostDirs {
if hasAnyPostInDir {
- if err = helpers.SymbolicWalk(hugofs.Os, filepath.Join(jekyllRoot, jekyllPostDir), callback); err != nil {
+ if err = helpers.Walk(hugofs.Os, filepath.Join(jekyllRoot, jekyllPostDir), callback); err != nil {
return err
}
}
diff --git a/commands/list.go b/commands/list.go
index 6690ea9ee1a..41a45e40223 100644
--- a/commands/list.go
+++ b/commands/list.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -31,7 +31,6 @@ import (
// newListCommand creates a new list command and its subcommands.
func newListCommand() *listCommand {
-
createRecord := func(workingDir string, p page.Page) []string {
return []string{
filepath.ToSlash(strings.TrimPrefix(p.File().Filename(), workingDir+string(os.PathSeparator))),
@@ -83,7 +82,6 @@ func newListCommand() *listCommand {
}
return nil
-
}
return &listCommand{
@@ -94,11 +92,10 @@ func newListCommand() *listCommand {
long: `List all of the drafts in your content directory.`,
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
shouldInclude := func(p page.Page) bool {
- if !p.Draft() || p.File().IsZero() {
+ if !p.Draft() || p.File() == nil {
return false
}
return true
-
}
return list(cd, r, shouldInclude,
"buildDrafts", true,
@@ -113,11 +110,10 @@ func newListCommand() *listCommand {
long: `List all of the posts in your content directory which will be posted in the future.`,
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
shouldInclude := func(p page.Page) bool {
- if !resource.IsFuture(p) || p.File().IsZero() {
+ if !resource.IsFuture(p) || p.File() == nil {
return false
}
return true
-
}
return list(cd, r, shouldInclude,
"buildFuture", true,
@@ -131,7 +127,7 @@ func newListCommand() *listCommand {
long: `List all of the posts in your content directory which has already expired.`,
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
shouldInclude := func(p page.Page) bool {
- if !resource.IsExpired(p) || p.File().IsZero() {
+ if !resource.IsExpired(p) || p.File() == nil {
return false
}
return true
@@ -148,14 +144,13 @@ func newListCommand() *listCommand {
long: `List all of the posts in your content directory, include drafts, future and expired pages.`,
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
shouldInclude := func(p page.Page) bool {
- return !p.File().IsZero()
+ return p.File() != nil
}
return list(cd, r, shouldInclude, "buildDrafts", true, "buildFuture", true, "buildExpired", true)
},
},
},
}
-
}
type listCommand struct {
diff --git a/commands/mod.go b/commands/mod.go
index 20b9d396079..d64d2a98321 100644
--- a/commands/mod.go
+++ b/commands/mod.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -69,7 +69,7 @@ so this may/will change in future versions of Hugo.
if err != nil {
return err
}
- return npm.Pack(h.BaseFs.SourceFs, h.BaseFs.Assets.Dirs)
+ return npm.Pack(h.BaseFs.ProjectSourceFs, h.BaseFs.AssetsWithDuplicatesPreserved.Fs)
},
},
},
diff --git a/commands/new.go b/commands/new.go
index 8e348366dd8..79d2c9e7edf 100644
--- a/commands/new.go
+++ b/commands/new.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -64,7 +64,6 @@ Ensure you run this within the root directory of your site.`,
cmd.Flags().String("editor", "", "edit new content with this editor, if provided")
cmd.Flags().BoolVarP(&force, "force", "f", false, "overwrite file if it already exists")
applyLocalFlagsBuildConfig(cmd, r)
-
},
},
&simpleCommand{
@@ -143,7 +142,6 @@ according to your needs.`,
}
return c
-
}
type newCommand struct {
diff --git a/commands/release.go b/commands/release.go
index 54cf936e861..1d1aaad53d5 100644
--- a/commands/release.go
+++ b/commands/release.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -24,7 +24,6 @@ import (
// Note: This is a command only meant for internal use and must be run
// via "go run -tags release main.go release" on the actual code base that is in the release.
func newReleaseCommand() simplecobra.Commander {
-
var (
step int
skipPush bool
diff --git a/commands/server.go b/commands/server.go
index 63c09fccd50..97cf405b7ef 100644
--- a/commands/server.go
+++ b/commands/server.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -27,20 +27,19 @@ import (
"net/http"
"net/url"
"os"
- "sync"
- "sync/atomic"
-
- "github.com/bep/mclib"
-
"os/signal"
"path"
"path/filepath"
"regexp"
"strconv"
"strings"
+ "sync"
+ "sync/atomic"
"syscall"
"time"
+ "github.com/bep/mclib"
+
"github.com/bep/debounce"
"github.com/bep/simplecobra"
"github.com/fsnotify/fsnotify"
@@ -83,10 +82,14 @@ const (
)
func newHugoBuilder(r *rootCommand, s *serverCommand, onConfigLoaded ...func(reloaded bool) error) *hugoBuilder {
+ var visitedURLs *types.EvictingStringQueue
+ if s != nil && !s.disableFastRender {
+ visitedURLs = types.NewEvictingStringQueue(20)
+ }
return &hugoBuilder{
r: r,
s: s,
- visitedURLs: types.NewEvictingStringQueue(100),
+ visitedURLs: visitedURLs,
fullRebuildSem: semaphore.NewWeighted(1),
debounce: debounce.New(4 * time.Second),
onConfigLoaded: func(reloaded bool) error {
@@ -120,7 +123,6 @@ func newServerCommand() *serverCommand {
},
withc: func(cmd *cobra.Command, r *rootCommand) {
cmd.Flags().BoolVar(&uninstall, "uninstall", false, "Uninstall the local CA (but do not delete it).")
-
},
},
},
@@ -219,7 +221,7 @@ func (f *fileChangeDetector) filterIrrelevant(in []string) []string {
}
type fileServer struct {
- baseURLs []string
+ baseURLs []urls.BaseURL
roots []string
errorTemplate func(err any) (io.Reader, error)
c *serverCommand
@@ -255,12 +257,6 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string
r.Println("Running in Fast Render Mode. For full rebuilds on change: hugo server --disableFastRender")
}
- // We're only interested in the path
- u, err := url.Parse(baseURL)
- if err != nil {
- return nil, nil, "", "", fmt.Errorf("invalid baseURL: %w", err)
- }
-
decorate := func(h http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if f.c.showErrorInBrowser {
@@ -280,7 +276,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string
port = lrport
}
})
- lr := *u
+ lr := baseURL.URL()
lr.Host = fmt.Sprintf("%s:%d", lr.Hostname(), port)
fmt.Fprint(w, injectLiveReloadScript(r, lr))
@@ -311,7 +307,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string
// This matches Netlify's behaviour and is needed for SPA behaviour.
// See https://docs.netlify.com/routing/redirects/rewrites-proxies/
if !redirect.Force {
- path := filepath.Clean(strings.TrimPrefix(requestURI, u.Path))
+ path := filepath.Clean(strings.TrimPrefix(requestURI, baseURL.Path()))
if root != "" {
path = filepath.Join(root, path)
}
@@ -338,7 +334,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string
switch redirect.Status {
case 404:
w.WriteHeader(404)
- file, err := fs.Open(strings.TrimPrefix(redirect.To, u.Path))
+ file, err := fs.Open(strings.TrimPrefix(redirect.To, baseURL.Path()))
if err == nil {
defer file.Close()
io.Copy(w, file)
@@ -347,7 +343,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string
}
return
case 200:
- if r2 := f.rewriteRequest(r, strings.TrimPrefix(redirect.To, u.Path)); r2 != nil {
+ if r2 := f.rewriteRequest(r, strings.TrimPrefix(redirect.To, baseURL.Path())); r2 != nil {
requestURI = redirect.To
r = r2
}
@@ -385,10 +381,10 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string
fileserver := decorate(http.FileServer(fs))
mu := http.NewServeMux()
- if u.Path == "" || u.Path == "/" {
+ if baseURL.Path() == "" || baseURL.Path() == "/" {
mu.Handle("/", fileserver)
} else {
- mu.Handle(u.Path, http.StripPrefix(u.Path, fileserver))
+ mu.Handle(baseURL.Path(), http.StripPrefix(baseURL.Path(), fileserver))
}
if r.IsTestRun() {
var shutDownOnce sync.Once
@@ -401,7 +397,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string
endpoint := net.JoinHostPort(f.c.serverInterface, strconv.Itoa(port))
- return mu, listener, u.String(), endpoint, nil
+ return mu, listener, baseURL.String(), endpoint, nil
}
func (f *fileServer) rewriteRequest(r *http.Request, toPath string) *http.Request {
@@ -469,7 +465,6 @@ func (c *serverCommand) Name() string {
}
func (c *serverCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
-
// Watch runs its own server as part of the routine
if c.serverWatch {
@@ -676,7 +671,7 @@ func (c *serverCommand) createCertificates(conf *commonConfig) error {
// Create the directory if it doesn't exist.
if _, err := os.Stat(keyDir); os.IsNotExist(err) {
- if err := os.MkdirAll(keyDir, 0777); err != nil {
+ if err := os.MkdirAll(keyDir, 0o777); err != nil {
return err
}
}
@@ -701,7 +696,6 @@ func (c *serverCommand) createCertificates(conf *commonConfig) error {
// Yes, this is unfortunate, but it's currently the only way to use Mkcert as a library.
os.Args = []string{"-cert-file", c.tlsCertFile, "-key-file", c.tlsKeyFile, hostname}
return mclib.RunMain()
-
}
func (c *serverCommand) verifyCert(rootPEM, certPEM []byte, name string) error {
@@ -831,9 +825,9 @@ func (c *serverCommand) partialReRender(urls ...string) error {
c.errState.setWasErr(false)
}()
c.errState.setBuildErr(nil)
- visited := make(map[string]bool)
+ visited := types.NewEvictingStringQueue(len(urls))
for _, url := range urls {
- visited[url] = true
+ visited.Add(url)
}
h, err := c.hugo()
@@ -846,7 +840,7 @@ func (c *serverCommand) partialReRender(urls ...string) error {
func (c *serverCommand) serve() error {
var (
- baseURLs []string
+ baseURLs []urls.BaseURL
roots []string
h *hugolib.HugoSites
)
@@ -863,18 +857,17 @@ func (c *serverCommand) serve() error {
if isMultiHost {
for _, l := range conf.configs.ConfigLangs() {
- baseURLs = append(baseURLs, l.BaseURL().String())
+ baseURLs = append(baseURLs, l.BaseURL())
roots = append(roots, l.Language().Lang)
}
} else {
l := conf.configs.GetFirstLanguageConfig()
- baseURLs = []string{l.BaseURL().String()}
+ baseURLs = []urls.BaseURL{l.BaseURL()}
roots = []string{""}
}
return nil
})
-
if err != nil {
return err
}
@@ -946,13 +939,9 @@ func (c *serverCommand) serve() error {
servers = append(servers, srv)
if doLiveReload {
- u, err := url.Parse(helpers.SanitizeURL(baseURLs[i]))
- if err != nil {
- return err
- }
-
- mu.HandleFunc(u.Path+"/livereload.js", livereload.ServeJS)
- mu.HandleFunc(u.Path+"/livereload", livereload.Handler)
+ baseURL := baseURLs[i]
+ mu.HandleFunc(baseURL.Path()+"livereload.js", livereload.ServeJS)
+ mu.HandleFunc(baseURL.Path()+"livereload", livereload.Handler)
}
c.r.Printf("Web Server is available at %s (bind address %s) %s\n", serverURL, c.serverInterface, roots[i])
wg1.Go(func() error {
@@ -971,8 +960,12 @@ func (c *serverCommand) serve() error {
if c.r.IsTestRun() {
// Write a .ready file to disk to signal ready status.
// This is where the test is run from.
+ var baseURLs []string
+ for _, baseURL := range srv.baseURLs {
+ baseURLs = append(baseURLs, baseURL.String())
+ }
testInfo := map[string]any{
- "baseURLs": srv.baseURLs,
+ "baseURLs": baseURLs,
}
dir := os.Getenv("WORK")
@@ -983,7 +976,7 @@ func (c *serverCommand) serve() error {
if err != nil {
return err
}
- err = os.WriteFile(readyFile, b, 0777)
+ err = os.WriteFile(readyFile, b, 0o777)
if err != nil {
return err
}
@@ -1167,7 +1160,7 @@ func cleanErrorLog(content string) string {
return strings.Join(keep, ": ")
}
-func injectLiveReloadScript(src io.Reader, baseURL url.URL) string {
+func injectLiveReloadScript(src io.Reader, baseURL *url.URL) string {
var b bytes.Buffer
chain := transform.Chain{livereloadinject.New(baseURL)}
chain.Apply(&b, src)
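
A minimal sketch of the mounting pattern used above, where the dev server handlers are registered under the site's base URL path via http.StripPrefix. This is plain net/http with an illustrative base path, not Hugo's actual wiring:

package main

import (
	"fmt"
	"log"
	"net/http"
)

func main() {
	// Assume the site is served under a sub-path, e.g. baseURL = "https://example.org/docs/".
	basePath := "/docs/"

	files := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// r.URL.Path arrives with the base path already stripped, mirroring how
		// the file server above sees paths relative to baseURL.Path().
		fmt.Fprintf(w, "serving %s\n", r.URL.Path)
	})

	mux := http.NewServeMux()
	if basePath == "" || basePath == "/" {
		mux.Handle("/", files)
	} else {
		mux.Handle(basePath, http.StripPrefix(basePath, files))
	}

	log.Fatal(http.ListenAndServe("127.0.0.1:1313", mux))
}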
diff --git a/common/constants/constants.go b/common/constants/constants.go
index 6afb9e28316..e4f5a63a279 100644
--- a/common/constants/constants.go
+++ b/common/constants/constants.go
@@ -20,3 +20,24 @@ const (
ErrRemoteGetJSON = "error-remote-getjson"
ErrRemoteGetCSV = "error-remote-getcsv"
)
+
+// Field/method names with special meaning.
+const (
+ FieldRelPermalink = "RelPermalink"
+ FieldPermalink = "Permalink"
+)
+
+// IsFieldRelOrPermalink returns whether the given name is a RelPermalink or Permalink.
+func IsFieldRelOrPermalink(name string) bool {
+ return name == FieldRelPermalink || name == FieldPermalink
+}
+
+// Resource transformations.
+const (
+ ResourceTransformationFingerprint = "fingerprint"
+)
+
+// IsResourceTransformationPermalinkHash returns whether the given name is a resource transformation that changes the permalink based on the content.
+func IsResourceTransformationPermalinkHash(name string) bool {
+ return name == ResourceTransformationFingerprint
+}
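
A small usage sketch of the two helpers added above; the call sites are hypothetical, but the functions are simple name checks:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/constants"
)

func main() {
	// A template method lookup might use these to decide whether a value
	// depends on the final permalink.
	fmt.Println(constants.IsFieldRelOrPermalink("RelPermalink"))                 // true
	fmt.Println(constants.IsFieldRelOrPermalink("Title"))                        // false
	fmt.Println(constants.IsResourceTransformationPermalinkHash("fingerprint"))  // true
}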
diff --git a/common/hcontext/context.go b/common/hcontext/context.go
new file mode 100644
index 00000000000..9524ef28443
--- /dev/null
+++ b/common/hcontext/context.go
@@ -0,0 +1,46 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hcontext
+
+import "context"
+
+// ContextDispatcher is a generic interface for setting and getting values from a context.
+type ContextDispatcher[T any] interface {
+ Set(ctx context.Context, value T) context.Context
+ Get(ctx context.Context) T
+}
+
+// NewContextDispatcher creates a new ContextDispatcher with the given key.
+func NewContextDispatcher[T any, R comparable](key R) ContextDispatcher[T] {
+ return keyInContext[T, R]{
+ id: key,
+ }
+}
+
+type keyInContext[T any, R comparable] struct {
+ zero T
+ id R
+}
+
+func (f keyInContext[T, R]) Get(ctx context.Context) T {
+ v := ctx.Value(f.id)
+ if v == nil {
+ return f.zero
+ }
+ return v.(T)
+}
+
+func (f keyInContext[T, R]) Set(ctx context.Context, value T) context.Context {
+ return context.WithValue(ctx, f.id, value)
+}
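
A rough usage sketch of the new ContextDispatcher; the key type and values are made up for illustration:

package main

import (
	"context"
	"fmt"

	"github.com/gohugoio/hugo/common/hcontext"
)

// Unexported key type to avoid collisions in the context, as is conventional.
type dispatcherKey string

func main() {
	// A dispatcher that stores and retrieves a string under a fixed key.
	d := hcontext.NewContextDispatcher[string](dispatcherKey("currentSite"))

	ctx := d.Set(context.Background(), "docs")
	fmt.Println(d.Get(ctx)) // "docs"

	// Missing value: the zero value for T is returned.
	fmt.Println(d.Get(context.Background()) == "") // true
}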
diff --git a/common/herrors/error_locator.go b/common/herrors/error_locator.go
index b880fe04542..1ece0cca4cb 100644
--- a/common/herrors/error_locator.go
+++ b/common/herrors/error_locator.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -74,7 +74,6 @@ func ContainsMatcher(text string) func(m LineMatcher) int {
// ErrorContext contains contextual information about an error. This will
// typically be the lines surrounding some problem in a file.
type ErrorContext struct {
-
// If a match will contain the matched line and up to 2 lines before and after.
// Will be empty if no match.
Lines []string
diff --git a/common/herrors/error_locator_test.go b/common/herrors/error_locator_test.go
index 6135657d8dc..62f15213d30 100644
--- a/common/herrors/error_locator_test.go
+++ b/common/herrors/error_locator_test.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/common/herrors/errors.go b/common/herrors/errors.go
index 8e62b2c99b6..59739a86adc 100644
--- a/common/herrors/errors.go
+++ b/common/herrors/errors.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -21,6 +21,7 @@ import (
"os"
"runtime"
"runtime/debug"
+ "time"
)
// PrintStackTrace prints the current stacktrace to w.
@@ -47,6 +48,24 @@ func Recover(args ...any) {
}
}
+// IsTimeoutError returns true if the given error is or contains a TimeoutError.
+func IsTimeoutError(err error) bool {
+ return errors.Is(err, &TimeoutError{})
+}
+
+type TimeoutError struct {
+ Duration time.Duration
+}
+
+func (e *TimeoutError) Error() string {
+ return fmt.Sprintf("timeout after %s", e.Duration)
+}
+
+func (e *TimeoutError) Is(target error) bool {
+ _, ok := target.(*TimeoutError)
+ return ok
+}
+
// IsFeatureNotAvailableError returns true if the given error is or contains a FeatureNotAvailableError.
func IsFeatureNotAvailableError(err error) bool {
return errors.Is(err, &FeatureNotAvailableError{})
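
The new TimeoutError cooperates with errors.Is through its Is method, so wrapped timeouts are still detected. A small sketch; the wrapping call is illustrative:

package main

import (
	"fmt"
	"time"

	"github.com/gohugoio/hugo/common/herrors"
)

func main() {
	err := fmt.Errorf("rendering /posts/foo: %w", &herrors.TimeoutError{Duration: 30 * time.Second})

	// Matches because TimeoutError.Is only checks the target's type,
	// not the duration.
	fmt.Println(herrors.IsTimeoutError(err)) // true
}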
diff --git a/common/herrors/errors_test.go b/common/herrors/errors_test.go
index 223782e23ad..2f53a1e89d6 100644
--- a/common/herrors/errors_test.go
+++ b/common/herrors/errors_test.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -42,5 +42,4 @@ func TestIsFeatureNotAvailableError(t *testing.T) {
c.Assert(IsFeatureNotAvailableError(ErrFeatureNotAvailable), qt.Equals, true)
c.Assert(IsFeatureNotAvailableError(&FeatureNotAvailableError{}), qt.Equals, true)
c.Assert(IsFeatureNotAvailableError(errors.New("asdf")), qt.Equals, false)
-
}
diff --git a/common/herrors/file_error.go b/common/herrors/file_error.go
index f8bcecd34ec..32a6f0081f8 100644
--- a/common/herrors/file_error.go
+++ b/common/herrors/file_error.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -15,13 +15,13 @@ package herrors
import (
"encoding/json"
-
- godartsassv1 "github.com/bep/godartsass"
-
+ "errors"
"fmt"
"io"
"path/filepath"
+ godartsassv1 "github.com/bep/godartsass"
+
"github.com/bep/godartsass/v2"
"github.com/bep/golibsass/libsass/libsasserrors"
"github.com/gohugoio/hugo/common/paths"
@@ -29,8 +29,6 @@ import (
"github.com/pelletier/go-toml/v2"
"github.com/spf13/afero"
"github.com/tdewolff/parse/v2"
-
- "errors"
)
// FileError represents an error when handling a file: Parsing a config file,
@@ -48,6 +46,9 @@ type FileError interface {
// UpdateContent updates the error with a new ErrorContext from the content of the file.
UpdateContent(r io.Reader, linematcher LineMatcherFn) FileError
+
+ // SetFilename sets the filename of the error.
+ SetFilename(filename string) FileError
}
// Unwrapper can unwrap errors created with fmt.Errorf.
@@ -60,6 +61,11 @@ var (
_ Unwrapper = (*fileError)(nil)
)
+func (fe *fileError) SetFilename(filename string) FileError {
+ fe.position.Filename = filename
+ return fe
+}
+
func (fe *fileError) UpdatePosition(pos text.Position) FileError {
oldFilename := fe.Position().Filename
if pos.Filename != "" && fe.fileType == "" {
@@ -115,7 +121,6 @@ func (fe *fileError) UpdateContent(r io.Reader, linematcher LineMatcherFn) FileE
}
return fe
-
}
type fileError struct {
@@ -181,7 +186,6 @@ func NewFileErrorFromName(err error, name string) FileError {
}
return &fileError{cause: err, fileType: fileType, position: pos}
-
}
// NewFileErrorFromPos will use the filename and line number from pos to create a new FileError, wrapping err.
@@ -192,7 +196,6 @@ func NewFileErrorFromPos(err error, pos text.Position) FileError {
_, fileType = paths.FileAndExtNoDelimiter(filepath.Clean(pos.Filename))
}
return &fileError{cause: err, fileType: fileType, position: pos}
-
}
func NewFileErrorFromFileInErr(err error, fs afero.Fs, linematcher LineMatcherFn) FileError {
@@ -249,7 +252,6 @@ func openFile(filename string, fs afero.Fs) (afero.File, string, error) {
}); ok {
realFilename = s.Filename()
}
-
}
f, err2 := fs.Open(filename)
diff --git a/common/herrors/file_error_test.go b/common/herrors/file_error_test.go
index 0b260a2550e..7aca0840543 100644
--- a/common/herrors/file_error_test.go
+++ b/common/herrors/file_error_test.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -14,12 +14,11 @@
package herrors
import (
+ "errors"
"fmt"
"strings"
"testing"
- "errors"
-
"github.com/gohugoio/hugo/common/text"
qt "github.com/frankban/quicktest"
@@ -48,7 +47,6 @@ func TestNewFileError(t *testing.T) {
c.Assert(errorContext.Lines, qt.DeepEquals, []string{"line 30", "line 31", "line 32", "line 33", "line 34"})
c.Assert(errorContext.LinesPos, qt.Equals, 2)
c.Assert(errorContext.ChromaLexer, qt.Equals, "go-html-template")
-
}
func TestNewFileErrorExtractFromMessage(t *testing.T) {
diff --git a/common/hreflect/helpers.go b/common/hreflect/helpers.go
index 17afbf9127e..b5a8bacc929 100644
--- a/common/hreflect/helpers.go
+++ b/common/hreflect/helpers.go
@@ -1,4 +1,4 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
// Some functions in this file (see comments) is based on the Go source code,
// copyright The Go Authors and governed by a BSD-style license.
//
@@ -23,6 +23,7 @@ import (
"time"
"github.com/gohugoio/hugo/common/htime"
+ "github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/common/types"
)
@@ -188,6 +189,20 @@ func IsTime(tp reflect.Type) bool {
return false
}
+// IsValid returns whether v is not nil and a valid value.
+func IsValid(v reflect.Value) bool {
+ if !v.IsValid() {
+ return false
+ }
+
+ switch v.Kind() {
+ case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
+ return !v.IsNil()
+ }
+
+ return true
+}
+
// AsTime returns v as a time.Time if possible.
// The given location is only used if the value implements AsTimeProvider (e.g. go-toml local).
// A zero Time and false is returned if this isn't possible.
@@ -217,7 +232,7 @@ func CallMethodByName(cxt context.Context, name string, v reflect.Value) []refle
panic("not supported")
}
first := tp.In(0)
- if first.Implements(ContextInterface) {
+ if IsContextType(first) {
args = append(args, reflect.ValueOf(cxt))
}
}
@@ -236,4 +251,24 @@ func indirectInterface(v reflect.Value) reflect.Value {
return v.Elem()
}
-var ContextInterface = reflect.TypeOf((*context.Context)(nil)).Elem()
+var contextInterface = reflect.TypeOf((*context.Context)(nil)).Elem()
+
+var isContextCache = maps.NewCache[reflect.Type, bool]()
+
+type k string
+
+var contextTypeValue = reflect.TypeOf(context.WithValue(context.Background(), k("key"), 32))
+
+// IsContextType returns whether tp is a context.Context type.
+func IsContextType(tp reflect.Type) bool {
+ if tp == contextTypeValue {
+ return true
+ }
+ if tp == contextInterface {
+ return true
+ }
+
+ return isContextCache.GetOrCreate(tp, func() bool {
+ return tp.Implements(contextInterface)
+ })
+}
diff --git a/common/hreflect/helpers_test.go b/common/hreflect/helpers_test.go
index d16b9b9b3fd..27b774337db 100644
--- a/common/hreflect/helpers_test.go
+++ b/common/hreflect/helpers_test.go
@@ -14,6 +14,7 @@
package hreflect
import (
+ "context"
"reflect"
"testing"
"time"
@@ -40,6 +41,42 @@ func TestGetMethodByName(t *testing.T) {
c.Assert(GetMethodIndexByName(tp, "Foo"), qt.Equals, -1)
}
+func TestIsContextType(t *testing.T) {
+ c := qt.New(t)
+ type k string
+ ctx := context.Background()
+ valueCtx := context.WithValue(ctx, k("key"), 32)
+ c.Assert(IsContextType(reflect.TypeOf(ctx)), qt.IsTrue)
+ c.Assert(IsContextType(reflect.TypeOf(valueCtx)), qt.IsTrue)
+}
+
+func BenchmarkIsContextType(b *testing.B) {
+ type k string
+ b.Run("value", func(b *testing.B) {
+ ctx := context.Background()
+ ctxs := make([]reflect.Type, b.N)
+ for i := 0; i < b.N; i++ {
+ ctxs[i] = reflect.TypeOf(context.WithValue(ctx, k("key"), i))
+ }
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ if !IsContextType(ctxs[i]) {
+ b.Fatal("not context")
+ }
+ }
+ })
+
+ b.Run("background", func(b *testing.B) {
+ var ctxt reflect.Type = reflect.TypeOf(context.Background())
+ for i := 0; i < b.N; i++ {
+ if !IsContextType(ctxt) {
+ b.Fatal("not context")
+ }
+ }
+ })
+}
+
func BenchmarkIsTruthFul(b *testing.B) {
v := reflect.ValueOf("Hugo")
diff --git a/common/hstrings/strings.go b/common/hstrings/strings.go
index 88df97607cf..d9426ab5da0 100644
--- a/common/hstrings/strings.go
+++ b/common/hstrings/strings.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -122,3 +122,8 @@ func InSlicEqualFold(arr []string, el string) bool {
}
return false
}
+
+type Tuple struct {
+ First string
+ Second string
+}
diff --git a/common/hstrings/strings_test.go b/common/hstrings/strings_test.go
index 85068bdf979..d8e9e204ab3 100644
--- a/common/hstrings/strings_test.go
+++ b/common/hstrings/strings_test.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -33,7 +33,6 @@ func TestStringEqualFold(t *testing.T) {
c.Assert(StringEqualFold(s1).EqualFold("b"), qt.Equals, false)
c.Assert(StringEqualFold(s1).Eq(s2), qt.Equals, true)
c.Assert(StringEqualFold(s1).Eq("b"), qt.Equals, false)
-
}
func TestGetOrCompileRegexp(t *testing.T) {
@@ -42,7 +41,6 @@ func TestGetOrCompileRegexp(t *testing.T) {
re, err := GetOrCompileRegexp(`\d+`)
c.Assert(err, qt.IsNil)
c.Assert(re.MatchString("123"), qt.Equals, true)
-
}
func BenchmarkGetOrCompileRegexp(b *testing.B) {
diff --git a/common/htime/integration_test.go b/common/htime/integration_test.go
index e72c216d93d..983fff1f77f 100644
--- a/common/htime/integration_test.go
+++ b/common/htime/integration_test.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/common/hugio/copy.go b/common/hugio/copy.go
index 8dbadc48c50..31d679dfce0 100644
--- a/common/hugio/copy.go
+++ b/common/hugio/copy.go
@@ -16,6 +16,7 @@ package hugio
import (
"fmt"
"io"
+ iofs "io/fs"
"path/filepath"
"github.com/spf13/afero"
@@ -60,12 +61,16 @@ func CopyDir(fs afero.Fs, from, to string, shouldCopy func(filename string) bool
return fmt.Errorf("%q is not a directory", from)
}
- err = fs.MkdirAll(to, 0777) // before umask
+ err = fs.MkdirAll(to, 0o777) // before umask
if err != nil {
return err
}
- entries, _ := afero.ReadDir(fs, from)
+ d, err := fs.Open(from)
+ if err != nil {
+ return err
+ }
+ entries, _ := d.(iofs.ReadDirFile).ReadDir(-1)
for _, entry := range entries {
fromFilename := filepath.Join(from, entry.Name())
toFilename := filepath.Join(to, entry.Name())
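
The change above replaces afero.ReadDir with a type assertion to iofs.ReadDirFile on the opened directory. A standard-library sketch of the same pattern, assuming (as the patch does) that the opened file satisfies that interface:

package main

import (
	"fmt"
	iofs "io/fs"
	"log"
	"os"
)

func main() {
	d, err := os.Open(".")
	if err != nil {
		log.Fatal(err)
	}
	defer d.Close()

	// What fs.Open-style APIs hand you is an iofs.File; asserting to
	// iofs.ReadDirFile and calling ReadDir(-1) returns all entries in one call.
	var f iofs.File = d
	entries, err := f.(iofs.ReadDirFile).ReadDir(-1)
	if err != nil {
		log.Fatal(err)
	}
	for _, e := range entries {
		fmt.Println(e.Name(), e.IsDir())
	}
}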
diff --git a/common/hugio/hasBytesWriter.go b/common/hugio/hasBytesWriter.go
index 7b7d7a5d756..5148c82f96a 100644
--- a/common/hugio/hasBytesWriter.go
+++ b/common/hugio/hasBytesWriter.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/common/hugio/hasBytesWriter_test.go b/common/hugio/hasBytesWriter_test.go
index b1b8011d5db..af53fa5dd49 100644
--- a/common/hugio/hasBytesWriter_test.go
+++ b/common/hugio/hasBytesWriter_test.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/common/hugio/readers.go b/common/hugio/readers.go
index 60bd97992fc..feb1b141214 100644
--- a/common/hugio/readers.go
+++ b/common/hugio/readers.go
@@ -14,6 +14,7 @@
package hugio
import (
+ "bytes"
"io"
"strings"
)
@@ -57,3 +58,22 @@ func NewReadSeekerNoOpCloser(r ReadSeeker) ReadSeekerNoOpCloser {
func NewReadSeekerNoOpCloserFromString(content string) ReadSeekerNoOpCloser {
return ReadSeekerNoOpCloser{strings.NewReader(content)}
}
+
+// NewReadSeekerNoOpCloserFromBytes uses bytes.NewReader to create a new ReadSeekerNoOpCloser
+// from the given byte slice.
+func NewReadSeekerNoOpCloserFromBytes(content []byte) ReadSeekerNoOpCloser {
+ return ReadSeekerNoOpCloser{bytes.NewReader(content)}
+}
+
+// NewOpenReadSeekCloser creates a new OpenReadSeekCloser from the given ReadSeekCloser.
+// The returned function seeks to the beginning of the ReadSeekCloser before returning it.
+func NewOpenReadSeekCloser(r ReadSeekCloser) OpenReadSeekCloser {
+ return func() (ReadSeekCloser, error) {
+ r.Seek(0, io.SeekStart)
+ return r, nil
+ }
+}
+
+// OpenReadSeekCloser allows setting some other way (than reading from a filesystem)
+// to open or create a ReadSeekCloser.
+type OpenReadSeekCloser func() (ReadSeekCloser, error)
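
A quick sketch of how the new helpers might be combined; the content is made up:

package main

import (
	"fmt"
	"io"
	"log"

	"github.com/gohugoio/hugo/common/hugio"
)

func main() {
	r := hugio.NewReadSeekerNoOpCloserFromBytes([]byte("hello"))

	// Wrap the reader so callers can (re)open it; each call seeks back to the start.
	open := hugio.NewOpenReadSeekCloser(r)

	for i := 0; i < 2; i++ {
		rc, err := open()
		if err != nil {
			log.Fatal(err)
		}
		b, _ := io.ReadAll(rc)
		fmt.Println(string(b)) // "hello" both times
		rc.Close()             // no-op
	}
}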
diff --git a/common/hugo/hugo.go b/common/hugo/hugo.go
index 67d52f6c834..be43e2a3882 100644
--- a/common/hugo/hugo.go
+++ b/common/hugo/hugo.go
@@ -35,6 +35,8 @@ import (
"github.com/spf13/afero"
+ iofs "io/fs"
+
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/hugofs"
)
@@ -159,7 +161,12 @@ func GetExecEnviron(workDir string, cfg config.AllProvider, fs afero.Fs) []strin
config.SetEnvVars(&env, "HUGO_PUBLISHDIR", filepath.Join(workDir, cfg.BaseConfig().PublishDir))
if fs != nil {
- fis, err := afero.ReadDir(fs, files.FolderJSConfig)
+ var fis []iofs.DirEntry
+ d, err := fs.Open(files.FolderJSConfig)
+ if err == nil {
+ fis, err = d.(iofs.ReadDirFile).ReadDir(-1)
+ }
+
if err == nil {
for _, fi := range fis {
key := fmt.Sprintf("HUGO_FILE_%s", strings.ReplaceAll(strings.ToUpper(fi.Name()), ".", "_"))
diff --git a/common/loggers/handlerdefault.go b/common/loggers/handlerdefault.go
index bb48895bc68..bc3c7eec25b 100644
--- a/common/loggers/handlerdefault.go
+++ b/common/loggers/handlerdefault.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
// Some functions in this file (see comments) is based on the Go source code,
// copyright The Go Authors and governed by a BSD-style license.
//
@@ -27,10 +27,9 @@ import (
"github.com/fatih/color"
)
-var bold = color.New(color.Bold)
-
// levelColor mapping.
var levelColor = [...]*color.Color{
+ logg.LevelTrace: color.New(color.FgWhite),
logg.LevelDebug: color.New(color.FgWhite),
logg.LevelInfo: color.New(color.FgBlue),
logg.LevelWarn: color.New(color.FgYellow),
@@ -39,6 +38,7 @@ var levelColor = [...]*color.Color{
// levelString mapping.
var levelString = [...]string{
+ logg.LevelTrace: "TRACE",
logg.LevelDebug: "DEBUG",
logg.LevelInfo: "INFO ",
logg.LevelWarn: "WARN ",
diff --git a/common/loggers/handlersmisc.go b/common/loggers/handlersmisc.go
index 5c9d6c0910a..55bf8b940d5 100644
--- a/common/loggers/handlersmisc.go
+++ b/common/loggers/handlersmisc.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
// Some functions in this file (see comments) is based on the Go source code,
// copyright The Go Authors and governed by a BSD-style license.
//
@@ -69,7 +69,7 @@ func (h *logLevelCounter) HandleLog(e *logg.Entry) error {
return nil
}
-var stopError = fmt.Errorf("stop")
+var errStop = fmt.Errorf("stop")
type logOnceHandler struct {
threshold logg.Level
@@ -87,7 +87,7 @@ func (h *logOnceHandler) HandleLog(e *logg.Entry) error {
defer h.mu.Unlock()
hash := identity.HashUint64(e.Level, e.Message, e.Fields)
if h.seen[hash] {
- return stopError
+ return errStop
}
h.seen[hash] = true
return nil
@@ -107,7 +107,7 @@ type stopHandler struct {
func (h *stopHandler) HandleLog(e *logg.Entry) error {
for _, handler := range h.handlers {
if err := handler.HandleLog(e); err != nil {
- if err == stopError {
+ if err == errStop {
return nil
}
return err
@@ -124,26 +124,13 @@ func (h *suppressStatementsHandler) HandleLog(e *logg.Entry) error {
for _, field := range e.Fields {
if field.Name == FieldNameStatementID {
if h.statements[field.Value.(string)] {
- return stopError
+ return errStop
}
}
}
return nil
}
-// replacer creates a new log handler that does string replacement in log messages.
-func replacer(repl *strings.Replacer) logg.Handler {
- return logg.HandlerFunc(func(e *logg.Entry) error {
- e.Message = repl.Replace(e.Message)
- for i, field := range e.Fields {
- if s, ok := field.Value.(string); ok {
- e.Fields[i].Value = repl.Replace(s)
- }
- }
- return nil
- })
-}
-
// whiteSpaceTrimmer creates a new log handler that trims whitespace from log messages and string fields.
func whiteSpaceTrimmer() logg.Handler {
return logg.HandlerFunc(func(e *logg.Entry) error {
diff --git a/common/loggers/handlerterminal.go b/common/loggers/handlerterminal.go
index e3d377bbfda..53f6e41da4d 100644
--- a/common/loggers/handlerterminal.go
+++ b/common/loggers/handlerterminal.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
// Some functions in this file (see comments) is based on the Go source code,
// copyright The Go Authors and governed by a BSD-style license.
//
@@ -81,7 +81,7 @@ func (h *noColoursHandler) HandleLog(e *logg.Entry) error {
if strings.HasPrefix(field.Name, reservedFieldNamePrefix) {
continue
}
- fmt.Fprintf(w, " %s %q", field.Name, field.Value)
+ fmt.Fprintf(w, " %s %v", field.Name, field.Value)
}
fmt.Fprintln(w)
diff --git a/common/loggers/logger.go b/common/loggers/logger.go
index bc64ae0e5b7..c4d81fb8305 100644
--- a/common/loggers/logger.go
+++ b/common/loggers/logger.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
// Some functions in this file (see comments) is based on the Go source code,
// copyright The Go Authors and governed by a BSD-style license.
//
@@ -68,11 +68,24 @@ func New(opts Options) Logger {
errorsw := &strings.Builder{}
logCounters := newLogLevelCounter()
handlers := []logg.Handler{
- whiteSpaceTrimmer(),
- logHandler,
logCounters,
}
+ if opts.Level == logg.LevelTrace {
+ // Trace is used during development only, and it's useful to
+ // only see the trace messages.
+ handlers = append(handlers,
+ logg.HandlerFunc(func(e *logg.Entry) error {
+ if e.Level != logg.LevelTrace {
+ return logg.ErrStopLogEntry
+ }
+ return nil
+ }),
+ )
+ }
+
+ handlers = append(handlers, whiteSpaceTrimmer(), logHandler)
+
if opts.HandlerPost != nil {
var hookHandler logg.HandlerFunc = func(e *logg.Entry) error {
opts.HandlerPost(e)
@@ -127,6 +140,7 @@ func New(opts Options) Logger {
out: opts.Stdout,
level: opts.Level,
logger: logger,
+ tracel: l.WithLevel(logg.LevelTrace),
debugl: l.WithLevel(logg.LevelDebug),
infol: l.WithLevel(logg.LevelInfo),
warnl: l.WithLevel(logg.LevelWarn),
@@ -145,11 +159,22 @@ func NewDefault() Logger {
return New(opts)
}
+func NewTrace() Logger {
+ opts := Options{
+ DistinctLevel: logg.LevelWarn,
+ Level: logg.LevelTrace,
+ Stdout: os.Stdout,
+ Stderr: os.Stdout,
+ }
+ return New(opts)
+}
+
func LevelLoggerToWriter(l logg.LevelLogger) io.Writer {
return logWriter{l: l}
}
type Logger interface {
+ Debug() logg.LevelLogger
Debugf(format string, v ...any)
Debugln(v ...any)
Error() logg.LevelLogger
@@ -174,6 +199,7 @@ type Logger interface {
Warnf(format string, v ...any)
Warnln(v ...any)
Deprecatef(fail bool, format string, v ...any)
+ Trace(s logg.StringFunc)
}
type logAdapter struct {
@@ -183,12 +209,17 @@ type logAdapter struct {
out io.Writer
level logg.Level
logger logg.Logger
+ tracel logg.LevelLogger
debugl logg.LevelLogger
infol logg.LevelLogger
warnl logg.LevelLogger
errorl logg.LevelLogger
}
+func (l *logAdapter) Debug() logg.LevelLogger {
+ return l.debugl
+}
+
func (l *logAdapter) Debugf(format string, v ...any) {
l.debugl.Logf(format, v...)
}
@@ -294,6 +325,10 @@ func (l *logAdapter) Errorsf(id, format string, v ...any) {
l.errorl.WithField(FieldNameStatementID, id).Logf(format, v...)
}
+func (l *logAdapter) Trace(s logg.StringFunc) {
+ l.tracel.Log(s)
+}
+
func (l *logAdapter) sprint(v ...any) string {
return strings.TrimRight(fmt.Sprintln(v...), "\n")
}
@@ -315,3 +350,19 @@ func (w logWriter) Write(p []byte) (n int, err error) {
w.l.Log(logg.String(string(p)))
return len(p), nil
}
+
+func TimeTrackf(l logg.LevelLogger, start time.Time, fields logg.Fields, format string, a ...any) {
+ elapsed := time.Since(start)
+ if fields != nil {
+ l = l.WithFields(fields)
+ }
+ l.WithField("duration", elapsed).Logf(format, a...)
+}
+
+func TimeTrackfn(fn func() (logg.LevelLogger, error)) error {
+ start := time.Now()
+ l, err := fn()
+ elapsed := time.Since(start)
+ l.WithField("duration", elapsed).Logf("")
+ return err
+}
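
A hedged sketch of the new trace level and TimeTrackf; the logger setup and field values are illustrative, and the trace call assumes logg.StringFunc is a plain func() string:

package main

import (
	"time"

	"github.com/bep/logg"
	"github.com/gohugoio/hugo/common/loggers"
)

func main() {
	l := loggers.NewDefault()

	// Log a duration field alongside a formatted message.
	start := time.Now()
	time.Sleep(10 * time.Millisecond)
	loggers.TimeTrackf(l.Info(), start, logg.Fields{{Name: "step", Value: "render"}}, "built %d pages", 42)

	// Trace takes a lazily evaluated string; with NewDefault the trace level
	// is filtered out, so the function is never called.
	l.Trace(func() string { return "expensive debug detail" })
}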
diff --git a/common/loggers/logger_test.go b/common/loggers/logger_test.go
index 6f589aafe99..dcf94b12313 100644
--- a/common/loggers/logger_test.go
+++ b/common/loggers/logger_test.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
// Some functions in this file (see comments) is based on the Go source code,
// copyright The Go Authors and governed by a BSD-style license.
//
diff --git a/common/loggers/loggerglobal.go b/common/loggers/loggerglobal.go
index 6fd474a6935..c3e2970d053 100644
--- a/common/loggers/loggerglobal.go
+++ b/common/loggers/loggerglobal.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
// Some functions in this file (see comments) is based on the Go source code,
// copyright The Go Authors and governed by a BSD-style license.
//
diff --git a/common/maps/cache.go b/common/maps/cache.go
new file mode 100644
index 00000000000..7e23a2662c6
--- /dev/null
+++ b/common/maps/cache.go
@@ -0,0 +1,90 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package maps
+
+import "sync"
+
+// Cache is a simple thread safe cache backed by a map.
+type Cache[K comparable, T any] struct {
+ m map[K]T
+ sync.RWMutex
+}
+
+// NewCache creates a new Cache.
+func NewCache[K comparable, T any]() *Cache[K, T] {
+ return &Cache[K, T]{m: make(map[K]T)}
+}
+
+// Get returns the value for the given key, and whether it was found.
+func (c *Cache[K, T]) Get(key K) (T, bool) {
+ c.RLock()
+ v, found := c.m[key]
+ c.RUnlock()
+ return v, found
+}
+
+// GetOrCreate gets the value for the given key if it exists, or creates it if not.
+func (c *Cache[K, T]) GetOrCreate(key K, create func() T) T {
+ c.RLock()
+ v, found := c.m[key]
+ c.RUnlock()
+ if found {
+ return v
+ }
+ c.Lock()
+ defer c.Unlock()
+ v, found = c.m[key]
+ if found {
+ return v
+ }
+ v = create()
+ c.m[key] = v
+ return v
+}
+
+// Set sets the given key to the given value.
+func (c *Cache[K, T]) Set(key K, value T) {
+ c.Lock()
+ c.m[key] = value
+ c.Unlock()
+}
+
+// SliceCache is a simple thread safe cache of slices backed by a map.
+type SliceCache[T any] struct {
+ m map[string][]T
+ sync.RWMutex
+}
+
+func NewSliceCache[T any]() *SliceCache[T] {
+ return &SliceCache[T]{m: make(map[string][]T)}
+}
+
+func (c *SliceCache[T]) Get(key string) ([]T, bool) {
+ c.RLock()
+ v, found := c.m[key]
+ c.RUnlock()
+ return v, found
+}
+
+func (c *SliceCache[T]) Append(key string, values ...T) {
+ c.Lock()
+ c.m[key] = append(c.m[key], values...)
+ c.Unlock()
+}
+
+func (c *SliceCache[T]) Reset() {
+ c.Lock()
+ c.m = make(map[string][]T)
+ c.Unlock()
+}
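
A minimal usage sketch of the new generic Cache; the key and value are illustrative:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/maps"
)

func main() {
	c := maps.NewCache[string, int]()

	// GetOrCreate only invokes the callback on a miss; the second call is a hit.
	v := c.GetOrCreate("answer", func() int { return 42 })
	fmt.Println(v) // 42
	v = c.GetOrCreate("answer", func() int { panic("not called") })
	fmt.Println(v) // 42

	if got, found := c.Get("answer"); found {
		fmt.Println(got) // 42
	}
}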
diff --git a/common/maps/maps.go b/common/maps/maps.go
index f0fd3d5ce9c..2686baad6dd 100644
--- a/common/maps/maps.go
+++ b/common/maps/maps.go
@@ -29,7 +29,7 @@ func ToStringMapE(in any) (map[string]any, error) {
case Params:
return vv, nil
case map[string]string:
- var m = map[string]any{}
+ m := map[string]any{}
for k, v := range vv {
m[k] = v
}
@@ -192,21 +192,20 @@ func (KeyRenamer) keyPath(k1, k2 string) string {
}
func (r KeyRenamer) renamePath(parentKeyPath string, m map[string]any) {
- for key, val := range m {
- keyPath := r.keyPath(parentKeyPath, key)
- switch val.(type) {
+ for k, v := range m {
+ keyPath := r.keyPath(parentKeyPath, k)
+ switch vv := v.(type) {
case map[any]any:
- val = cast.ToStringMap(val)
- r.renamePath(keyPath, val.(map[string]any))
+ r.renamePath(keyPath, cast.ToStringMap(vv))
case map[string]any:
- r.renamePath(keyPath, val.(map[string]any))
+ r.renamePath(keyPath, vv)
}
newKey := r.getNewKey(keyPath)
if newKey != "" {
- delete(m, key)
- m[newKey] = val
+ delete(m, k)
+ m[newKey] = v
}
}
}
diff --git a/common/maps/params.go b/common/maps/params.go
index d94d16f9d6c..a8cbba55550 100644
--- a/common/maps/params.go
+++ b/common/maps/params.go
@@ -61,7 +61,7 @@ func SetParams(dst, src Params) {
// IsZero returns true if p is considered empty.
func (p Params) IsZero() bool {
- if p == nil || len(p) == 0 {
+ if len(p) == 0 {
return true
}
@@ -74,7 +74,6 @@ func (p Params) IsZero() bool {
}
return false
-
}
// MergeParamsWithStrategy transfers values from src to dst for new keys using the merge strategy given.
@@ -93,7 +92,7 @@ func MergeParams(dst, src Params) {
func (p Params) merge(ps ParamsMergeStrategy, pp Params) {
ns, found := p.GetMergeStrategy()
- var ms = ns
+ ms := ns
if !found && ps != "" {
ms = ps
}
@@ -248,7 +247,7 @@ const (
// CleanConfigStringMapString removes any processing instructions from m,
// m will never be modified.
func CleanConfigStringMapString(m map[string]string) map[string]string {
- if m == nil || len(m) == 0 {
+ if len(m) == 0 {
return m
}
if _, found := m[MergeStrategyKey]; !found {
@@ -267,7 +266,7 @@ func CleanConfigStringMapString(m map[string]string) map[string]string {
// CleanConfigStringMap is the same as CleanConfigStringMapString but for
// map[string]any.
func CleanConfigStringMap(m map[string]any) map[string]any {
- if m == nil || len(m) == 0 {
+ if len(m) == 0 {
return m
}
if _, found := m[MergeStrategyKey]; !found {
@@ -291,7 +290,6 @@ func CleanConfigStringMap(m map[string]any) map[string]any {
}
return m2
-
}
func toMergeStrategy(v any) ParamsMergeStrategy {
diff --git a/common/paths/path.go b/common/paths/path.go
index 5d211c5e0bc..da99b16ac3c 100644
--- a/common/paths/path.go
+++ b/common/paths/path.go
@@ -16,14 +16,18 @@ package paths
import (
"errors"
"fmt"
+ "net/url"
"path"
"path/filepath"
- "regexp"
"strings"
+ "unicode"
)
// FilePathSeparator as defined by os.Separator.
-const FilePathSeparator = string(filepath.Separator)
+const (
+ FilePathSeparator = string(filepath.Separator)
+ slash = "/"
+)
// filepathPathBridge is a bridge for common functionality in filepath vs path
type filepathPathBridge interface {
@@ -72,6 +76,30 @@ func AbsPathify(workingDir, inPath string) string {
return filepath.Join(workingDir, inPath)
}
+// AddTrailingSlash adds a trailing Unix styled slash (/) if not already
+// there.
+func AddTrailingSlash(path string) string {
+ if !strings.HasSuffix(path, "/") {
+ path += "/"
+ }
+ return path
+}
+
+// AddLeadingSlash adds a leading Unix styled slash (/) if not already
+// there.
+func AddLeadingSlash(path string) string {
+ if !strings.HasPrefix(path, "/") {
+ path = "/" + path
+ }
+ return path
+}
+
+// AddLeadingAndTrailingSlash adds a leading and trailing Unix styled slash (/) if not already
+// there.
+func AddLeadingAndTrailingSlash(path string) string {
+ return AddTrailingSlash(AddLeadingSlash(path))
+}
+
// MakeTitle converts the path given to a suitable title, trimming whitespace
// and replacing hyphens with whitespace.
func MakeTitle(inpath string) string {
@@ -94,43 +122,6 @@ func makePathRelative(inPath string, possibleDirectories ...string) (string, err
return inPath, errors.New("can't extract relative path, unknown prefix")
}
-// Should be good enough for Hugo.
-var isFileRe = regexp.MustCompile(`.*\..{1,6}$`)
-
-// GetDottedRelativePath expects a relative path starting after the content directory.
-// It returns a relative path with dots ("..") navigating up the path structure.
-func GetDottedRelativePath(inPath string) string {
- inPath = path.Clean(filepath.ToSlash(inPath))
-
- if inPath == "." {
- return "./"
- }
-
- if !isFileRe.MatchString(inPath) && !strings.HasSuffix(inPath, "/") {
- inPath += "/"
- }
-
- if !strings.HasPrefix(inPath, "/") {
- inPath = "/" + inPath
- }
-
- dir, _ := filepath.Split(inPath)
-
- sectionCount := strings.Count(dir, "/")
-
- if sectionCount == 0 || dir == "/" {
- return "./"
- }
-
- var dottedPath string
-
- for i := 1; i < sectionCount; i++ {
- dottedPath += "../"
- }
-
- return dottedPath
-}
-
// ExtNoDelimiter takes a path and returns the extension, excluding the delimiter, i.e. "md".
func ExtNoDelimiter(in string) string {
return strings.TrimPrefix(Ext(in), ".")
@@ -167,12 +158,6 @@ func Filename(in string) (name string) {
return
}
-// PathNoExt takes a path, strips out the extension,
-// and returns the name of the file.
-func PathNoExt(in string) string {
- return strings.TrimSuffix(in, path.Ext(in))
-}
-
// FileAndExt returns the filename and any extension of a file path as
// two separate strings.
//
@@ -252,16 +237,125 @@ func prettifyPath(in string, b filepathPathBridge) string {
return b.Join(b.Dir(in), name, "index"+ext)
}
-type NamedSlice struct {
- Name string
- Slice []string
+// CommonDir returns the common directory of the given paths.
+func CommonDir(path1, path2 string) string {
+ if path1 == "" || path2 == "" {
+ return ""
+ }
+
+ p1 := strings.Split(path1, "/")
+ p2 := strings.Split(path2, "/")
+
+ var common []string
+
+ for i := 0; i < len(p1) && i < len(p2); i++ {
+ if p1[i] == p2[i] {
+ common = append(common, p1[i])
+ } else {
+ break
+ }
+ }
+
+ return strings.Join(common, "/")
+}
+
+// Sanitize sanitizes the string to be used in Hugo's file paths and URLs, allowing only
+// a predefined set of special Unicode characters.
+//
+// Spaces will be replaced with a single hyphen.
+//
+// This function is the core function used to normalize paths in Hugo.
+//
+// Note that this is the first common step of URL/path sanitization;
+// the final URL/path may end up looking different if the user has stricter rules defined (e.g. removePathAccents=true).
+func Sanitize(s string) string {
+ var willChange bool
+ for i, r := range s {
+ willChange = !isAllowedPathCharacter(s, i, r)
+ if willChange {
+ break
+ }
+ }
+
+ if !willChange {
+ // Prevent allocation when nothing changes.
+ return s
+ }
+
+ target := make([]rune, 0, len(s))
+ var (
+ prependHyphen bool
+ wasHyphen bool
+ )
+
+ for i, r := range s {
+ isAllowed := isAllowedPathCharacter(s, i, r)
+
+ if isAllowed {
+ // track explicit hyphen in input; no need to add a new hyphen if
+ // we just saw one.
+ wasHyphen = r == '-'
+
+ if prependHyphen {
+ // if currently have a hyphen, don't prepend an extra one
+ if !wasHyphen {
+ target = append(target, '-')
+ }
+ prependHyphen = false
+ }
+ target = append(target, r)
+ } else if len(target) > 0 && !wasHyphen && unicode.IsSpace(r) {
+ prependHyphen = true
+ }
+ }
+
+ return string(target)
+}
+
+func isAllowedPathCharacter(s string, i int, r rune) bool {
+ if r == ' ' {
+ return false
+ }
+ // Check for the most likely first (faster).
+ isAllowed := unicode.IsLetter(r) || unicode.IsDigit(r)
+ isAllowed = isAllowed || r == '.' || r == '/' || r == '\\' || r == '_' || r == '#' || r == '+' || r == '~' || r == '-' || r == '@'
+ isAllowed = isAllowed || unicode.IsMark(r)
+ isAllowed = isAllowed || (r == '%' && i+2 < len(s) && ishex(s[i+1]) && ishex(s[i+2]))
+ return isAllowed
}
-func (n NamedSlice) String() string {
- if len(n.Slice) == 0 {
- return n.Name
+// From https://golang.org/src/net/url/url.go
+func ishex(c byte) bool {
+ switch {
+ case '0' <= c && c <= '9':
+ return true
+ case 'a' <= c && c <= 'f':
+ return true
+ case 'A' <= c && c <= 'F':
+ return true
}
- return fmt.Sprintf("%s%s{%s}", n.Name, FilePathSeparator, strings.Join(n.Slice, ","))
+ return false
+}
+
+var slashFunc = func(r rune) bool {
+ return r == '/'
+}
+
+// Dir behaves like path.Dir without the path.Clean step.
+//
+// The returned path ends in a slash only if it is the root "/".
+func Dir(s string) string {
+ dir, _ := path.Split(s)
+ if len(dir) > 1 && dir[len(dir)-1] == '/' {
+ return dir[:len(dir)-1]
+ }
+ return dir
+}
+
+// FieldsSlash cuts s into fields separated with '/'.
+func FieldsSlash(s string) []string {
+ f := strings.FieldsFunc(s, slashFunc)
+ return f
}
// DirFile holds the result from path.Split.
@@ -274,3 +368,27 @@ type DirFile struct {
func (df DirFile) String() string {
return fmt.Sprintf("%s|%s", df.Dir, df.File)
}
+
+// PathEscape escapes unicode letters in pth.
+// Use URLEscape to escape full URLs including scheme, query etc.
+// This is slightly faster for the common case.
+// Note, there is a url.PathEscape function, but that also
+// escapes /.
+func PathEscape(pth string) string {
+ u, err := url.Parse(pth)
+ if err != nil {
+ panic(err)
+ }
+ return u.EscapedPath()
+}
+
+// ToSlashTrimLeading is just a filepath.ToSlash with an added / prefix trimmer.
+func ToSlashTrimLeading(s string) string {
+ return strings.TrimPrefix(filepath.ToSlash(s), "/")
+}
+
+// ToSlashPreserveLeading converts the given path to a forward slash separated path
+// with a leading slash and no trailing slash.
+func ToSlashPreserveLeading(s string) string {
+ return "/" + strings.Trim(filepath.ToSlash(s), "/")
+}
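
A short sketch of a few of the path helpers added above; inputs are illustrative:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/paths"
)

func main() {
	// CommonDir returns the shared leading directory of two slash-separated paths.
	fmt.Println(paths.CommonDir("/docs/a/b", "/docs/a/c")) // "/docs/a"

	// Dir behaves like path.Dir but without the Clean step.
	fmt.Println(paths.Dir("/a/b/c.txt")) // "/a/b"

	// Slash helpers normalize leading/trailing slashes.
	fmt.Println(paths.AddLeadingAndTrailingSlash("docs")) // "/docs/"
}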
diff --git a/common/paths/path_test.go b/common/paths/path_test.go
index 2400f16ab60..3605bfc4330 100644
--- a/common/paths/path_test.go
+++ b/common/paths/path_test.go
@@ -1,4 +1,4 @@
-// Copyright 2021 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -75,44 +75,6 @@ func TestMakePathRelative(t *testing.T) {
}
}
-func TestGetDottedRelativePath(t *testing.T) {
- // on Windows this will receive both kinds, both country and western ...
- for _, f := range []func(string) string{filepath.FromSlash, func(s string) string { return s }} {
- doTestGetDottedRelativePath(f, t)
- }
-}
-
-func doTestGetDottedRelativePath(urlFixer func(string) string, t *testing.T) {
- type test struct {
- input, expected string
- }
- data := []test{
- {"", "./"},
- {urlFixer("/"), "./"},
- {urlFixer("post"), "../"},
- {urlFixer("/post"), "../"},
- {urlFixer("post/"), "../"},
- {urlFixer("tags/foo.html"), "../"},
- {urlFixer("/tags/foo.html"), "../"},
- {urlFixer("/post/"), "../"},
- {urlFixer("////post/////"), "../"},
- {urlFixer("/foo/bar/index.html"), "../../"},
- {urlFixer("/foo/bar/foo/"), "../../../"},
- {urlFixer("/foo/bar/foo"), "../../../"},
- {urlFixer("foo/bar/foo/"), "../../../"},
- {urlFixer("foo/bar/foo/bar"), "../../../../"},
- {"404.html", "./"},
- {"404.xml", "./"},
- {"/404.html", "./"},
- }
- for i, d := range data {
- output := GetDottedRelativePath(d.input)
- if d.expected != output {
- t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
- }
- }
-}
-
func TestMakeTitle(t *testing.T) {
type test struct {
input, expected string
@@ -226,3 +188,77 @@ func TestFileAndExt(t *testing.T) {
}
}
}
+
+func TestSanitize(t *testing.T) {
+ c := qt.New(t)
+ tests := []struct {
+ input string
+ expected string
+ }{
+ {" Foo bar ", "Foo-bar"},
+ {"Foo.Bar/foo_Bar-Foo", "Foo.Bar/foo_Bar-Foo"},
+ {"fOO,bar:foobAR", "fOObarfoobAR"},
+ {"FOo/BaR.html", "FOo/BaR.html"},
+ {"FOo/Ba---R.html", "FOo/Ba---R.html"}, /// See #10104
+ {"FOo/Ba R.html", "FOo/Ba-R.html"},
+ {"трям/трям", "трям/трям"},
+ {"은행", "은행"},
+ {"Банковский кассир", "Банковский-кассир"},
+ // Issue #1488
+ {"संस्कृत", "संस्कृत"},
+ {"a%C3%B1ame", "a%C3%B1ame"}, // Issue #1292
+ {"this+is+a+test", "this+is+a+test"}, // Issue #1290
+ {"~foo", "~foo"}, // Issue #2177
+
+ }
+
+ for _, test := range tests {
+ c.Assert(Sanitize(test.input), qt.Equals, test.expected)
+ }
+}
+
+func BenchmarkSanitize(b *testing.B) {
+ const (
+ allAlowedPath = "foo/bar"
+ spacePath = "foo bar"
+ )
+
+ // This should not allocate any memory.
+ b.Run("All allowed", func(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ got := Sanitize(allAlowedPath)
+ if got != allAlowedPath {
+ b.Fatal(got)
+ }
+ }
+ })
+
+ // This will allocate some memory.
+ b.Run("Spaces", func(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ got := Sanitize(spacePath)
+ if got != "foo-bar" {
+ b.Fatal(got)
+ }
+ }
+ })
+}
+
+func TestDir(t *testing.T) {
+ c := qt.New(t)
+ c.Assert(Dir("/a/b/c/d"), qt.Equals, "/a/b/c")
+ c.Assert(Dir("/a"), qt.Equals, "/")
+ c.Assert(Dir("/"), qt.Equals, "/")
+ c.Assert(Dir(""), qt.Equals, "")
+}
+
+func TestFieldsSlash(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(FieldsSlash("a/b/c"), qt.DeepEquals, []string{"a", "b", "c"})
+ c.Assert(FieldsSlash("/a/b/c"), qt.DeepEquals, []string{"a", "b", "c"})
+ c.Assert(FieldsSlash("/a/b/c/"), qt.DeepEquals, []string{"a", "b", "c"})
+ c.Assert(FieldsSlash("a/b/c/"), qt.DeepEquals, []string{"a", "b", "c"})
+ c.Assert(FieldsSlash("/"), qt.DeepEquals, []string{})
+ c.Assert(FieldsSlash(""), qt.DeepEquals, []string{})
+}
diff --git a/common/paths/pathparser.go b/common/paths/pathparser.go
new file mode 100644
index 00000000000..842d9307b06
--- /dev/null
+++ b/common/paths/pathparser.go
@@ -0,0 +1,494 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package paths
+
+import (
+ "path"
+ "path/filepath"
+ "runtime"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/hugofs/files"
+)
+
+var defaultPathParser PathParser
+
+// PathParser parses a path into a Path.
+type PathParser struct {
+ // Maps the language code to its index in the languages/sites slice.
+ LanguageIndex map[string]int
+}
+
+// Parse parses component c with path s into Path using the default path parser.
+func Parse(c, s string) *Path {
+ return defaultPathParser.Parse(c, s)
+}
+
+// NormalizePathStringBasic returns a normalized path string using the very basic Hugo rules.
+func NormalizePathStringBasic(s string) string {
+ // All lower case.
+ s = strings.ToLower(s)
+
+ // Replace spaces with hyphens.
+ s = strings.ReplaceAll(s, " ", "-")
+
+ return s
+}
+
+// Parse parses component c with path s into Path using Hugo's content path rules.
+func (parser PathParser) Parse(c, s string) *Path {
+ p, err := parser.parse(c, s)
+ if err != nil {
+ panic(err)
+ }
+ return p
+}
+
+func (pp *PathParser) parse(component, s string) (*Path, error) {
+ ss := NormalizePathStringBasic(s)
+
+ p, err := pp.doParse(component, ss)
+ if err != nil {
+ return nil, err
+ }
+
+ if s != ss {
+ var err error
+ // Preserve the original case for titles etc.
+ p.unnormalized, err = pp.doParse(component, s)
+
+ if err != nil {
+ return nil, err
+ }
+ } else {
+ p.unnormalized = p
+ }
+
+ return p, nil
+}
+
+func (pp *PathParser) doParse(component, s string) (*Path, error) {
+ p := &Path{
+ component: component,
+ posContainerLow: -1,
+ posContainerHigh: -1,
+ posSectionHigh: -1,
+ posIdentifierLanguage: -1,
+ }
+
+ hasLang := pp.LanguageIndex != nil
+ hasLang = hasLang && (component == files.ComponentFolderContent || component == files.ComponentFolderLayouts)
+
+ if runtime.GOOS == "windows" {
+ s = path.Clean(filepath.ToSlash(s))
+ if s == "." {
+ s = ""
+ }
+ }
+
+ if s == "" {
+ s = "/"
+ }
+
+ // Leading slash, no trailing slash.
+ if !strings.HasPrefix(s, "/") {
+ s = "/" + s
+ }
+
+ if s != "/" && s[len(s)-1] == '/' {
+ s = s[:len(s)-1]
+ }
+
+ p.s = s
+ slashCount := 0
+
+ for i := len(s) - 1; i >= 0; i-- {
+ c := s[i]
+
+ switch c {
+ case '.':
+ if p.posContainerHigh == -1 {
+ var high int
+ if len(p.identifiers) > 0 {
+ high = p.identifiers[len(p.identifiers)-1].Low - 1
+ } else {
+ high = len(p.s)
+ }
+ id := types.LowHigh{Low: i + 1, High: high}
+ if len(p.identifiers) == 0 {
+ p.identifiers = append(p.identifiers, id)
+ } else if len(p.identifiers) == 1 {
+ // Check for a valid language.
+ s := p.s[id.Low:id.High]
+
+ if hasLang {
+ if _, found := pp.LanguageIndex[s]; found {
+ p.posIdentifierLanguage = 1
+ p.identifiers = append(p.identifiers, id)
+ }
+ }
+ }
+ }
+ case '/':
+ slashCount++
+ if p.posContainerHigh == -1 {
+ p.posContainerHigh = i + 1
+ } else if p.posContainerLow == -1 {
+ p.posContainerLow = i + 1
+ }
+ if i > 0 {
+ p.posSectionHigh = i
+ }
+ }
+ }
+
+ isContentComponent := p.component == files.ComponentFolderContent || p.component == files.ComponentFolderArchetypes
+ isContent := isContentComponent && files.IsContentExt(p.Ext())
+
+ if isContent {
+ id := p.identifiers[len(p.identifiers)-1]
+ b := p.s[p.posContainerHigh : id.Low-1]
+ switch b {
+ case "index":
+ p.bundleType = PathTypeLeaf
+ case "_index":
+ p.bundleType = PathTypeBranch
+ default:
+ p.bundleType = PathTypeContentSingle
+ }
+
+ if slashCount == 2 && p.IsLeafBundle() {
+ p.posSectionHigh = 0
+ }
+ }
+
+ return p, nil
+}
+
+func ModifyPathBundleTypeResource(p *Path) {
+ if p.IsContent() {
+ p.bundleType = PathTypeContentResource
+ } else {
+ p.bundleType = PathTypeFile
+ }
+}
+
+type PathType int
+
+const (
+ // A generic resource, e.g. a JSON file.
+ PathTypeFile PathType = iota
+
+ // All below are content files.
+ // A resource of a content type with front matter.
+ PathTypeContentResource
+
+ // E.g. /blog/my-post.md
+ PathTypeContentSingle
+
+ // All below are bundled content files.
+
+ // Leaf bundles, e.g. /blog/my-post/index.md
+ PathTypeLeaf
+
+ // Branch bundles, e.g. /blog/_index.md
+ PathTypeBranch
+)
+
+type Path struct {
+ s string
+
+ posContainerLow int
+ posContainerHigh int
+ posSectionHigh int
+
+ component string
+ bundleType PathType
+
+ identifiers []types.LowHigh
+
+ posIdentifierLanguage int
+
+ trimLeadingSlash bool
+
+ unnormalized *Path
+}
+
+// TrimLeadingSlash returns a copy of the Path with the leading slash removed.
+func (p Path) TrimLeadingSlash() *Path {
+ p.trimLeadingSlash = true
+ return &p
+}
+
+func (p *Path) norm(s string) string {
+ if p.trimLeadingSlash {
+ s = strings.TrimPrefix(s, "/")
+ }
+ return s
+}
+
+// IdentifierBase satisfies identity.Identity.
+func (p *Path) IdentifierBase() string {
+ return p.Base()[1:]
+}
+
+// Component returns the component for this path (e.g. "content").
+func (p *Path) Component() string {
+ return p.component
+}
+
+// Container returns the base name of the container directory for this path.
+func (p *Path) Container() string {
+ if p.posContainerLow == -1 {
+ return ""
+ }
+ return p.norm(p.s[p.posContainerLow : p.posContainerHigh-1])
+}
+
+// ContainerDir returns the container directory for this path.
+// For content bundles this will be the parent directory.
+func (p *Path) ContainerDir() string {
+ if p.posContainerLow == -1 || !p.IsBundle() {
+ return p.Dir()
+ }
+ return p.norm(p.s[:p.posContainerLow-1])
+}
+
+// Section returns the first path element (section).
+func (p *Path) Section() string {
+ if p.posSectionHigh <= 0 {
+ return ""
+ }
+ return p.norm(p.s[1:p.posSectionHigh])
+}
+
+// IsContent returns true if the path is a content file (e.g. mypost.md).
+// Note that this will also return true for content files in a bundle.
+func (p *Path) IsContent() bool {
+ return p.BundleType() >= PathTypeContentResource
+}
+
+// isContentPage returns true if the path is a content file (e.g. mypost.md),
+// but not if inside a leaf bundle.
+func (p *Path) isContentPage() bool {
+ return p.BundleType() >= PathTypeContentSingle
+}
+
+// Name returns the last element of path.
+func (p *Path) Name() string {
+ if p.posContainerHigh > 0 {
+ return p.s[p.posContainerHigh:]
+ }
+ return p.s
+}
+
+// NameNoExt returns the last element of the path without any extension.
+func (p *Path) NameNoExt() string {
+ if i := p.identifierIndex(0); i != -1 {
+ return p.s[p.posContainerHigh : p.identifiers[i].Low-1]
+ }
+ return p.s[p.posContainerHigh:]
+}
+
+// NameNoLang returns the last element of the path without any language identifier.
+func (p *Path) NameNoLang() string {
+ i := p.identifierIndex(p.posIdentifierLanguage)
+ if i == -1 {
+ return p.Name()
+ }
+
+ return p.s[p.posContainerHigh:p.identifiers[i].Low-1] + p.s[p.identifiers[i].High:]
+}
+
+// BaseNameNoIdentifier returns the logical base name for a resource without any identifier (e.g. no extension).
+// For bundles this will be the containing directory's name, e.g. "blog".
+func (p *Path) BaseNameNoIdentifier() string {
+ if p.IsBundle() {
+ return p.Container()
+ }
+ return p.NameNoIdentifier()
+}
+
+// NameNoIdentifier returns the last element of the path without any identifier (e.g. no extension).
+func (p *Path) NameNoIdentifier() string {
+ if len(p.identifiers) > 0 {
+ return p.s[p.posContainerHigh : p.identifiers[len(p.identifiers)-1].Low-1]
+ }
+ return p.s[p.posContainerHigh:]
+}
+
+// Dir returns all but the last element of path, typically the path's directory.
+func (p *Path) Dir() (d string) {
+ if p.posContainerHigh > 0 {
+ d = p.s[:p.posContainerHigh-1]
+ }
+ if d == "" {
+ d = "/"
+ }
+ d = p.norm(d)
+ return
+}
+
+// Path returns the full path.
+func (p *Path) Path() (d string) {
+ return p.norm(p.s)
+}
+
+// Unmormalized returns the Path with the original case preserved.
+func (p *Path) Unmormalized() *Path {
+ return p.unnormalized
+}
+
+// PathNoLang returns the Path but with any language identifier removed.
+func (p *Path) PathNoLang() string {
+ return p.base(true, false)
+}
+
+// PathNoIdentifier returns the Path but with any identifier (ext, lang) removed.
+func (p *Path) PathNoIdentifier() string {
+ return p.base(false, false)
+}
+
+// PathRel returns the path relative to the given owner.
+func (p *Path) PathRel(owner *Path) string {
+ ob := owner.Base()
+ if !strings.HasSuffix(ob, "/") {
+ ob += "/"
+ }
+ return strings.TrimPrefix(p.Path(), ob)
+}
+
+// BaseRel returns the base path relative to the given owner.
+func (p *Path) BaseRel(owner *Path) string {
+ ob := owner.Base()
+ if ob == "/" {
+ ob = ""
+ }
+ return p.Base()[len(ob)+1:]
+}
+
+// For content files, Base returns the path without any identifiers (extension, language code etc.).
+// Any 'index' as the last path element is ignored.
+//
+// For other files (Resources), any extension is kept.
+func (p *Path) Base() string {
+ return p.base(!p.isContentPage(), p.IsBundle())
+}
+
+// BaseNoLeadingSlash returns the base path without the leading slash.
+func (p *Path) BaseNoLeadingSlash() string {
+ return p.Base()[1:]
+}
+
+func (p *Path) base(preserveExt, isBundle bool) string {
+ if len(p.identifiers) == 0 {
+ return p.norm(p.s)
+ }
+
+ if preserveExt && len(p.identifiers) == 1 {
+ // Preserve extension.
+ return p.norm(p.s)
+ }
+
+ id := p.identifiers[len(p.identifiers)-1]
+ high := id.Low - 1
+
+ if isBundle {
+ high = p.posContainerHigh - 1
+ }
+
+ if high == 0 {
+ high++
+ }
+
+ if !preserveExt {
+ return p.norm(p.s[:high])
+ }
+
+ // For txt files etc. we want to preserve the extension.
+ id = p.identifiers[0]
+
+ return p.norm(p.s[:high] + p.s[id.Low-1:id.High])
+}
+
+func (p *Path) Ext() string {
+ return p.identifierAsString(0)
+}
+
+func (p *Path) Lang() string {
+ return p.identifierAsString(1)
+}
+
+func (p *Path) Identifier(i int) string {
+ return p.identifierAsString(i)
+}
+
+func (p *Path) Identifiers() []string {
+ ids := make([]string, len(p.identifiers))
+ for i, id := range p.identifiers {
+ ids[i] = p.s[id.Low:id.High]
+ }
+ return ids
+}
+
+func (p *Path) IsHTML() bool {
+ return files.IsHTML(p.Ext())
+}
+
+func (p *Path) BundleType() PathType {
+ return p.bundleType
+}
+
+func (p *Path) IsBundle() bool {
+ return p.bundleType >= PathTypeLeaf
+}
+
+func (p *Path) IsBranchBundle() bool {
+ return p.bundleType == PathTypeBranch
+}
+
+func (p *Path) IsLeafBundle() bool {
+ return p.bundleType == PathTypeLeaf
+}
+
+func (p *Path) identifierAsString(i int) string {
+ i = p.identifierIndex(i)
+ if i == -1 {
+ return ""
+ }
+
+ id := p.identifiers[i]
+ return p.s[id.Low:id.High]
+}
+
+func (p *Path) identifierIndex(i int) int {
+ if i < 0 || i >= len(p.identifiers) {
+ return -1
+ }
+ return i
+}
+
+// HasExt returns true if the Unix styled path has an extension.
+func HasExt(p string) bool {
+ for i := len(p) - 1; i >= 0; i-- {
+ if p[i] == '.' {
+ return true
+ }
+ if p[i] == '/' {
+ return false
+ }
+ }
+ return false
+}
diff --git a/common/paths/pathparser_test.go b/common/paths/pathparser_test.go
new file mode 100644
index 00000000000..3546b66050e
--- /dev/null
+++ b/common/paths/pathparser_test.go
@@ -0,0 +1,351 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package paths
+
+import (
+ "path/filepath"
+ "testing"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ qt "github.com/frankban/quicktest"
+)
+
+var testParser = &PathParser{
+ LanguageIndex: map[string]int{
+ "no": 0,
+ "en": 1,
+ },
+}
+
+func TestParse(t *testing.T) {
+ c := qt.New(t)
+
+ tests := []struct {
+ name string
+ path string
+ assert func(c *qt.C, p *Path)
+ }{
+ {
+ "Basic text file",
+ "/a/b.txt",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Name(), qt.Equals, "b.txt")
+ c.Assert(p.Base(), qt.Equals, "/a/b.txt")
+ c.Assert(p.Container(), qt.Equals, "a")
+ c.Assert(p.Dir(), qt.Equals, "/a")
+ c.Assert(p.Ext(), qt.Equals, "txt")
+ c.Assert(p.IsContent(), qt.IsFalse)
+ },
+ },
+ {
+ "Basic text file, upper case",
+ "/A/B.txt",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Name(), qt.Equals, "b.txt")
+ c.Assert(p.NameNoExt(), qt.Equals, "b")
+ c.Assert(p.NameNoIdentifier(), qt.Equals, "b")
+ c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "b")
+ c.Assert(p.Base(), qt.Equals, "/a/b.txt")
+ c.Assert(p.Ext(), qt.Equals, "txt")
+ },
+ },
+ {
+ "Basic text file, 1 space in dir",
+ "/a b/c.txt",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Base(), qt.Equals, "/a-b/c.txt")
+ },
+ },
+ {
+ "Basic text file, 2 spaces in dir",
+ "/a b/c.txt",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Base(), qt.Equals, "/a--b/c.txt")
+ },
+ },
+ {
+ "Basic text file, 1 space in filename",
+ "/a/b c.txt",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Base(), qt.Equals, "/a/b-c.txt")
+ },
+ },
+ {
+ "Basic text file, 2 spaces in filename",
+ "/a/b c.txt",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Base(), qt.Equals, "/a/b--c.txt")
+ },
+ },
+ {
+ "Basic text file, mixed case and spaces, unnormalized",
+ "/a/Foo BAR.txt",
+ func(c *qt.C, p *Path) {
+ pp := p.Unmormalized()
+ c.Assert(pp, qt.IsNotNil)
+ c.Assert(pp.BaseNameNoIdentifier(), qt.Equals, "Foo BAR")
+ },
+ },
+ {
+ "Basic Markdown file",
+ "/a/b/c.md",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.IsContent(), qt.IsTrue)
+ c.Assert(p.IsLeafBundle(), qt.IsFalse)
+ c.Assert(p.Name(), qt.Equals, "c.md")
+ c.Assert(p.Base(), qt.Equals, "/a/b/c")
+ c.Assert(p.Section(), qt.Equals, "a")
+ c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "c")
+ c.Assert(p.Path(), qt.Equals, "/a/b/c.md")
+ c.Assert(p.Dir(), qt.Equals, "/a/b")
+ c.Assert(p.Container(), qt.Equals, "b")
+ c.Assert(p.ContainerDir(), qt.Equals, "/a/b")
+ c.Assert(p.Ext(), qt.Equals, "md")
+ },
+ },
+ {
+ "Content resource",
+ "/a/b.md",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Name(), qt.Equals, "b.md")
+ c.Assert(p.Base(), qt.Equals, "/a/b")
+ c.Assert(p.BaseNoLeadingSlash(), qt.Equals, "a/b")
+ c.Assert(p.Section(), qt.Equals, "a")
+ c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "b")
+
+ // Reclassify it as a content resource.
+ ModifyPathBundleTypeResource(p)
+ c.Assert(p.BundleType(), qt.Equals, PathTypeContentResource)
+ c.Assert(p.IsContent(), qt.IsTrue)
+ c.Assert(p.Name(), qt.Equals, "b.md")
+ c.Assert(p.Base(), qt.Equals, "/a/b.md")
+ },
+ },
+ {
+ "No ext",
+ "/a/b",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Name(), qt.Equals, "b")
+ c.Assert(p.NameNoExt(), qt.Equals, "b")
+ c.Assert(p.Base(), qt.Equals, "/a/b")
+ c.Assert(p.Ext(), qt.Equals, "")
+ },
+ },
+ {
+ "No ext, trailing slash",
+ "/a/b/",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Name(), qt.Equals, "b")
+ c.Assert(p.Base(), qt.Equals, "/a/b")
+ c.Assert(p.Ext(), qt.Equals, "")
+ },
+ },
+ {
+ "Identifiers",
+ "/a/b.a.b.no.txt",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Name(), qt.Equals, "b.a.b.no.txt")
+ c.Assert(p.NameNoIdentifier(), qt.Equals, "b.a.b")
+ c.Assert(p.NameNoLang(), qt.Equals, "b.a.b.txt")
+ c.Assert(p.Identifiers(), qt.DeepEquals, []string{"txt", "no"})
+ c.Assert(p.Base(), qt.Equals, "/a/b.a.b.txt")
+ c.Assert(p.BaseNoLeadingSlash(), qt.Equals, "a/b.a.b.txt")
+ c.Assert(p.PathNoLang(), qt.Equals, "/a/b.a.b.txt")
+ c.Assert(p.Ext(), qt.Equals, "txt")
+ c.Assert(p.PathNoIdentifier(), qt.Equals, "/a/b.a.b")
+ },
+ },
+ {
+ "Home branch cundle",
+ "/_index.md",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Base(), qt.Equals, "/")
+ c.Assert(p.Path(), qt.Equals, "/_index.md")
+ c.Assert(p.Container(), qt.Equals, "")
+ c.Assert(p.ContainerDir(), qt.Equals, "/")
+ },
+ },
+ {
+ "Index content file in root",
+ "/a/index.md",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Base(), qt.Equals, "/a")
+ c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "a")
+ c.Assert(p.Container(), qt.Equals, "a")
+ c.Assert(p.ContainerDir(), qt.Equals, "")
+ c.Assert(p.Dir(), qt.Equals, "/a")
+ c.Assert(p.Ext(), qt.Equals, "md")
+ c.Assert(p.Identifiers(), qt.DeepEquals, []string{"md"})
+ c.Assert(p.IsBranchBundle(), qt.IsFalse)
+ c.Assert(p.IsBundle(), qt.IsTrue)
+ c.Assert(p.IsLeafBundle(), qt.IsTrue)
+ c.Assert(p.Lang(), qt.Equals, "")
+ c.Assert(p.NameNoExt(), qt.Equals, "index")
+ c.Assert(p.NameNoIdentifier(), qt.Equals, "index")
+ c.Assert(p.NameNoLang(), qt.Equals, "index.md")
+ c.Assert(p.Section(), qt.Equals, "")
+ },
+ },
+ {
+ "Index content file with lang",
+ "/a/b/index.no.md",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Base(), qt.Equals, "/a/b")
+ c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "b")
+ c.Assert(p.Container(), qt.Equals, "b")
+ c.Assert(p.ContainerDir(), qt.Equals, "/a")
+ c.Assert(p.Dir(), qt.Equals, "/a/b")
+ c.Assert(p.Ext(), qt.Equals, "md")
+ c.Assert(p.Identifiers(), qt.DeepEquals, []string{"md", "no"})
+ c.Assert(p.IsBranchBundle(), qt.IsFalse)
+ c.Assert(p.IsBundle(), qt.IsTrue)
+ c.Assert(p.IsLeafBundle(), qt.IsTrue)
+ c.Assert(p.Lang(), qt.Equals, "no")
+ c.Assert(p.NameNoExt(), qt.Equals, "index.no")
+ c.Assert(p.NameNoIdentifier(), qt.Equals, "index")
+ c.Assert(p.NameNoLang(), qt.Equals, "index.md")
+ c.Assert(p.PathNoLang(), qt.Equals, "/a/b/index.md")
+ c.Assert(p.Section(), qt.Equals, "a")
+ },
+ },
+ {
+ "Index branch content file",
+ "/a/b/_index.no.md",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Base(), qt.Equals, "/a/b")
+ c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "b")
+ c.Assert(p.Container(), qt.Equals, "b")
+ c.Assert(p.ContainerDir(), qt.Equals, "/a")
+ c.Assert(p.Ext(), qt.Equals, "md")
+ c.Assert(p.Identifiers(), qt.DeepEquals, []string{"md", "no"})
+ c.Assert(p.IsBranchBundle(), qt.IsTrue)
+ c.Assert(p.IsBundle(), qt.IsTrue)
+ c.Assert(p.IsLeafBundle(), qt.IsFalse)
+ c.Assert(p.NameNoExt(), qt.Equals, "_index.no")
+ c.Assert(p.NameNoLang(), qt.Equals, "_index.md")
+ },
+ },
+ {
+ "Index root no slash",
+ "_index.md",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Base(), qt.Equals, "/")
+ c.Assert(p.Ext(), qt.Equals, "md")
+ c.Assert(p.Name(), qt.Equals, "_index.md")
+ },
+ },
+ {
+ "Index root",
+ "/_index.md",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Base(), qt.Equals, "/")
+ c.Assert(p.Ext(), qt.Equals, "md")
+ c.Assert(p.Name(), qt.Equals, "_index.md")
+ },
+ },
+ {
+ "Index first",
+ "/a/_index.md",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Section(), qt.Equals, "a")
+ },
+ },
+ {
+ "Index text file",
+ "/a/b/index.no.txt",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Base(), qt.Equals, "/a/b/index.txt")
+ c.Assert(p.Ext(), qt.Equals, "txt")
+ c.Assert(p.Identifiers(), qt.DeepEquals, []string{"txt", "no"})
+ c.Assert(p.IsLeafBundle(), qt.IsFalse)
+ c.Assert(p.PathNoIdentifier(), qt.Equals, "/a/b/index")
+ },
+ },
+ {
+ "Empty",
+ "",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Base(), qt.Equals, "/")
+ c.Assert(p.Ext(), qt.Equals, "")
+ c.Assert(p.Name(), qt.Equals, "")
+ c.Assert(p.Path(), qt.Equals, "/")
+ },
+ },
+ {
+ "Slash",
+ "/",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Base(), qt.Equals, "/")
+ c.Assert(p.Ext(), qt.Equals, "")
+ c.Assert(p.Name(), qt.Equals, "")
+ },
+ },
+ {
+ "Trim Leading Slash bundle",
+ "foo/bar/index.no.md",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Path(), qt.Equals, "/foo/bar/index.no.md")
+ pp := p.TrimLeadingSlash()
+ c.Assert(pp.Path(), qt.Equals, "foo/bar/index.no.md")
+ c.Assert(pp.PathNoLang(), qt.Equals, "foo/bar/index.md")
+ c.Assert(pp.Base(), qt.Equals, "foo/bar")
+ c.Assert(pp.Dir(), qt.Equals, "foo/bar")
+ c.Assert(pp.ContainerDir(), qt.Equals, "foo")
+ c.Assert(pp.Container(), qt.Equals, "bar")
+ c.Assert(pp.BaseNameNoIdentifier(), qt.Equals, "bar")
+ },
+ },
+ {
+ "Trim Leading Slash file",
+ "foo/bar.txt",
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Path(), qt.Equals, "/foo/bar.txt")
+ pp := p.TrimLeadingSlash()
+ c.Assert(pp.Path(), qt.Equals, "foo/bar.txt")
+ c.Assert(pp.PathNoLang(), qt.Equals, "foo/bar.txt")
+ c.Assert(pp.Base(), qt.Equals, "foo/bar.txt")
+ c.Assert(pp.Dir(), qt.Equals, "foo")
+ c.Assert(pp.ContainerDir(), qt.Equals, "foo")
+ c.Assert(pp.Container(), qt.Equals, "foo")
+ c.Assert(pp.BaseNameNoIdentifier(), qt.Equals, "bar")
+ },
+ },
+ {
+ "File separator",
+ filepath.FromSlash("/a/b/c.txt"),
+ func(c *qt.C, p *Path) {
+ c.Assert(p.Base(), qt.Equals, "/a/b/c.txt")
+ c.Assert(p.Ext(), qt.Equals, "txt")
+ c.Assert(p.Name(), qt.Equals, "c.txt")
+ c.Assert(p.Path(), qt.Equals, "/a/b/c.txt")
+ },
+ },
+ }
+ for _, test := range tests {
+ c.Run(test.name, func(c *qt.C) {
+ test.assert(c, testParser.Parse(files.ComponentFolderContent, test.path))
+ })
+ }
+}
+
+func TestHasExt(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(HasExt("/a/b/c.txt"), qt.IsTrue)
+ c.Assert(HasExt("/a/b.c/d.txt"), qt.IsTrue)
+ c.Assert(HasExt("/a/b/c"), qt.IsFalse)
+ c.Assert(HasExt("/a/b.c/d"), qt.IsFalse)
+}
diff --git a/common/paths/paths_integration_test.go b/common/paths/paths_integration_test.go
new file mode 100644
index 00000000000..62d40f52702
--- /dev/null
+++ b/common/paths/paths_integration_test.go
@@ -0,0 +1,80 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package paths_test
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+func TestRemovePathAccents(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- hugo.toml --
+disableKinds = ["taxonomy", "term"]
+defaultContentLanguage = "en"
+defaultContentLanguageInSubdir = true
+[languages]
+[languages.en]
+weight = 1
+[languages.fr]
+weight = 2
+removePathAccents = true
+-- content/διακριτικός.md --
+-- content/διακριτικός.fr.md --
+-- layouts/_default/single.html --
+{{ .Language.Lang }}|Single.
+-- layouts/_default/list.html --
+List
+`
+ b := hugolib.Test(t, files)
+
+ b.AssertFileContent("public/en/διακριτικός/index.html", "en|Single")
+ b.AssertFileContent("public/fr/διακριτικος/index.html", "fr|Single")
+}
+
+func TestDisablePathToLower(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- hugo.toml --
+disableKinds = ["taxonomy", "term"]
+defaultContentLanguage = "en"
+defaultContentLanguageInSubdir = true
+[languages]
+[languages.en]
+weight = 1
+[languages.fr]
+weight = 2
+disablePathToLower = true
+-- content/MySection/MyPage.md --
+-- content/MySection/MyPage.fr.md --
+-- content/MySection/MyBundle/index.md --
+-- content/MySection/MyBundle/index.fr.md --
+-- layouts/_default/single.html --
+{{ .Language.Lang }}|Single.
+-- layouts/_default/list.html --
+{{ .Language.Lang }}|List.
+`
+ b := hugolib.Test(t, files)
+
+ b.AssertFileContent("public/en/mysection/index.html", "en|List")
+ b.AssertFileContent("public/en/mysection/mypage/index.html", "en|Single")
+ b.AssertFileContent("public/fr/MySection/index.html", "fr|List")
+ b.AssertFileContent("public/fr/MySection/MyPage/index.html", "fr|Single")
+ b.AssertFileContent("public/en/mysection/mybundle/index.html", "en|Single")
+ b.AssertFileContent("public/fr/MySection/MyBundle/index.html", "fr|Single")
+}
diff --git a/common/paths/pathtype_string.go b/common/paths/pathtype_string.go
new file mode 100644
index 00000000000..7a99f8a03dd
--- /dev/null
+++ b/common/paths/pathtype_string.go
@@ -0,0 +1,27 @@
+// Code generated by "stringer -type=PathType"; DO NOT EDIT.
+
+package paths
+
+import "strconv"
+
+func _() {
+ // An "invalid array index" compiler error signifies that the constant values have changed.
+ // Re-run the stringer command to generate them again.
+ var x [1]struct{}
+ _ = x[PathTypeFile-0]
+ _ = x[PathTypeContentResource-1]
+ _ = x[PathTypeContentSingle-2]
+ _ = x[PathTypeLeaf-3]
+ _ = x[PathTypeBranch-4]
+}
+
+const _PathType_name = "PathTypeFilePathTypeContentResourcePathTypeContentSinglePathTypeLeafPathTypeBranch"
+
+var _PathType_index = [...]uint8{0, 12, 35, 56, 68, 82}
+
+func (i PathType) String() string {
+ if i < 0 || i >= PathType(len(_PathType_index)-1) {
+ return "PathType(" + strconv.FormatInt(int64(i), 10) + ")"
+ }
+ return _PathType_name[_PathType_index[i]:_PathType_index[i+1]]
+}
diff --git a/common/paths/url.go b/common/paths/url.go
index 093ba9ff7fa..4c4a7f2dc9c 100644
--- a/common/paths/url.go
+++ b/common/paths/url.go
@@ -184,3 +184,13 @@ func UrlToFilename(s string) (string, bool) {
return p, true
}
+
+// URLEscape escapes unicode letters in the given URI; it panics if the URI cannot be parsed.
+func URLEscape(uri string) string {
+ // escape unicode letters
+ u, err := url.Parse(uri)
+ if err != nil {
+ panic(err)
+ }
+ return u.String()
+}
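A small usage sketch for URLEscape (illustrative only). The exact percent-encoding is whatever net/url produces when the parsed URL is re-serialized, and, as the code above shows, the function panics if the input cannot be parsed.

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/paths"
)

func main() {
	// Unicode letters in the path are percent-encoded on output.
	fmt.Println(paths.URLEscape("https://example.com/διακριτικός/"))

	// Already-escaped input is parsed and re-serialized, not double-escaped.
	fmt.Println(paths.URLEscape("https://example.com/fo%20o/"))
}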
diff --git a/common/predicate/predicate.go b/common/predicate/predicate.go
new file mode 100644
index 00000000000..f9cb1bb2b38
--- /dev/null
+++ b/common/predicate/predicate.go
@@ -0,0 +1,72 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package predicate
+
+// P is a predicate function that tests whether a value of type T satisfies some condition.
+type P[T any] func(T) bool
+
+// And returns a predicate that is a short-circuiting logical AND of this and the given predicates.
+func (p P[T]) And(ps ...P[T]) P[T] {
+ return func(v T) bool {
+ for _, pp := range ps {
+ if !pp(v) {
+ return false
+ }
+ }
+ return p(v)
+ }
+}
+
+// Or returns a predicate that is a short-circuiting logical OR of this and the given predicates.
+func (p P[T]) Or(ps ...P[T]) P[T] {
+ return func(v T) bool {
+ for _, pp := range ps {
+ if pp(v) {
+ return true
+ }
+ }
+ return p(v)
+ }
+}
+
+// Negate returns a predicate that is a logical negation of this predicate.
+func (p P[T]) Negate() P[T] {
+ return func(v T) bool {
+ return !p(v)
+ }
+}
+
+// Filter returns a new slice holding only the elements of s that satisfy p.
+// Filter modifies the contents of the slice s and returns the modified slice, which may have a smaller length.
+func (p P[T]) Filter(s []T) []T {
+ var n int
+ for _, v := range s {
+ if p(v) {
+ s[n] = v
+ n++
+ }
+ }
+ return s[:n]
+}
+
+// FilterCopy returns a new slice holding only the elements of s that satisfy p.
+func (p P[T]) FilterCopy(s []T) []T {
+ var result []T
+ for _, v := range s {
+ if p(v) {
+ result = append(result, v)
+ }
+ }
+ return result
+}
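An illustrative sketch of composing predicates (not part of the change set). Note that both And and Or evaluate the predicates passed as arguments first and the receiver last, and that Filter mutates its input while FilterCopy does not.

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/predicate"
)

func main() {
	var isPositive predicate.P[int] = func(i int) bool { return i > 0 }
	var isEven predicate.P[int] = func(i int) bool { return i%2 == 0 }

	positiveAndEven := isPositive.And(isEven)
	positiveOrEven := isPositive.Or(isEven)

	fmt.Println(positiveAndEven(4), positiveAndEven(3)) // true false
	fmt.Println(positiveOrEven(-2), positiveOrEven(-3)) // true false

	// FilterCopy leaves the input slice untouched.
	nums := []int{-2, -1, 0, 1, 2}
	fmt.Println(positiveAndEven.FilterCopy(nums)) // [2]
	fmt.Println(nums)                             // [-2 -1 0 1 2]
}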
diff --git a/common/predicate/predicate_test.go b/common/predicate/predicate_test.go
new file mode 100644
index 00000000000..1e1ec004b89
--- /dev/null
+++ b/common/predicate/predicate_test.go
@@ -0,0 +1,83 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package predicate_test
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/common/predicate"
+)
+
+func TestAdd(t *testing.T) {
+ c := qt.New(t)
+
+ var p predicate.P[int] = intP1
+
+ c.Assert(p(1), qt.IsTrue)
+ c.Assert(p(2), qt.IsFalse)
+
+ neg := p.Negate()
+ c.Assert(neg(1), qt.IsFalse)
+ c.Assert(neg(2), qt.IsTrue)
+
+ and := p.And(intP2)
+ c.Assert(and(1), qt.IsFalse)
+ c.Assert(and(2), qt.IsFalse)
+ c.Assert(and(10), qt.IsTrue)
+
+ or := p.Or(intP2)
+ c.Assert(or(1), qt.IsTrue)
+ c.Assert(or(2), qt.IsTrue)
+ c.Assert(or(10), qt.IsTrue)
+ c.Assert(or(11), qt.IsFalse)
+}
+
+func TestFilter(t *testing.T) {
+ c := qt.New(t)
+
+ var p predicate.P[int] = intP1
+ p = p.Or(intP2)
+
+ ints := []int{1, 2, 3, 4, 1, 6, 7, 8, 2}
+
+ c.Assert(p.Filter(ints), qt.DeepEquals, []int{1, 2, 1, 2})
+ c.Assert(ints, qt.DeepEquals, []int{1, 2, 1, 2, 1, 6, 7, 8, 2})
+}
+
+func TestFilterCopy(t *testing.T) {
+ c := qt.New(t)
+
+ var p predicate.P[int] = intP1
+ p = p.Or(intP2)
+
+ ints := []int{1, 2, 3, 4, 1, 6, 7, 8, 2}
+
+ c.Assert(p.FilterCopy(ints), qt.DeepEquals, []int{1, 2, 1, 2})
+ c.Assert(ints, qt.DeepEquals, []int{1, 2, 3, 4, 1, 6, 7, 8, 2})
+}
+
+var intP1 = func(i int) bool {
+ if i == 10 {
+ return true
+ }
+ return i == 1
+}
+
+var intP2 = func(i int) bool {
+ if i == 10 {
+ return true
+ }
+ return i == 2
+}
diff --git a/common/rungroup/rungroup.go b/common/rungroup/rungroup.go
new file mode 100644
index 00000000000..96ec57883c5
--- /dev/null
+++ b/common/rungroup/rungroup.go
@@ -0,0 +1,93 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package rungroup
+
+import (
+ "context"
+
+ "golang.org/x/sync/errgroup"
+)
+
+// Group is a group of workers that can be used to enqueue work items and
+// wait for them to be handled.
+type Group[T any] interface {
+ Enqueue(T) error
+ Wait() error
+}
+
+type runGroup[T any] struct {
+ ctx context.Context
+ g *errgroup.Group
+ ch chan T
+}
+
+// Config is the configuration for a new Group.
+type Config[T any] struct {
+ NumWorkers int
+ Handle func(context.Context, T) error
+}
+
+// Run creates a new Group with the given configuration.
+func Run[T any](ctx context.Context, cfg Config[T]) Group[T] {
+ if cfg.NumWorkers <= 0 {
+ cfg.NumWorkers = 1
+ }
+ if cfg.Handle == nil {
+ panic("Handle must be set")
+ }
+
+ g, ctx := errgroup.WithContext(ctx)
+ // Buffered for performance.
+ ch := make(chan T, cfg.NumWorkers)
+
+ for i := 0; i < cfg.NumWorkers; i++ {
+ g.Go(func() error {
+ for {
+ select {
+ case <-ctx.Done():
+ return nil
+ case v, ok := <-ch:
+ if !ok {
+ return nil
+ }
+ if err := cfg.Handle(ctx, v); err != nil {
+ return err
+ }
+ }
+ }
+ })
+ }
+
+ return &runGroup[T]{
+ ctx: ctx,
+ g: g,
+ ch: ch,
+ }
+}
+
+// Enqueue enqueues a new item to be handled by the workers.
+func (r *runGroup[T]) Enqueue(t T) error {
+ select {
+ case <-r.ctx.Done():
+ return nil
+ case r.ch <- t:
+ }
+ return nil
+}
+
+// Wait waits for all workers to finish and returns the first error.
+func (r *runGroup[T]) Wait() error {
+ close(r.ch)
+ return r.g.Wait()
+}
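A usage sketch (illustrative only; the handler and values are made up). Two properties follow directly from the code above: the first handler error cancels the shared context via errgroup, and Enqueue returns nil without enqueuing once that context is done, so late items can be dropped silently.

package main

import (
	"context"
	"fmt"

	"github.com/gohugoio/hugo/common/rungroup"
)

func main() {
	g := rungroup.Run[int](context.Background(), rungroup.Config[int]{
		NumWorkers: 4,
		Handle: func(ctx context.Context, i int) error {
			if i < 0 {
				return fmt.Errorf("negative value: %d", i)
			}
			return nil
		},
	})

	for _, v := range []int{1, 2, 3, -1, 4} {
		_ = g.Enqueue(v)
	}

	// Wait closes the channel, drains it, and returns the first handler error.
	if err := g.Wait(); err != nil {
		fmt.Println("rungroup failed:", err)
	}
}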
diff --git a/common/rungroup/rungroup_test.go b/common/rungroup/rungroup_test.go
new file mode 100644
index 00000000000..ac902079e9c
--- /dev/null
+++ b/common/rungroup/rungroup_test.go
@@ -0,0 +1,44 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package rungroup
+
+import (
+ "context"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestNew(t *testing.T) {
+ c := qt.New(t)
+
+ var result int
+ adder := func(ctx context.Context, i int) error {
+ result += i
+ return nil
+ }
+
+ g := Run[int](
+ context.Background(),
+ Config[int]{
+ Handle: adder,
+ },
+ )
+
+ c.Assert(g, qt.IsNotNil)
+ g.Enqueue(32)
+ g.Enqueue(33)
+ c.Assert(g.Wait(), qt.IsNil)
+ c.Assert(result, qt.Equals, 65)
+}
diff --git a/common/terminal/colors.go b/common/terminal/colors.go
index c4a78291ed4..8aa0e1af2a8 100644
--- a/common/terminal/colors.go
+++ b/common/terminal/colors.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/common/types/css/csstypes.go b/common/types/css/csstypes.go
index a31df00e768..061acfe647e 100644
--- a/common/types/css/csstypes.go
+++ b/common/types/css/csstypes.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/common/types/evictingqueue.go b/common/types/evictingqueue.go
index 8847624265d..88add59d539 100644
--- a/common/types/evictingqueue.go
+++ b/common/types/evictingqueue.go
@@ -35,11 +35,11 @@ func NewEvictingStringQueue(size int) *EvictingStringQueue {
}
// Add adds a new string to the tail of the queue if it's not already there.
-func (q *EvictingStringQueue) Add(v string) {
+func (q *EvictingStringQueue) Add(v string) *EvictingStringQueue {
q.mu.Lock()
if q.set[v] {
q.mu.Unlock()
- return
+ return q
}
if len(q.set) == q.size {
@@ -50,6 +50,17 @@ func (q *EvictingStringQueue) Add(v string) {
q.set[v] = true
q.vals = append(q.vals, v)
q.mu.Unlock()
+
+ return q
+}
+
+func (q *EvictingStringQueue) Len() int {
+ if q == nil {
+ return 0
+ }
+ q.mu.Lock()
+ defer q.mu.Unlock()
+ return len(q.vals)
}
// Contains returns whether the queue contains v.
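The hunk above makes Add chainable and Len nil-safe; a minimal sketch of the resulting API (illustrative only):

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/types"
)

func main() {
	// Add now returns the queue, so calls can be chained; duplicates are ignored.
	q := types.NewEvictingStringQueue(2).Add("a").Add("b").Add("a")
	fmt.Println(q.Len()) // 2

	// Len is safe to call on a nil queue.
	var nilQueue *types.EvictingStringQueue
	fmt.Println(nilQueue.Len()) // 0
}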
diff --git a/common/types/hstring/stringtypes.go b/common/types/hstring/stringtypes.go
index 601218e0ed4..5e8e3a23dbf 100644
--- a/common/types/hstring/stringtypes.go
+++ b/common/types/hstring/stringtypes.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/common/types/hstring/stringtypes_test.go b/common/types/hstring/stringtypes_test.go
index 8fa1c9760de..2f1f865c8b5 100644
--- a/common/types/hstring/stringtypes_test.go
+++ b/common/types/hstring/stringtypes_test.go
@@ -1,4 +1,4 @@
-// Copyright 2022 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/common/types/types.go b/common/types/types.go
index c36c51b3e63..11683c196bc 100644
--- a/common/types/types.go
+++ b/common/types/types.go
@@ -92,5 +92,18 @@ type DevMarker interface {
DevOnly()
}
+// Unwrapper is implemented by types that can unwrap themselves.
+type Unwrapper interface {
+ // Unwrapv is for internal use only.
+ // It got its slightly odd name to prevent collisions with user types.
+ Unwrapv() any
+}
+
+// LowHigh is typically used to represent a slice boundary.
+type LowHigh struct {
+ Low int
+ High int
+}
+
// This is only used for debugging purposes.
var InvocationCounter atomic.Int64
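LowHigh is a half-open [Low, High) boundary into a backing string or slice; that is how the path parser earlier in this change records identifier positions (p.s[id.Low:id.High]). A tiny illustrative sketch:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/types"
)

func main() {
	s := "/a/b.no.md"
	ext := types.LowHigh{Low: 8, High: 10} // "md"
	lang := types.LowHigh{Low: 5, High: 7} // "no"

	fmt.Println(s[ext.Low:ext.High], s[lang.Low:lang.High]) // md no
}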
diff --git a/common/urls/baseURL.go b/common/urls/baseURL.go
index df26730eccb..2958a2a0464 100644
--- a/common/urls/baseURL.go
+++ b/common/urls/baseURL.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -23,10 +23,12 @@ import (
// A BaseURL in Hugo is normally on the form scheme://path, but the
// form scheme: is also valid (mailto:hugo@rules.com).
type BaseURL struct {
- url *url.URL
- WithPath string
- WithoutPath string
- BasePath string
+ url *url.URL
+ WithPath string
+ WithPathNoTrailingSlash string
+ WithoutPath string
+ BasePath string
+ BasePathNoTrailingSlash string
}
func (b BaseURL) String() string {
@@ -92,19 +94,19 @@ func NewBaseURLFromString(b string) (BaseURL, error) {
return BaseURL{}, err
}
return newBaseURLFromURL(u)
-
}
func newBaseURLFromURL(u *url.URL) (BaseURL, error) {
- baseURL := BaseURL{url: u, WithPath: u.String()}
- var baseURLNoPath = baseURL.URL()
+ // A baseURL should always have a trailing slash, see #11669.
+ if !strings.HasSuffix(u.Path, "/") {
+ u.Path += "/"
+ }
+ baseURL := BaseURL{url: u, WithPath: u.String(), WithPathNoTrailingSlash: strings.TrimSuffix(u.String(), "/")}
+ baseURLNoPath := baseURL.URL()
baseURLNoPath.Path = ""
baseURL.WithoutPath = baseURLNoPath.String()
-
- basePath := u.Path
- if basePath != "" && basePath != "/" {
- baseURL.BasePath = basePath
- }
+ baseURL.BasePath = u.Path
+ baseURL.BasePathNoTrailingSlash = strings.TrimSuffix(u.Path, "/")
return baseURL, nil
}
diff --git a/common/urls/baseURL_test.go b/common/urls/baseURL_test.go
index 95dc7333974..ba337aac84c 100644
--- a/common/urls/baseURL_test.go
+++ b/common/urls/baseURL_test.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -21,17 +21,24 @@ import (
func TestBaseURL(t *testing.T) {
c := qt.New(t)
- b, err := NewBaseURLFromString("http://example.com")
+
+ b, err := NewBaseURLFromString("http://example.com/")
+ c.Assert(err, qt.IsNil)
+ c.Assert(b.String(), qt.Equals, "http://example.com/")
+
+ b, err = NewBaseURLFromString("http://example.com")
c.Assert(err, qt.IsNil)
- c.Assert(b.String(), qt.Equals, "http://example.com")
+ c.Assert(b.String(), qt.Equals, "http://example.com/")
+ c.Assert(b.WithPathNoTrailingSlash, qt.Equals, "http://example.com")
+ c.Assert(b.BasePath, qt.Equals, "/")
p, err := b.WithProtocol("webcal://")
c.Assert(err, qt.IsNil)
- c.Assert(p.String(), qt.Equals, "webcal://example.com")
+ c.Assert(p.String(), qt.Equals, "webcal://example.com/")
p, err = b.WithProtocol("webcal")
c.Assert(err, qt.IsNil)
- c.Assert(p.String(), qt.Equals, "webcal://example.com")
+ c.Assert(p.String(), qt.Equals, "webcal://example.com/")
_, err = b.WithProtocol("mailto:")
c.Assert(err, qt.Not(qt.IsNil))
@@ -57,11 +64,18 @@ func TestBaseURL(t *testing.T) {
b, err = NewBaseURLFromString("")
c.Assert(err, qt.IsNil)
- c.Assert(b.String(), qt.Equals, "")
+ c.Assert(b.String(), qt.Equals, "/")
// BaseURL with sub path
b, err = NewBaseURLFromString("http://example.com/sub")
c.Assert(err, qt.IsNil)
- c.Assert(b.String(), qt.Equals, "http://example.com/sub")
+ c.Assert(b.String(), qt.Equals, "http://example.com/sub/")
+ c.Assert(b.WithPathNoTrailingSlash, qt.Equals, "http://example.com/sub")
+ c.Assert(b.BasePath, qt.Equals, "/sub/")
+ c.Assert(b.BasePathNoTrailingSlash, qt.Equals, "/sub")
+
+ b, err = NewBaseURLFromString("http://example.com/sub/")
+ c.Assert(err, qt.IsNil)
+ c.Assert(b.String(), qt.Equals, "http://example.com/sub/")
c.Assert(b.HostURL(), qt.Equals, "http://example.com")
}
diff --git a/compare/compare.go b/compare/compare.go
index 67bb1c1256d..fd15bd0874e 100644
--- a/compare/compare.go
+++ b/compare/compare.go
@@ -52,3 +52,16 @@ func Eq(v1, v2 any) bool {
return v1 == v2
}
+
+// ProbablyEq returns whether v1 is probably equal to v2.
+func ProbablyEq(v1, v2 any) bool {
+ if Eq(v1, v2) {
+ return true
+ }
+
+ if peqer, ok := v1.(ProbablyEqer); ok {
+ return peqer.ProbablyEq(v2)
+ }
+
+ return false
+}
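A sketch of how ProbablyEq falls back to the ProbablyEqer interface (declared elsewhere in this package) when strict equality fails. The cachedValue type below is hypothetical and exists only to illustrate the call path; it assumes ProbablyEqer has the single method ProbablyEq(any) bool used above.

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/compare"
)

// cachedValue treats two values with the same version as "probably equal",
// even if the payload differs. Hypothetical type, for illustration only.
type cachedValue struct {
	version int
	payload string
}

func (c cachedValue) ProbablyEq(other any) bool {
	o, ok := other.(cachedValue)
	return ok && o.version == c.version
}

func main() {
	a := cachedValue{version: 1, payload: "old"}
	b := cachedValue{version: 1, payload: "new"}

	fmt.Println(compare.Eq(a, b))         // false: payloads differ
	fmt.Println(compare.ProbablyEq(a, b)) // true: same version
}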
diff --git a/config/allconfig/allconfig.go b/config/allconfig/allconfig.go
index 9f0d73ecda3..5788e792bdb 100644
--- a/config/allconfig/allconfig.go
+++ b/config/allconfig/allconfig.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -30,6 +30,7 @@ import (
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/common/urls"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/config/privacy"
@@ -283,12 +284,13 @@ func (c *Config) CompileConfig(logger loggers.Logger) error {
disabledLangs := make(map[string]bool)
for _, lang := range c.DisableLanguages {
- if lang == c.DefaultContentLanguage {
- return fmt.Errorf("cannot disable default content language %q", lang)
- }
disabledLangs[lang] = true
}
for lang, language := range c.Languages {
+ if !language.Disabled && disabledLangs[lang] {
+ language.Disabled = true
+ c.Languages[lang] = language
+ }
if language.Disabled {
disabledLangs[lang] = true
if lang == c.DefaultContentLanguage {
@@ -408,15 +410,19 @@ type ConfigCompiled struct {
}
// This may be set after the config is compiled.
-func (c *ConfigCompiled) SetMainSectionsIfNotSet(sections []string) {
+func (c *ConfigCompiled) SetMainSections(sections []string) {
c.mu.Lock()
defer c.mu.Unlock()
- if c.MainSections != nil {
- return
- }
c.MainSections = sections
}
+// IsMainSectionsSet returns whether the main sections have been set.
+func (c *ConfigCompiled) IsMainSectionsSet() bool {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ return c.MainSections != nil
+}
+
// This is set after the config is compiled by the server command.
func (c *ConfigCompiled) SetBaseURL(baseURL, baseURLLiveReload urls.BaseURL) {
c.BaseURL = baseURL
@@ -425,7 +431,6 @@ func (c *ConfigCompiled) SetBaseURL(baseURL, baseURLLiveReload urls.BaseURL) {
// RootConfig holds all the top-level configuration options in Hugo
type RootConfig struct {
-
// The base URL of the site.
// Note that the default value is empty, but Hugo requires a valid URL (e.g. "https://example.com/") to work properly.
//
") - closingPTag = []byte("
") - paragraphIndicator = []byte("") + closingPTag = []byte("
") ) // ContentSpec provides functionality to render markdown content. type ContentSpec struct { Converters markup.ConverterProvider anchorNameSanitizer converter.AnchorNameSanitizer - getRenderer func(t hooks.RendererType, id any) any - - Cfg config.AllProvider + Cfg config.AllProvider } // NewContentSpec returns a ContentSpec initialized diff --git a/helpers/content_test.go b/helpers/content_test.go index 72e3eeb495a..e2bf501d2dc 100644 --- a/helpers/content_test.go +++ b/helpers/content_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -24,8 +24,6 @@ import ( "github.com/gohugoio/hugo/helpers" ) -const tstHTMLContent = "This is some text.
And some more.
Home Content.
\n|HTML", + "Site last mod: 2001-01-01", + "Home last mod: 2001-01-01", + "Translations: 1|", + "Len home.RegularPagesRecursive: 2|", + "Len site.RegularPages: 2|", + "Len site.Pages: 8|", + "Len site.AllPages: 16|", + "GetPage: /en/posts/p1/|Post 1|", + "RenderString with shortcode: Hello.|", + "Paginate: 1/2|", ) - - b.WithTemplates( - "_default/list.html", "HTML: List"+commonPageTemplate+commonListTemplate+"|First Site: {{ .Sites.First.Title }}", - "_default/list.json", "JSON: List"+commonPageTemplate+commonListTemplateNoPaginator, - "_default/list.csv", "CSV: List"+commonPageTemplate+commonListTemplateNoPaginator, - "_default/single.html", "HTML: Single"+commonPageTemplate+prevNextTemplate+prevNextInSectionTemplate+treeNavTemplate, - "_default/single.json", "JSON: Single"+commonPageTemplate, - - // For .Render test - "_default/li.html", `HTML: LI|{{ strings.Contains .Content "HTML: Shortcode: sc" }}`+paramsTemplate, - "_default/li.json", `JSON: LI|{{ strings.Contains .Content "JSON: Shortcode: sc" }}`+paramsTemplate, - "_default/li.csv", `CSV: LI|{{ strings.Contains .Content "CSV: Shortcode: sc" }}`+paramsTemplate, - - "404.html", "{{ .Kind }}|{{ .Title }}|Page not found", - - "shortcodes/sc.html", "HTML: Shortcode: "+commonShortcodeTemplate, - "shortcodes/sc.json", "JSON: Shortcode: "+commonShortcodeTemplate, - "shortcodes/sc.csv", "CSV: Shortcode: "+commonShortcodeTemplate, + b.AssertFileContent("public/en/page/2/index.html", "Paginate: 2/2|") + + b.AssertFileContent("public/no/index.html", + "Home: no|home|/no/|Hjem|Hjem Innhold.
\n|HTML", + "Site last mod: 2002-02-02", + "Home last mod: 2002-02-02", + "Translations: 1", + "GetPage: /no/posts/p1/|Post 1 no|", ) - b.CreateSites().Build(BuildCfg{}) + b.AssertFileContent("public/en/index.json", "Home:en|home|/en/|Home in English|Home Content.
\n|JSON") + b.AssertFileContent("public/no/index.json", "Home:no|home|/no/|Hjem|Hjem Innhold.
\n|JSON") - b.AssertFileContent("public/blog/page1/index.html", - "This is content with some shortcodes.", - "Page with outputs", - "Pages: Pages(0)", - "RelPermalink: /blog/page1/|", - "Shortcode 1: HTML: Shortcode: |sc|0|||WordCount: 0.", - "Shortcode 2: HTML: Shortcode: |sc|1|||WordCount: 0.", - "Prev: /blog/page10/|Next: /blog/mybundle/", - "PrevInSection: /blog/page10/|NextInSection: /blog/mybundle/", - "Summary: This is summary.", - "CurrentSection: Page(/blog)", + b.AssertFileContent("public/en/posts/p1/index.html", + "Single: en|page|/en/posts/p1/|Post 1|Content 1.
\n|Len Resources: 2|", + "Resources: text|/en/posts/p1/f1.txt|text/plain|map[icon:enicon] - page||application/octet-stream|map[draft:false iscjklanguage:false title:Post Sub 1] -", + "Icon: enicon", + "Icon fingerprinted: enicon|/en/posts/p1/f1.e5746577af5cbfc4f34c558051b7955a9a5a795a84f1c6ab0609cb3473a924cb.txt|", + "NextInSection: |\nPrevInSection: /en/posts/p2/|Post 2|", + "GetTerms: name: tag1, title: Tag 1|", ) - b.AssertFileContent("public/blog/page1/index.json", - "JSON: Single|page|Page with outputs|", - "SON: Shortcode: |sc|0||") - - b.AssertFileContent("public/index.html", - "home|In English", - "Site params: Rules", - "Pages: Pages(6)|Data Pages: Pages(6)", - "Paginator: 1", - "First Site: In English", - "RelPermalink: /", + b.AssertFileContent("public/no/posts/p1/index.html", + "Resources: 1", + "Resources: text|/en/posts/p1/f1.txt|text/plain|map[icon:noicon] -", + "Icon: noicon", + "Icon fingerprinted: noicon|/en/posts/p1/f1.e5746577af5cbfc4f34c558051b7955a9a5a795a84f1c6ab0609cb3473a924cb.txt|", + "Background: post.jpg", + "NextInSection: |\nPrevInSection: /no/posts/p2/|Post 2 No|", ) - b.AssertFileContent("public/no/index.html", "home|På norsk", "RelPermalink: /no/") - - // Check RSS - rssHome := b.FileContent("public/index.xml") - c.Assert(rssHome, qt.Contains, `Content 2.
\n|", + "|Len Resources: 0", + "GetTerms: name: tag1, title: Tag 1|name: tag3, title: Tag3|", ) + b.AssertFileContent("public/no/posts/p2/index.html", "Single: no|page|/no/posts/p2/|Post 2 No|Content 2 No.
\n|") - // Check bundled resources - b.AssertFileContent( - "public/blog/mybundle/index.html", - "Resources: 1", + b.AssertFileContent("public/no/categories/index.html", + "Kind: taxonomy", + "Type: categories", ) - - // Check pages in root section - b.AssertFileContent( - "public/root3/index.html", - "Single|page|Page with outputs|root3.md|", - "Prev: /root4/|Next: /root2/|PrevInSection: /root4/|NextInSection: /root2/", + b.AssertFileContent("public/no/tags/index.html", + "Kind: taxonomy", + "Type: tags", ) - b.AssertFileContent( - "public/root3/index.json", "Shortcode 1: JSON:") - - // Paginators - b.AssertFileContent("public/page/1/index.html", `rel="canonical" href="https://example.com/"`) - b.AssertFileContent("public/page/2/index.html", "HTML: List|home|In English|", "Paginator: 2") + b.AssertFileContent("public/no/tags/tag1/index.html", + "Background: term.jpg", + "Kind: term", + "Type: tags", + "Paginate: 1/1|", + ) - // 404 - b.AssertFileContent("public/404.html", "404|404 Page not found") + b.AssertFileContent("public/en/tags/tag1/index.html", + "Kind: term", + "Type: tags", + "Paginate: 1/2|", + ) +} - // Sitemaps - b.AssertFileContent("public/en/sitemap.xml", "This is summary.
|", + "Truncated: true|", + "Content:This is summary.
\nThis is content.
|", + ) +} + +func TestSummaryManualSplitHTML(t *testing.T) { + t.Parallel() + Test(t, ` +-- hugo.toml -- +-- content/simple.html -- +--- +title: Simple +--- +This is summary.") +} + // #2973 func TestSummaryWithHTMLTagsOnNextLine(t *testing.T) { assertFunc := func(t *testing.T, ext string, pages page.Pages) { @@ -1190,26 +1315,89 @@ func TestPagePaths(t *testing.T) { } func TestTranslationKey(t *testing.T) { - t.Parallel() - c := qt.New(t) - cfg, fs := newTestCfg() - configs, err := loadTestConfigFromProvider(cfg) - c.Assert(err, qt.IsNil) + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +-- content/sect/p1.en.md -- +--- +translationkey: "adfasdf" +title: "p1 en" +--- +-- content/sect/p1.nn.md -- +--- +translationkey: "adfasdf" +title: "p1 nn" +--- +-- layouts/_default/single.html -- +Title: {{ .Title }}|TranslationKey: {{ .TranslationKey }}| +Translations: {{ range .Translations }}{{ .Language.Lang }}|{{ end }}| +AllTranslations: {{ range .AllTranslations }}{{ .Language.Lang }}|{{ end }}| - writeSource(t, fs, filepath.Join("content", filepath.FromSlash("sect/simple.no.md")), "---\ntitle: \"A1\"\ntranslationKey: \"k1\"\n---\nContent\n") - writeSource(t, fs, filepath.Join("content", filepath.FromSlash("sect/simple.en.md")), "---\ntitle: \"A2\"\n---\nContent\n") +` + b := Test(t, files) + b.AssertFileContent("public/en/sect/p1/index.html", + "TranslationKey: adfasdf|", + "AllTranslations: en|nn||", + "Translations: nn||", + ) - s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Configs: configs}, BuildCfg{SkipRender: true}) + b.AssertFileContent("public/nn/sect/p1/index.html", + "TranslationKey: adfasdf|", + "Translations: en||", + "AllTranslations: en|nn||", + ) +} - c.Assert(len(s.RegularPages()), qt.Equals, 2) +// Issue #11540. 
+func TestTranslationKeyResourceSharing(t *testing.T) { + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +-- content/sect/mybundle_en/index.en.md -- +--- +translationkey: "adfasdf" +title: "mybundle en" +--- +-- content/sect/mybundle_en/f1.txt -- +f1.en +-- content/sect/mybundle_en/f2.txt -- +f2.en +-- content/sect/mybundle_nn/index.nn.md -- +--- +translationkey: "adfasdf" +title: "mybundle nn" +--- +-- content/sect/mybundle_nn/f2.nn.txt -- +f2.nn +-- layouts/_default/single.html -- +Title: {{ .Title }}|TranslationKey: {{ .TranslationKey }}| +Resources: {{ range .Resources }}{{ .RelPermalink }}|{{ .Content }}|{{ end }}| - home := s.Home() - c.Assert(home, qt.Not(qt.IsNil)) - c.Assert(home.TranslationKey(), qt.Equals, "home") - c.Assert(s.RegularPages()[0].TranslationKey(), qt.Equals, "page/k1") - p2 := s.RegularPages()[1] +` + b := Test(t, files) + b.AssertFileContent("public/en/sect/mybundle_en/index.html", + "TranslationKey: adfasdf|", + "Resources: /en/sect/mybundle_en/f1.txt|f1.en|/en/sect/mybundle_en/f2.txt|f2.en||", + ) - c.Assert(p2.TranslationKey(), qt.Equals, "page/sect/simple") + b.AssertFileContent("public/nn/sect/mybundle_nn/index.html", + "TranslationKey: adfasdf|", + "Title: mybundle nn|TranslationKey: adfasdf|\nResources: /en/sect/mybundle_en/f1.txt|f1.en|/nn/sect/mybundle_nn/f2.nn.txt|f2.nn||", + ) } func TestChompBOM(t *testing.T) { @@ -1383,12 +1571,6 @@ Content:{{ .Content }} ) } -// https://github.com/gohugoio/hugo/issues/5781 -func TestPageWithZeroFile(t *testing.T) { - newTestSitesBuilder(t).WithLogger(loggers.NewDefault()).WithSimpleConfigFile(). - WithTemplatesAdded("index.html", "{{ .File.Filename }}{{ with .File }}{{ .Dir }}{{ end }}").Build(BuildCfg{}) -} - func TestHomePageWithNoTitle(t *testing.T) { b := newTestSitesBuilder(t).WithConfigFile("toml", ` title = "Site Title" @@ -1499,93 +1681,45 @@ func TestShouldBuildWithClock(t *testing.T) { } } -// "dot" in path: #1885 and #2110 -// disablePathToLower regression: #3374 -func TestPathIssues(t *testing.T) { - for _, disablePathToLower := range []bool{false, true} { - for _, uglyURLs := range []bool{false, true} { - disablePathToLower := disablePathToLower - uglyURLs := uglyURLs - t.Run(fmt.Sprintf("disablePathToLower=%t,uglyURLs=%t", disablePathToLower, uglyURLs), func(t *testing.T) { - t.Parallel() - cfg, fs := newTestCfg() - c := qt.New(t) - - cfg.Set("permalinks", map[string]string{ - "post": ":section/:title", - }) - - cfg.Set("uglyURLs", uglyURLs) - cfg.Set("disablePathToLower", disablePathToLower) - cfg.Set("paginate", 1) - th, configs := newTestHelperFromProvider(cfg, fs, t) - - writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), "
{{.Content}}") - writeSource(t, fs, filepath.Join("layouts", "_default", "list.html"), - "P{{.Paginator.PageNumber}}|URL: {{.Paginator.URL}}|{{ if .Paginator.HasNext }}Next: {{.Paginator.Next.URL }}{{ end }}") - - for i := 0; i < 3; i++ { - writeSource(t, fs, filepath.Join("content", "post", fmt.Sprintf("doc%d.md", i)), - fmt.Sprintf(`--- -title: "test%d.dot" -tags: -- ".net" ---- -# doc1 -*some content*`, i)) - } - - writeSource(t, fs, filepath.Join("content", "Blog", "Blog1.md"), - fmt.Sprintf(`--- -title: "testBlog" -tags: -- "Blog" ---- -# doc1 -*some blog content*`)) - - s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Configs: configs}, BuildCfg{}) - - c.Assert(len(s.RegularPages()), qt.Equals, 4) - - pathFunc := func(s string) string { - if uglyURLs { - return strings.Replace(s, "/index.html", ".html", 1) - } - return s - } - - blog := "blog" - - if disablePathToLower { - blog = "Blog" - } - - th.assertFileContent(pathFunc("public/"+blog+"/"+blog+"1/index.html"), "some blog content") - - th.assertFileContent(pathFunc("public/post/test0.dot/index.html"), "some content") - - if uglyURLs { - th.assertFileContent("public/post/page/1.html", `canonical" href="/post.html"`) - th.assertFileContent("public/post.html", `P1|URL: /post.html|Next: /post/page/2.html`) - th.assertFileContent("public/post/page/2.html", `P2|URL: /post/page/2.html|Next: /post/page/3.html`) - } else { - th.assertFileContent("public/post/page/1/index.html", `canonical" href="/post/"`) - th.assertFileContent("public/post/index.html", `P1|URL: /post/|Next: /post/page/2/`) - th.assertFileContent("public/post/page/2/index.html", `P2|URL: /post/page/2/|Next: /post/page/3/`) - th.assertFileContent("public/tags/.net/index.html", `P1|URL: /tags/.net/|Next: /tags/.net/page/2/`) - - } - - p := s.RegularPages()[0] - if uglyURLs { - c.Assert(p.RelPermalink(), qt.Equals, "/post/test0.dot.html") - } else { - c.Assert(p.RelPermalink(), qt.Equals, "/post/test0.dot/") - } - }) - } - } +// See https://github.com/gohugoio/hugo/issues/9171 +// We redefined disablePathToLower in v0.121.0. +func TestPagePathDisablePathToLower(t *testing.T) { + files := ` +-- hugo.toml -- +baseURL = "http://example.com" +disablePathToLower = true +[permalinks] +sect2 = "/:section/:filename/" +sect3 = "/:section/:title/" +-- content/sect/p1.md -- +--- +title: "Page1" +--- +p1. +-- content/sect/p2.md -- +--- +title: "Page2" +slug: "PaGe2" +--- +p2. +-- content/sect2/PaGe3.md -- +--- +title: "Page3" +--- +-- content/seCt3/p4.md -- +--- +title: "Pag.E4" +slug: "PaGe4" +--- +p4. +-- layouts/_default/single.html -- +Single: {{ .Title}}|{{ .RelPermalink }}|{{ .Path }}| +` + b := Test(t, files) + b.AssertFileContent("public/sect/p1/index.html", "Single: Page1|/sect/p1/|/sect/p1") + b.AssertFileContent("public/sect/PaGe2/index.html", "Single: Page2|/sect/PaGe2/|/sect/p2") + b.AssertFileContent("public/sect2/page3/index.html", "Single: Page3|/sect2/page3/|/sect2/page3|") + b.AssertFileContent("public/sect3/Pag.E4/index.html", "Single: Pag.E4|/sect3/Pag.E4/|/sect3/p4|") } // https://github.com/gohugoio/hugo/issues/4675 @@ -1711,50 +1845,6 @@ title: Scratch Me! 
b.AssertFileContent("public/scratchme/index.html", "C: cv") } -func TestScratchRebuild(t *testing.T) { - t.Parallel() - - files := ` --- config.toml -- --- content/p1.md -- ---- -title: "p1" ---- -{{< scratchme >}} --- layouts/shortcodes/foo.html -- -notused --- layouts/shortcodes/scratchme.html -- -{{ .Page.Scratch.Set "scratch" "foo" }} -{{ .Page.Store.Set "scratch" "bar" }} --- layouts/_default/single.html -- -{{ .Content }} -Scratch: {{ .Scratch.Get "scratch" }}| -Store: {{ .Store.Get "scratch" }}| -` - - b := NewIntegrationTestBuilder( - IntegrationTestConfig{ - T: t, - TxtarString: files, - Running: true, - }, - ).Build() - - b.AssertFileContent("public/p1/index.html", ` -Scratch: foo| -Store: bar| - `) - - b.EditFiles("layouts/shortcodes/foo.html", "edit") - - b.Build() - - b.AssertFileContent("public/p1/index.html", ` -Scratch: | -Store: bar| - `) -} - func TestPageParam(t *testing.T) { t.Parallel() @@ -1879,27 +1969,6 @@ Link with URL as text `) } -func TestPageCaseIssues(t *testing.T) { - t.Parallel() - - b := newTestSitesBuilder(t) - b.WithConfigFile("toml", `defaultContentLanguage = "no" -[languages] -[languages.NO] -title = "Norsk" -`) - b.WithContent("a/B/C/Page1.md", "---\ntitle: Page1\n---") - b.WithTemplates("index.html", ` -{{ $p1 := site.GetPage "a/B/C/Page1" }} -Lang: {{ .Lang }} -Page1: {{ $p1.Path }} -`) - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/index.html", "Lang: no", filepath.FromSlash("Page1: a/B/C/Page1.md")) -} - func TestPageHashString(t *testing.T) { files := ` -- config.toml -- @@ -1930,6 +1999,8 @@ title: "p2" p2 := b.H.Sites[0].RegularPages()[1] sites := p1.Sites() + b.Assert(p1, qt.Not(qt.Equals), p2) + b.Assert(identity.HashString(p1), qt.Not(qt.Equals), identity.HashString(p2)) b.Assert(identity.HashString(sites[0]), qt.Not(qt.Equals), identity.HashString(sites[1])) } diff --git a/hugolib/page_unwrap.go b/hugolib/page_unwrap.go index c3e1ce8dddf..c22ff2174e9 100644 --- a/hugolib/page_unwrap.go +++ b/hugolib/page_unwrap.go @@ -16,6 +16,7 @@ package hugolib import ( "fmt" + "github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/resources/page" ) @@ -31,6 +32,8 @@ func unwrapPage(in any) (page.Page, error) { return v, nil case pageWrapper: return v.page(), nil + case types.Unwrapper: + return unwrapPage(v.Unwrapv()) case page.Page: return v, nil case nil: diff --git a/hugolib/pagebundler_test.go b/hugolib/pagebundler_test.go index 64d32983285..123d752e0f4 100644 --- a/hugolib/pagebundler_test.go +++ b/hugolib/pagebundler_test.go @@ -15,21 +15,14 @@ package hugolib import ( "fmt" - "io" "os" - "path" "path/filepath" - "regexp" + "testing" "github.com/gohugoio/hugo/common/loggers" - "strings" - "testing" - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs" @@ -44,454 +37,180 @@ import ( qt "github.com/frankban/quicktest" ) -func TestPageBundlerSiteRegular(t *testing.T) { - c := qt.New(t) - baseBaseURL := "https://example.com" - - for _, baseURLPath := range []string{"", "/hugo"} { - for _, canonify := range []bool{false, true} { - for _, ugly := range []bool{false, true} { - baseURLPathId := baseURLPath - if baseURLPathId == "" { - baseURLPathId = "NONE" - } - ugly := ugly - canonify := canonify - c.Run(fmt.Sprintf("ugly=%t,canonify=%t,path=%s", ugly, canonify, baseURLPathId), - func(c *qt.C) { - c.Parallel() - baseURL := baseBaseURL + baseURLPath - relURLBase := baseURLPath - if canonify { - relURLBase = "" - } - fs, cfg := 
newTestBundleSources(c) - cfg.Set("baseURL", baseURL) - cfg.Set("canonifyURLs", canonify) - cfg.Set("defaultContentLanguageInSubdir", false) - - cfg.Set("permalinks", map[string]string{ - "a": ":sections/:filename", - "b": ":year/:slug/", - "c": ":sections/:slug", - "/": ":filename/", - }) - - cfg.Set("outputFormats", map[string]any{ - "CUSTOMO": map[string]any{ - "mediaType": "text/html", - "baseName": "cindex", - "path": "cpath", - "permalinkable": true, - }, - }) - - cfg.Set("outputs", map[string]any{ - "home": []string{"HTML", "CUSTOMO"}, - "page": []string{"HTML", "CUSTOMO"}, - "section": []string{"HTML", "CUSTOMO"}, - }) - - cfg.Set("uglyURLs", ugly) - configs, err := loadTestConfigFromProvider(cfg) - - c.Assert(err, qt.IsNil) - - b := newTestSitesBuilderFromDepsCfg(c, deps.DepsCfg{Fs: fs, Configs: configs}).WithNothingAdded() - - b.Build(BuildCfg{}) - - s := b.H.Sites[0] - - c.Assert(len(s.RegularPages()), qt.Equals, 8) - - singlePage := s.getPage(kinds.KindPage, "a/1.md") - c.Assert(singlePage.BundleType(), qt.Equals, files.ContentClass("")) - - c.Assert(singlePage, qt.Not(qt.IsNil)) - c.Assert(s.getPage("page", "a/1"), qt.Equals, singlePage) - c.Assert(s.getPage("page", "1"), qt.Equals, singlePage) - - c.Assert(content(singlePage), qt.Contains, "TheContent") - - relFilename := func(basePath, outBase string) (string, string) { - rel := basePath - if ugly { - rel = strings.TrimSuffix(basePath, "/") + ".html" - } - - var filename string - if !ugly { - filename = path.Join(basePath, outBase) - } else { - filename = rel - } - - rel = fmt.Sprintf("%s%s", relURLBase, rel) - - return rel, filename - } - - // Check both output formats - rel, filename := relFilename("/a/1/", "index.html") - b.AssertFileContent(filepath.Join("public", filename), - "TheContent", - "Single RelPermalink: "+rel, - ) - - rel, filename = relFilename("/cpath/a/1/", "cindex.html") - - b.AssertFileContent(filepath.Join("public", filename), - "TheContent", - "Single RelPermalink: "+rel, - ) - - b.AssertFileContent(filepath.FromSlash("public/images/hugo-logo.png"), "content") - - // This should be just copied to destination. 
- b.AssertFileContent(filepath.FromSlash("public/assets/pic1.png"), "content") - - leafBundle1 := s.getPage(kinds.KindPage, "b/my-bundle/index.md") - c.Assert(leafBundle1, qt.Not(qt.IsNil)) - c.Assert(leafBundle1.BundleType(), qt.Equals, files.ContentClassLeaf) - c.Assert(leafBundle1.Section(), qt.Equals, "b") - sectionB := s.getPage(kinds.KindSection, "b") - c.Assert(sectionB, qt.Not(qt.IsNil)) - home := s.Home() - c.Assert(home.BundleType(), qt.Equals, files.ContentClassBranch) - - // This is a root bundle and should live in the "home section" - // See https://github.com/gohugoio/hugo/issues/4332 - rootBundle := s.getPage(kinds.KindPage, "root") - c.Assert(rootBundle, qt.Not(qt.IsNil)) - c.Assert(rootBundle.Parent().IsHome(), qt.Equals, true) - if !ugly { - b.AssertFileContent(filepath.FromSlash("public/root/index.html"), "Single RelPermalink: "+relURLBase+"/root/") - b.AssertFileContent(filepath.FromSlash("public/cpath/root/cindex.html"), "Single RelPermalink: "+relURLBase+"/cpath/root/") - } - - leafBundle2 := s.getPage(kinds.KindPage, "a/b/index.md") - c.Assert(leafBundle2, qt.Not(qt.IsNil)) - unicodeBundle := s.getPage(kinds.KindPage, "c/bundle/index.md") - c.Assert(unicodeBundle, qt.Not(qt.IsNil)) - - pageResources := leafBundle1.Resources().ByType(pageResourceType) - c.Assert(len(pageResources), qt.Equals, 2) - firstPage := pageResources[0].(page.Page) - secondPage := pageResources[1].(page.Page) - - c.Assert(firstPage.File().Filename(), qt.Equals, filepath.FromSlash("/work/base/b/my-bundle/1.md")) - c.Assert(content(firstPage), qt.Contains, "TheContent") - c.Assert(len(leafBundle1.Resources()), qt.Equals, 6) - - // Verify shortcode in bundled page - c.Assert(content(secondPage), qt.Contains, filepath.FromSlash("MyShort in b/my-bundle/2.md")) - - // https://github.com/gohugoio/hugo/issues/4582 - c.Assert(firstPage.Parent(), qt.Equals, leafBundle1) - c.Assert(secondPage.Parent(), qt.Equals, leafBundle1) - - c.Assert(pageResources.GetMatch("1*"), qt.Equals, firstPage) - c.Assert(pageResources.GetMatch("2*"), qt.Equals, secondPage) - c.Assert(pageResources.GetMatch("doesnotexist*"), qt.IsNil) - - imageResources := leafBundle1.Resources().ByType("image") - c.Assert(len(imageResources), qt.Equals, 3) - - c.Assert(leafBundle1.OutputFormats().Get("CUSTOMO"), qt.Not(qt.IsNil)) - - relPermalinker := func(s string) string { - return fmt.Sprintf(s, relURLBase) - } - - permalinker := func(s string) string { - return fmt.Sprintf(s, baseURL) - } - - if ugly { - b.AssertFileContent("public/2017/pageslug.html", - relPermalinker("Single RelPermalink: %s/2017/pageslug.html"), - permalinker("Single Permalink: %s/2017/pageslug.html"), - relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"), - permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg")) - } else { - b.AssertFileContent("public/2017/pageslug/index.html", - relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"), - permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg")) - - b.AssertFileContent("public/cpath/2017/pageslug/cindex.html", - relPermalinker("Single RelPermalink: %s/cpath/2017/pageslug/"), - relPermalinker("Short Sunset RelPermalink: %s/cpath/2017/pageslug/sunset2.jpg"), - relPermalinker("Sunset RelPermalink: %s/cpath/2017/pageslug/sunset1.jpg"), - permalinker("Sunset Permalink: %s/cpath/2017/pageslug/sunset1.jpg"), - ) - } - - b.AssertFileContent(filepath.FromSlash("public/2017/pageslug/c/logo.png"), "content") - b.AssertFileContent(filepath.FromSlash("public/cpath/2017/pageslug/c/logo.png"), 
"content") - c.Assert(b.CheckExists("public/cpath/cpath/2017/pageslug/c/logo.png"), qt.Equals, false) - - // Custom media type defined in site config. - c.Assert(len(leafBundle1.Resources().ByType("bepsays")), qt.Equals, 1) - - if ugly { - b.AssertFileContent(filepath.FromSlash("public/2017/pageslug.html"), - "TheContent", - relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"), - permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg"), - "Thumb Width: 123", - "Thumb Name: my-sunset-1", - relPermalinker("Short Sunset RelPermalink: %s/2017/pageslug/sunset2.jpg"), - "Short Thumb Width: 56", - "1: Image Title: Sunset Galore 1", - "1: Image Params: map[myparam:My Sunny Param]", - relPermalinker("1: Image RelPermalink: %s/2017/pageslug/sunset1.jpg"), - "2: Image Title: Sunset Galore 2", - "2: Image Params: map[myparam:My Sunny Param]", - "1: Image myParam: Lower: My Sunny Param Caps: My Sunny Param", - "0: Page Title: Bundle Galore", - ) - - // https://github.com/gohugoio/hugo/issues/5882 - b.AssertFileContent( - filepath.FromSlash("public/2017/pageslug.html"), "0: Page RelPermalink: |") - - b.AssertFileContent(filepath.FromSlash("public/cpath/2017/pageslug.html"), "TheContent") - - // 은행 - b.AssertFileContent(filepath.FromSlash("public/c/은행/logo-은행.png"), "은행 PNG") - - } else { - b.AssertFileContent(filepath.FromSlash("public/2017/pageslug/index.html"), "TheContent") - b.AssertFileContent(filepath.FromSlash("public/cpath/2017/pageslug/cindex.html"), "TheContent") - b.AssertFileContent(filepath.FromSlash("public/2017/pageslug/index.html"), "Single Title") - b.AssertFileContent(filepath.FromSlash("public/root/index.html"), "Single Title") - - } - }) - } - } - } -} - -func TestPageBundlerSiteMultilingual(t *testing.T) { +func TestPageBundlerBundleInRoot(t *testing.T) { t.Parallel() + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] +-- content/root/index.md -- +--- +title: "Root" +--- +-- layouts/_default/single.html -- +Basic: {{ .Title }}|{{ .Kind }}|{{ .BundleType }}|{{ .RelPermalink }}| +Tree: Section: {{ .Section }}|CurrentSection: {{ .CurrentSection.RelPermalink }}|Parent: {{ .Parent.RelPermalink }}|FirstSection: {{ .FirstSection.RelPermalink }} +` + b := Test(t, files) - for _, ugly := range []bool{false, true} { - ugly := ugly - t.Run(fmt.Sprintf("ugly=%t", ugly), - func(t *testing.T) { - t.Parallel() - c := qt.New(t) - fs, cfg := newTestBundleSourcesMultilingual(t) - cfg.Set("uglyURLs", ugly) - configs, err := loadTestConfigFromProvider(cfg) - c.Assert(err, qt.IsNil) - - b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Configs: configs}).WithNothingAdded() - b.Build(BuildCfg{}) - - sites := b.H - - c.Assert(len(sites.Sites), qt.Equals, 2) - - s := sites.Sites[0] - - c.Assert(len(s.RegularPages()), qt.Equals, 8) - c.Assert(len(s.Pages()), qt.Equals, 16) - // dumpPages(s.AllPages()...) - - c.Assert(len(s.AllPages()), qt.Equals, 31) - - bundleWithSubPath := s.getPage(kinds.KindPage, "lb/index") - c.Assert(bundleWithSubPath, qt.Not(qt.IsNil)) - - // See https://github.com/gohugoio/hugo/issues/4312 - // Before that issue: - // A bundle in a/b/index.en.md - // a/b/index.en.md => OK - // a/b/index => OK - // index.en.md => ambiguous, but OK. - // With bundles, the file name has little meaning, the folder it lives in does. So this should also work: - // a/b - // and probably also just b (aka "my-bundle") - // These may also be translated, so we also need to test that. 
- // "bf", "my-bf-bundle", "index.md + nn - bfBundle := s.getPage(kinds.KindPage, "bf/my-bf-bundle/index") - c.Assert(bfBundle, qt.Not(qt.IsNil)) - c.Assert(bfBundle.Language().Lang, qt.Equals, "en") - c.Assert(s.getPage(kinds.KindPage, "bf/my-bf-bundle/index.md"), qt.Equals, bfBundle) - c.Assert(s.getPage(kinds.KindPage, "bf/my-bf-bundle"), qt.Equals, bfBundle) - c.Assert(s.getPage(kinds.KindPage, "my-bf-bundle"), qt.Equals, bfBundle) - - nnSite := sites.Sites[1] - c.Assert(len(nnSite.RegularPages()), qt.Equals, 7) - - bfBundleNN := nnSite.getPage(kinds.KindPage, "bf/my-bf-bundle/index") - c.Assert(bfBundleNN, qt.Not(qt.IsNil)) - c.Assert(bfBundleNN.Language().Lang, qt.Equals, "nn") - c.Assert(nnSite.getPage(kinds.KindPage, "bf/my-bf-bundle/index.nn.md"), qt.Equals, bfBundleNN) - c.Assert(nnSite.getPage(kinds.KindPage, "bf/my-bf-bundle"), qt.Equals, bfBundleNN) - c.Assert(nnSite.getPage(kinds.KindPage, "my-bf-bundle"), qt.Equals, bfBundleNN) - - // See https://github.com/gohugoio/hugo/issues/4295 - // Every resource should have its Name prefixed with its base folder. - cBundleResources := bundleWithSubPath.Resources().Match("c/**") - c.Assert(len(cBundleResources), qt.Equals, 4) - bundlePage := bundleWithSubPath.Resources().GetMatch("c/page*") - c.Assert(bundlePage, qt.Not(qt.IsNil)) - - bcBundleNN, _ := nnSite.getPageNew(nil, "bc") - c.Assert(bcBundleNN, qt.Not(qt.IsNil)) - bcBundleEN, _ := s.getPageNew(nil, "bc") - c.Assert(bcBundleNN.Language().Lang, qt.Equals, "nn") - c.Assert(bcBundleEN.Language().Lang, qt.Equals, "en") - c.Assert(len(bcBundleNN.Resources()), qt.Equals, 3) - c.Assert(len(bcBundleEN.Resources()), qt.Equals, 3) - b.AssertFileContent("public/en/bc/data1.json", "data1") - b.AssertFileContent("public/en/bc/data2.json", "data2") - b.AssertFileContent("public/en/bc/logo-bc.png", "logo") - b.AssertFileContent("public/nn/bc/data1.nn.json", "data1.nn") - b.AssertFileContent("public/nn/bc/data2.json", "data2") - b.AssertFileContent("public/nn/bc/logo-bc.png", "logo") - }) - } + b.AssertFileContent("public/root/index.html", + "Basic: Root|page|leaf|/root/|", + "Tree: Section: |CurrentSection: /|Parent: /|FirstSection: /", + ) } -func TestMultilingualDisableLanguage(t *testing.T) { +func TestPageBundlerShortcodeInBundledPage(t *testing.T) { t.Parallel() + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] +-- content/section/mybundle/index.md -- +--- +title: "Mybundle" +--- +-- content/section/mybundle/p1.md -- +--- +title: "P1" +--- - c := qt.New(t) - fs, cfg := newTestBundleSourcesMultilingual(t) - cfg.Set("disableLanguages", []string{"nn"}) - configs, err := loadTestConfigFromProvider(cfg) - c.Assert(err, qt.IsNil) +P1 content. - b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Configs: configs}).WithNothingAdded() - b.Build(BuildCfg{}) - sites := b.H +{{< myShort >}} - c.Assert(len(sites.Sites), qt.Equals, 1) +-- layouts/_default/single.html -- +Bundled page: {{ .RelPermalink}}|{{ with .Resources.Get "p1.md" }}Title: {{ .Title }}|Content: {{ .Content }}{{ end }}| +-- layouts/shortcodes/myShort.html -- +MyShort. 
-	s := sites.Sites[0]
+`
+	b := Test(t, files)
-	c.Assert(len(s.RegularPages()), qt.Equals, 8)
-	c.Assert(len(s.Pages()), qt.Equals, 16)
-	// No nn pages
-	c.Assert(len(s.AllPages()), qt.Equals, 16)
-	s.pageMap.withEveryBundlePage(func(p *pageState) bool {
-		c.Assert(p.Language().Lang != "nn", qt.Equals, true)
-		return false
-	})
+	b.AssertFileContent("public/section/mybundle/index.html",
+		"Bundled page: /section/mybundle/|Title: P1|Content:P1 content.
\nMyShort.", + ) } -func TestPageBundlerSiteWitSymbolicLinksInContent(t *testing.T) { - skipSymlink(t) - - wd, _ := os.Getwd() - defer func() { - os.Chdir(wd) - }() - - c := qt.New(t) - - // We need to use the OS fs for this. - workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugosym") - c.Assert(err, qt.IsNil) - cfg := config.New() - cfg.Set("workingDir", workingDir) - cfg.Set("publishDir", "public") - fs := hugofs.NewFromOld(hugofs.Os, cfg) - - contentDirName := "content" - - contentDir := filepath.Join(workingDir, contentDirName) - c.Assert(os.MkdirAll(filepath.Join(contentDir, "a"), 0777), qt.IsNil) - - for i := 1; i <= 3; i++ { - c.Assert(os.MkdirAll(filepath.Join(workingDir, fmt.Sprintf("symcontent%d", i)), 0777), qt.IsNil) - } - - c.Assert(os.MkdirAll(filepath.Join(workingDir, "symcontent2", "a1"), 0777), qt.IsNil) - - // Symlinked sections inside content. - os.Chdir(contentDir) - for i := 1; i <= 3; i++ { - c.Assert(os.Symlink(filepath.FromSlash(fmt.Sprintf(("../symcontent%d"), i)), fmt.Sprintf("symbolic%d", i)), qt.IsNil) - } - - c.Assert(os.Chdir(filepath.Join(contentDir, "a")), qt.IsNil) - - // Create a symlink to one single content file - c.Assert(os.Symlink(filepath.FromSlash("../../symcontent2/a1/page.md"), "page_s.md"), qt.IsNil) - - c.Assert(os.Chdir(filepath.FromSlash("../../symcontent3")), qt.IsNil) +func TestPageBundlerResourceMultipleOutputFormatsWithDifferentPaths(t *testing.T) { + t.Parallel() + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] +[outputformats] +[outputformats.cpath] +mediaType = "text/html" +path = "cpath" +-- content/section/mybundle/index.md -- +--- +title: "My Bundle" +outputs: ["html", "cpath"] +--- +-- content/section/mybundle/hello.txt -- +Hello. +-- content/section/mybundle/p1.md -- +--- +title: "P1" +--- +P1. + +{{< hello >}} + +-- layouts/shortcodes/hello.html -- +Hello HTML. +-- layouts/_default/single.html -- +Basic: {{ .Title }}|{{ .Kind }}|{{ .BundleType }}|{{ .RelPermalink }}| +Resources: {{ range .Resources }}RelPermalink: {{ .RelPermalink }}|Content: {{ .Content }}|{{ end }}| +-- layouts/shortcodes/hello.cpath -- +Hello CPATH. +-- layouts/_default/single.cpath -- +Basic: {{ .Title }}|{{ .Kind }}|{{ .BundleType }}|{{ .RelPermalink }}| +Resources: {{ range .Resources }}RelPermalink: {{ .RelPermalink }}|Content: {{ .Content }}|{{ end }}| +` - // Create a circular symlink. Will print some warnings. - c.Assert(os.Symlink(filepath.Join("..", contentDirName), filepath.FromSlash("circus")), qt.IsNil) + b := Test(t, files) - c.Assert(os.Chdir(workingDir), qt.IsNil) + b.AssertFileContent("public/section/mybundle/index.html", + "Basic: My Bundle|page|leaf|/section/mybundle/|", + "Resources: RelPermalink: |Content:P1.
\nHello HTML.\n|RelPermalink: /section/mybundle/hello.txt|Content: Hello.||",
+	)
-	defer clean()
+	b.AssertFileContent("public/cpath/section/mybundle/index.html", "Basic: My Bundle|page|leaf|/section/mybundle/|\nResources: RelPermalink: |Content:P1.
\nHello CPATH.\n|RelPermalink: /section/mybundle/hello.txt|Content: Hello.||") +} - cfg.Set("workingDir", workingDir) - cfg.Set("contentDir", contentDirName) - cfg.Set("baseURL", "https://example.com") - configs, err := loadTestConfigFromProvider(cfg) - c.Assert(err, qt.IsNil) +func TestPageBundlerMultilingualTextResource(t *testing.T) { + t.Parallel() - layout := `{{ .Title }}|{{ .Content }}` - pageContent := `--- -slug: %s -date: 2017-10-09 + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +-- content/mybundle/index.md -- +--- +title: "My Bundle" +--- +-- content/mybundle/index.nn.md -- +--- +title: "My Bundle NN" --- +-- content/mybundle/f1.txt -- +F1 +-- content/mybundle/f2.txt -- +F2 +-- content/mybundle/f2.nn.txt -- +F2 nn. +-- layouts/_default/single.html -- +{{ .Title }}|{{ .RelPermalink }}|{{ .Lang }}| +Resources: {{ range .Resources }}RelPermalink: {{ .RelPermalink }}|Content: {{ .Content }}|{{ end }}| -TheContent. ` + b := Test(t, files) - b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{ - Fs: fs, - Configs: configs, - }) - - b.WithTemplates( - "_default/single.html", layout, - "_default/list.html", layout, - ) - - b.WithContent( - "a/regular.md", fmt.Sprintf(pageContent, "a1"), - ) + b.AssertFileContent("public/en/mybundle/index.html", "My Bundle|/en/mybundle/|en|\nResources: RelPermalink: /en/mybundle/f1.txt|Content: F1|RelPermalink: /en/mybundle/f2.txt|Content: F2||") + b.AssertFileContent("public/nn/mybundle/index.html", "My Bundle NN|/nn/mybundle/|nn|\nResources: RelPermalink: /en/mybundle/f1.txt|Content: F1|RelPermalink: /nn/mybundle/f2.nn.txt|Content: F2 nn.||") +} - b.WithSourceFile( - "symcontent1/s1.md", fmt.Sprintf(pageContent, "s1"), - "symcontent1/s2.md", fmt.Sprintf(pageContent, "s2"), - // Regular files inside symlinked folder. - "symcontent1/s1.md", fmt.Sprintf(pageContent, "s1"), - "symcontent1/s2.md", fmt.Sprintf(pageContent, "s2"), - - // A bundle - "symcontent2/a1/index.md", fmt.Sprintf(pageContent, ""), - "symcontent2/a1/page.md", fmt.Sprintf(pageContent, "page"), - "symcontent2/a1/logo.png", "image", - - // Assets - "symcontent3/s1.png", "image", - "symcontent3/s2.png", "image", - ) +func TestMultilingualDisableLanguage(t *testing.T) { + t.Parallel() - b.Build(BuildCfg{}) - s := b.H.Sites[0] + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] +defaultContentLanguage = "en" +defaultContentLanguageInSubdir = true +disableLanguages = ["nn"] +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +-- content/p1.md -- +--- +title: "P1" +--- +P1 +-- content/p1.nn.md -- +--- +title: "P1nn" +--- +P1nn +-- layouts/_default/single.html -- +{{ .Title }}|{{ .Content }}|{{ .Lang }}| - c.Assert(len(s.RegularPages()), qt.Equals, 7) - a1Bundle := s.getPage(kinds.KindPage, "symbolic2/a1/index.md") - c.Assert(a1Bundle, qt.Not(qt.IsNil)) - c.Assert(len(a1Bundle.Resources()), qt.Equals, 2) - c.Assert(len(a1Bundle.Resources().ByType(pageResourceType)), qt.Equals, 1) +` + b := Test(t, files) - b.AssertFileContent(filepath.FromSlash("public/a/page/index.html"), "TheContent") - b.AssertFileContent(filepath.FromSlash("public/symbolic1/s1/index.html"), "TheContent") - b.AssertFileContent(filepath.FromSlash("public/symbolic2/a1/index.html"), "TheContent") + b.AssertFileContent("public/en/p1/index.html", "P1|P1
\n|en|") + b.AssertFileExists("public/public/nn/p1/index.html", false) + b.Assert(len(b.H.Sites), qt.Equals, 1) } func TestPageBundlerHeadless(t *testing.T) { @@ -544,10 +263,10 @@ HEADLESS {{< myShort >}} c.Assert(len(s.RegularPages()), qt.Equals, 1) - regular := s.getPage(kinds.KindPage, "a/index") + regular := s.getPageOldVersion(kinds.KindPage, "a/index") c.Assert(regular.RelPermalink(), qt.Equals, "/s1/") - headless := s.getPage(kinds.KindPage, "b/index") + headless := s.getPageOldVersion(kinds.KindPage, "b/index") c.Assert(headless, qt.Not(qt.IsNil)) c.Assert(headless.Title(), qt.Equals, "Headless Bundle in Topless Bar") c.Assert(headless.RelPermalink(), qt.Equals, "") @@ -576,6 +295,7 @@ HEADLESS {{< myShort >}} // No headless bundles here, please. // https://github.com/gohugoio/hugo/issues/6492 c.Assert(s.RegularPages(), qt.HasLen, 1) + c.Assert(s.Pages(), qt.HasLen, 4) c.Assert(s.home.RegularPages(), qt.HasLen, 1) c.Assert(s.home.Pages(), qt.HasLen, 1) } @@ -686,7 +406,6 @@ Single content. b.Build(BuildCfg{}) b.AssertFileContent("public/nn/mybundle/data.yaml", "data nn") - b.AssertFileContent("public/nn/mybundle/forms.yaml", "forms en") b.AssertFileContent("public/mybundle/data.yaml", "data en") b.AssertFileContent("public/mybundle/forms.yaml", "forms en") @@ -701,293 +420,113 @@ Single content. b.AssertFileContent("public/section-not-bundle/single/index.html", "Section Single", "|Single content.
") } -func newTestBundleSources(t testing.TB) (*hugofs.Fs, config.Provider) { - cfg, fs := newTestCfgBasic() - c := qt.New(t) - - workDir := "/work" - cfg.Set("workingDir", workDir) - cfg.Set("contentDir", "base") - cfg.Set("baseURL", "https://example.com") - cfg.Set("mediaTypes", map[string]any{ - "bepsays/bep": map[string]any{ - "suffixes": []string{"bep"}, - }, - }) - - pageContent := `--- -title: "Bundle Galore" -slug: pageslug -date: 2017-10-09 ---- - -TheContent. -` - - pageContentShortcode := `--- -title: "Bundle Galore" -slug: pageslug -date: 2017-10-09 ---- - -TheContent. - -{{< myShort >}} -` - - pageWithImageShortcodeAndResourceMetadataContent := `--- -title: "Bundle Galore" -slug: pageslug -date: 2017-10-09 -resources: -- src: "*.jpg" - name: "my-sunset-:counter" - title: "Sunset Galore :counter" - params: - myParam: "My Sunny Param" ---- - -TheContent. - -{{< myShort >}} -` - - pageContentNoSlug := `--- -title: "Bundle Galore #2" -date: 2017-10-09 ---- - -TheContent. -` - - singleLayout := ` -Single Title: {{ .Title }} -Single RelPermalink: {{ .RelPermalink }} -Single Permalink: {{ .Permalink }} -Content: {{ .Content }} -{{ $sunset := .Resources.GetMatch "my-sunset-1*" }} -{{ with $sunset }} -Sunset RelPermalink: {{ .RelPermalink }} -Sunset Permalink: {{ .Permalink }} -{{ $thumb := .Fill "123x123" }} -Thumb Width: {{ $thumb.Width }} -Thumb Name: {{ $thumb.Name }} -Thumb Title: {{ $thumb.Title }} -Thumb RelPermalink: {{ $thumb.RelPermalink }} -{{ end }} -{{ $types := slice "image" "page" }} -{{ range $types }} -{{ $typeTitle := . | title }} -{{ range $i, $e := $.Resources.ByType . }} -{{ $i }}: {{ $typeTitle }} Title: {{ .Title }} -{{ $i }}: {{ $typeTitle }} Name: {{ .Name }} -{{ $i }}: {{ $typeTitle }} RelPermalink: {{ .RelPermalink }}| -{{ $i }}: {{ $typeTitle }} Params: {{ printf "%v" .Params }} -{{ $i }}: {{ $typeTitle }} myParam: Lower: {{ .Params.myparam }} Caps: {{ .Params.MYPARAM }} -{{ end }} -{{ end }} -` +func TestBundledResourcesMultilingualDuplicateResourceFiles(t *testing.T) { + t.Parallel() - myShort := ` -MyShort in {{ .Page.File.Path }}: -{{ $sunset := .Page.Resources.GetMatch "my-sunset-2*" }} -{{ with $sunset }} -Short Sunset RelPermalink: {{ .RelPermalink }} -{{ $thumb := .Fill "56x56" }} -Short Thumb Width: {{ $thumb.Width }} -{{ end }} + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +[markup] +[markup.goldmark] +duplicateResourceFiles = true +[languages] +[languages.en] +weight = 1 +[languages.en.permalinks] +"/" = "/enpages/:slug/" +[languages.nn] +weight = 2 +[languages.nn.permalinks] +"/" = "/nnpages/:slug/" +-- content/mybundle/index.md -- +--- +title: "My Bundle" +--- +{{< getresource "f1.txt" >}} +{{< getresource "f2.txt" >}} +-- content/mybundle/index.nn.md -- +--- +title: "My Bundle NN" +--- +{{< getresource "f1.txt" >}} +f2.nn.txt is the original name. +{{< getresource "f2.nn.txt" >}} +{{< getresource "f2.txt" >}} +{{< getresource "sub/f3.txt" >}} +-- content/mybundle/f1.txt -- +F1 en. +-- content/mybundle/sub/f3.txt -- +F1 en. +-- content/mybundle/f2.txt -- +F2 en. +-- content/mybundle/f2.nn.txt -- +F2 nn. 
+-- layouts/shortcodes/getresource.html -- +{{ $r := .Page.Resources.Get (.Get 0)}} +Resource: {{ (.Get 0) }}|{{ with $r }}{{ .RelPermalink }}|{{ .Content }}|{{ else }}Not found.{{ end}} +-- layouts/_default/single.html -- +{{ .Title }}|{{ .RelPermalink }}|{{ .Lang }}|{{ .Content }}| ` - - listLayout := `{{ .Title }}|{{ .Content }}` - - writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), singleLayout) - writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), listLayout) - writeSource(t, fs, filepath.Join(workDir, "layouts", "shortcodes", "myShort.html"), myShort) - writeSource(t, fs, filepath.Join(workDir, "layouts", "shortcodes", "myShort.customo"), myShort) - - writeSource(t, fs, filepath.Join(workDir, "base", "_index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "_1.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "_1.png"), pageContent) - - writeSource(t, fs, filepath.Join(workDir, "base", "images", "hugo-logo.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "a", "2.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "a", "1.md"), pageContent) - - writeSource(t, fs, filepath.Join(workDir, "base", "a", "b", "index.md"), pageContentNoSlug) - writeSource(t, fs, filepath.Join(workDir, "base", "a", "b", "ab1.md"), pageContentNoSlug) - - // Mostly plain static assets in a folder with a page in a sub folder thrown in. - writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pic1.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pic2.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pages", "mypage.md"), pageContent) - - // Bundle - writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "index.md"), pageWithImageShortcodeAndResourceMetadataContent) - writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "1.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "2.md"), pageContentShortcode) - writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "custom-mime.bep"), "bepsays") - writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "c", "logo.png"), "content") - - // Bundle with 은행 slug - // See https://github.com/gohugoio/hugo/issues/4241 - writeSource(t, fs, filepath.Join(workDir, "base", "c", "bundle", "index.md"), `--- -title: "은행 은행" -slug: 은행 -date: 2017-10-09 ---- - -Content for 은행. + b := Test(t, files) + + // helpers.PrintFs(b.H.Fs.PublishDir, "", os.Stdout) + b.AssertFileContent("public/nn/nnpages/my-bundle-nn/index.html", ` +My Bundle NN +Resource: f1.txt|/nn/nnpages/my-bundle-nn/f1.txt| +Resource: f2.txt|/nn/nnpages/my-bundle-nn/f2.nn.txt|F2 nn.| +Resource: f2.nn.txt|/nn/nnpages/my-bundle-nn/f2.nn.txt|F2 nn.| +Resource: sub/f3.txt|/nn/nnpages/my-bundle-nn/sub/f3.txt|F1 en.| `) - // Bundle in root - writeSource(t, fs, filepath.Join(workDir, "base", "root", "index.md"), pageWithImageShortcodeAndResourceMetadataContent) - writeSource(t, fs, filepath.Join(workDir, "base", "root", "1.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "root", "c", "logo.png"), "content") + b.AssertFileContent("public/enpages/my-bundle/f2.txt", "F2 en.") + b.AssertFileContent("public/nn/nnpages/my-bundle-nn/f2.nn.txt", "F2 nn") - writeSource(t, fs, filepath.Join(workDir, "base", "c", "bundle", "logo-은행.png"), "은행 PNG") - - // Write a real image into one of the bundle above. 
- src, err := os.Open("testdata/sunset.jpg") - c.Assert(err, qt.IsNil) - - // We need 2 to test https://github.com/gohugoio/hugo/issues/4202 - out, err := fs.Source.Create(filepath.Join(workDir, "base", "b", "my-bundle", "sunset1.jpg")) - c.Assert(err, qt.IsNil) - out2, err := fs.Source.Create(filepath.Join(workDir, "base", "b", "my-bundle", "sunset2.jpg")) - c.Assert(err, qt.IsNil) - - _, err = io.Copy(out, src) - c.Assert(err, qt.IsNil) - out.Close() - src.Seek(0, 0) - _, err = io.Copy(out2, src) - out2.Close() - src.Close() - c.Assert(err, qt.IsNil) - - return fs, cfg -} - -func newTestBundleSourcesMultilingual(t *testing.T) (*hugofs.Fs, config.Provider) { - cfg, fs := newTestCfgBasic() - - workDir := "/work" - cfg.Set("workingDir", workDir) - cfg.Set("contentDir", "base") - cfg.Set("baseURL", "https://example.com") - cfg.Set("defaultContentLanguage", "en") - - langConfig := map[string]any{ - "en": map[string]any{ - "weight": 1, - "languageName": "English", - }, - "nn": map[string]any{ - "weight": 2, - "languageName": "Nynorsk", - }, - } - - cfg.Set("languages", langConfig) - - pageContent := `--- -slug: pageslug -date: 2017-10-09 ---- - -TheContent. -` + b.AssertFileContent("public/enpages/my-bundle/index.html", ` +Resource: f1.txt|/enpages/my-bundle/f1.txt|F1 en.| +Resource: f2.txt|/enpages/my-bundle/f2.txt|F2 en.| +`) + b.AssertFileContent("public/enpages/my-bundle/f1.txt", "F1 en.") - layout := `{{ .Title }}|{{ .Content }}|Lang: {{ .Site.Language.Lang }}` - - writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), layout) - writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), layout) - - writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mypage.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mypage.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mylogo.png"), "content") - - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_index.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "en.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_1.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_1.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "a.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b.nn.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "c.nn.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b", "d.nn.png"), "content") - - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "_index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "_index.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "page.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "logo-bc.png"), "logo") - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "page.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "data1.json"), "data1") - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "data2.json"), "data2") - writeSource(t, fs, filepath.Join(workDir, "base", "bc", "data1.nn.json"), "data1.nn") - - writeSource(t, fs, filepath.Join(workDir, "base", "bd", "index.md"), pageContent) - writeSource(t, fs, 
filepath.Join(workDir, "base", "bd", "page.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bd", "page.nn.md"), pageContent) - - writeSource(t, fs, filepath.Join(workDir, "base", "be", "_index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "be", "page.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "be", "page.nn.md"), pageContent) - - // Bundle leaf, multilingual - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "index.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "1.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "2.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "2.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "page.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "logo.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "logo.nn.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "one.png"), "content") - writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "d", "deep.png"), "content") - - // Translated bundle in some sensible sub path. - writeSource(t, fs, filepath.Join(workDir, "base", "bf", "my-bf-bundle", "index.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bf", "my-bf-bundle", "index.nn.md"), pageContent) - writeSource(t, fs, filepath.Join(workDir, "base", "bf", "my-bf-bundle", "page.md"), pageContent) - - return fs, cfg + // Should be duplicated to the nn bundle. + b.AssertFileContent("public/nn/nnpages/my-bundle-nn/f1.txt", "F1 en.") } // https://github.com/gohugoio/hugo/issues/5858 func TestBundledResourcesWhenMultipleOutputFormats(t *testing.T) { t.Parallel() - b := newTestSitesBuilder(t).Running().WithConfigFile("toml", ` + files := ` +-- hugo.toml -- baseURL = "https://example.org" +disableKinds = ["taxonomy", "term"] +disableLiveReload = true [outputs] - # This looks odd, but it triggers the behaviour in #5858 - # The total output formats list gets sorted, so CSS before HTML. - home = [ "CSS" ] - -`) - b.WithContent("mybundle/index.md", ` +# This looks odd, but it triggers the behaviour in #5858 +# The total output formats list gets sorted, so CSS before HTML. +home = [ "CSS" ] +-- content/mybundle/index.md -- --- title: Page -date: 2017-01-15 --- -`, - "mybundle/data.json", "MyData", - ) +-- content/mybundle/data.json -- +MyData +-- layouts/_default/single.html -- +{{ range .Resources }} +{{ .ResourceType }}|{{ .Title }}| +{{ end }} +` - b.CreateSites().Build(BuildCfg{}) + b := TestRunning(t, files) b.AssertFileContent("public/mybundle/data.json", "MyData") - // Change the bundled JSON file and make sure it gets republished. 
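// [Editor's note, not part of the patch] The rewritten multiple-output-formats
// test above switches to the running/rebuild helpers used elsewhere in this
// diff (TestRunning, EditFileReplaceAll, Build). A reduced sketch of that
// edit-and-rebuild pattern, using the same bundled data.json as the test:
//
//	b := TestRunning(t, files)
//	b.AssertFileContent("public/mybundle/data.json", "MyData")
//	// Change the bundled file and assert it is republished on rebuild.
//	b.EditFileReplaceAll("content/mybundle/data.json", "MyData", "My changed data").Build()
//	b.AssertFileContent("public/mybundle/data.json", "My changed data")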
- b.EditFiles("content/mybundle/data.json", "My changed data") - - b.Build(BuildCfg{}) + b.EditFileReplaceAll("content/mybundle/data.json", "MyData", "My changed data").Build() b.AssertFileContent("public/mybundle/data.json", "My changed data") } +// https://github.com/gohugoio/hugo/issues/5858 + // https://github.com/gohugoio/hugo/issues/4870 func TestBundleSlug(t *testing.T) { t.Parallel() @@ -1016,191 +555,45 @@ slug: %s c.Assert(b.CheckExists("public/about/services2/this-is-another-slug/index.html"), qt.Equals, true) } -func TestBundleMisc(t *testing.T) { - config := ` -baseURL = "https://example.com" -defaultContentLanguage = "en" -defaultContentLanguageInSubdir = true -ignoreFiles = ["README\\.md", "content/en/ignore"] - -[Languages] -[Languages.en] -weight = 99999 -contentDir = "content/en" -[Languages.nn] -weight = 20 -contentDir = "content/nn" -[Languages.sv] -weight = 30 -contentDir = "content/sv" -[Languages.nb] -weight = 40 -contentDir = "content/nb" - -` - - const pageContent = `--- -title: %q ---- -` - createPage := func(s string) string { - return fmt.Sprintf(pageContent, s) - } - - b := newTestSitesBuilder(t).WithConfigFile("toml", config) - b.WithLogger(loggers.NewDefault()) - - b.WithTemplates("_default/list.html", `{{ range .Site.Pages }} -{{ .Kind }}|{{ .Path }}|{{ with .CurrentSection }}CurrentSection: {{ .Path }}{{ end }}|{{ .RelPermalink }}{{ end }} -`) - - b.WithTemplates("_default/single.html", `Single: {{ .Title }}`) - - b.WithContent("en/sect1/sect2/_index.md", createPage("en: Sect 2")) - b.WithContent("en/sect1/sect2/page.md", createPage("en: Page")) - b.WithContent("en/sect1/sect2/data-branch.json", "mydata") - b.WithContent("nn/sect1/sect2/page.md", createPage("nn: Page")) - b.WithContent("nn/sect1/sect2/data-branch.json", "my nn data") - - // En only - b.WithContent("en/enonly/myen.md", createPage("en: Page")) - b.WithContent("en/enonly/myendata.json", "mydata") - - // Leaf - - b.WithContent("nn/b1/index.md", createPage("nn: leaf")) - b.WithContent("en/b1/index.md", createPage("en: leaf")) - b.WithContent("sv/b1/index.md", createPage("sv: leaf")) - b.WithContent("nb/b1/index.md", createPage("nb: leaf")) - - // Should be ignored - b.WithContent("en/ignore/page.md", createPage("en: ignore")) - b.WithContent("en/README.md", createPage("en: ignore")) - - // Both leaf and branch bundle in same dir - b.WithContent("en/b2/index.md", `--- -slug: leaf ---- -`) - b.WithContent("en/b2/_index.md", createPage("en: branch")) - - b.WithContent("en/b1/data1.json", "en: data") - b.WithContent("sv/b1/data1.json", "sv: data") - b.WithContent("sv/b1/data2.json", "sv: data2") - b.WithContent("nb/b1/data2.json", "nb: data2") - - b.WithContent("en/b3/_index.md", createPage("en: branch")) - b.WithContent("en/b3/p1.md", createPage("en: page")) - b.WithContent("en/b3/data1.json", "en: data") - - b.Build(BuildCfg{}) - - b.AssertFileContent("public/en/index.html", - filepath.FromSlash("section|sect1/sect2/_index.md|CurrentSection: sect1/sect2/_index.md"), - "myen.md|CurrentSection: enonly") - - b.AssertFileContentFn("public/en/index.html", func(s string) bool { - // Check ignored files - return !regexp.MustCompile("README|ignore").MatchString(s) - }) - - b.AssertFileContent("public/nn/index.html", filepath.FromSlash("page|sect1/sect2/page.md|CurrentSection: sect1")) - b.AssertFileContentFn("public/nn/index.html", func(s string) bool { - return !strings.Contains(s, "enonly") - }) - - // Check order of inherited data file - b.AssertFileContent("public/nb/b1/data1.json", "en: data") // 
Default content - b.AssertFileContent("public/nn/b1/data2.json", "sv: data") // First match - - b.AssertFileContent("public/en/enonly/myen/index.html", "Single: en: Page") - b.AssertFileContent("public/en/enonly/myendata.json", "mydata") - - c := qt.New(t) - c.Assert(b.CheckExists("public/sv/enonly/myen/index.html"), qt.Equals, false) - - // Both leaf and branch bundle in same dir - // We log a warning about it, but we keep both. - b.AssertFileContent("public/en/b2/index.html", - "/en/b2/leaf/", - filepath.FromSlash("section|sect1/sect2/_index.md|CurrentSection: sect1/sect2/_index.md")) -} - -// Issue 6136 +// See #11663 func TestPageBundlerPartialTranslations(t *testing.T) { - config := ` -baseURL = "https://example.org" + t.Parallel() + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["taxonomy", "term"] defaultContentLanguage = "en" defaultContentLanguageInSubDir = true -disableKinds = ["taxonomy", "term"] [languages] [languages.nn] -languageName = "Nynorsk" weight = 2 -title = "Tittel på Nynorsk" [languages.en] -title = "Title in English" -languageName = "English" weight = 1 -` - - pageContent := func(id string) string { - return fmt.Sprintf(` +-- content/section/mybundle/index.md -- --- -title: %q +title: "Mybundle" +--- +-- content/section/mybundle/bundledpage.md -- +--- +title: "Bundled page en" +--- +-- content/section/mybundle/bundledpage.nn.md -- +--- +title: "Bundled page nn" --- -`, id) - } - - dataContent := func(id string) string { - return id - } - - b := newTestSitesBuilder(t).WithConfigFile("toml", config) - - b.WithContent("blog/sect1/_index.nn.md", pageContent("s1.nn")) - b.WithContent("blog/sect1/data.json", dataContent("s1.data")) - - b.WithContent("blog/sect1/b1/index.nn.md", pageContent("s1.b1.nn")) - b.WithContent("blog/sect1/b1/data.json", dataContent("s1.b1.data")) - - b.WithContent("blog/sect2/_index.md", pageContent("s2")) - b.WithContent("blog/sect2/data.json", dataContent("s2.data")) - - b.WithContent("blog/sect2/b1/index.md", pageContent("s2.b1")) - b.WithContent("blog/sect2/b1/data.json", dataContent("s2.b1.data")) - - b.WithContent("blog/sect2/b2/index.md", pageContent("s2.b2")) - b.WithContent("blog/sect2/b2/bp.md", pageContent("s2.b2.bundlecontent")) - - b.WithContent("blog/sect2/b3/index.md", pageContent("s2.b3")) - b.WithContent("blog/sect2/b3/bp.nn.md", pageContent("s2.b3.bundlecontent.nn")) - b.WithContent("blog/sect2/b4/index.nn.md", pageContent("s2.b4")) - b.WithContent("blog/sect2/b4/bp.nn.md", pageContent("s2.b4.bundlecontent.nn")) +-- layouts/_default/single.html -- +Bundled page: {{ .RelPermalink}}|Len resources: {{ len .Resources }}| - b.WithTemplates("index.html", ` -Num Pages: {{ len .Site.Pages }} -{{ range .Site.Pages }} -{{ .Kind }}|{{ .RelPermalink }}|Content: {{ .Title }}|Resources: {{ range .Resources }}R: {{ .Title }}|{{ .Content }}|{{ end -}} -{{ end }} -`) - b.Build(BuildCfg{}) +` + b := Test(t, files) - b.AssertFileContent("public/nn/index.html", - "Num Pages: 6", - "page|/nn/blog/sect1/b1/|Content: s1.b1.nn|Resources: R: data.json|s1.b1.data|", - "page|/nn/blog/sect2/b3/|Content: s2.b3|Resources: R: s2.b3.bundlecontent.nn|", - "page|/nn/blog/sect2/b4/|Content: s2.b4|Resources: R: s2.b4.bundlecontent.nn", + b.AssertFileContent("public/en/section/mybundle/index.html", + "Bundled page: /en/section/mybundle/|Len resources: 1|", ) - b.AssertFileContent("public/en/index.html", - "Num Pages: 6", - "section|/en/blog/sect2/|Content: s2|Resources: R: data.json|s2.data|", - "page|/en/blog/sect2/b1/|Content: 
s2.b1|Resources: R: data.json|s2.b1.data|", - "page|/en/blog/sect2/b2/|Content: s2.b2|Resources: R: s2.b2.bundlecontent|", - ) + b.AssertFileExists("public/nn/section/mybundle/index.html", false) } // #6208 @@ -1329,7 +722,7 @@ func TestPageBundlerHome(t *testing.T) { cfg.Set("publishDir", "public") fs := hugofs.NewFromOld(hugofs.Os, cfg) - os.MkdirAll(filepath.Join(workDir, "content"), 0777) + os.MkdirAll(filepath.Join(workDir, "content"), 0o777) defer clean() diff --git a/hugolib/pagecollections.go b/hugolib/pagecollections.go index 26da4905e80..8e05ad7e60d 100644 --- a/hugolib/pagecollections.go +++ b/hugolib/pagecollections.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -18,91 +18,65 @@ import ( "path" "path/filepath" "strings" - "sync" - - "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs/files" - "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" ) -// PageCollections contains the page collections for a site. -type PageCollections struct { +// pageFinder provides ways to find a Page in a Site. +type pageFinder struct { pageMap *pageMap - - // Lazy initialized page collections - pages *lazyPagesFactory - regularPages *lazyPagesFactory - allPages *lazyPagesFactory - allRegularPages *lazyPagesFactory -} - -// Pages returns all pages. -// This is for the current language only. -func (c *PageCollections) Pages() page.Pages { - return c.pages.get() -} - -// RegularPages returns all the regular pages. -// This is for the current language only. -func (c *PageCollections) RegularPages() page.Pages { - return c.regularPages.get() -} - -// AllPages returns all pages for all languages. -func (c *PageCollections) AllPages() page.Pages { - return c.allPages.get() } -// AllRegularPages returns all regular pages for all languages. -func (c *PageCollections) AllRegularPages() page.Pages { - return c.allRegularPages.get() -} - -type lazyPagesFactory struct { - pages page.Pages - - init sync.Once - factory page.PagesFactory -} - -func (l *lazyPagesFactory) get() page.Pages { - l.init.Do(func() { - l.pages = l.factory() - }) - return l.pages -} - -func newLazyPagesFactory(factory page.PagesFactory) *lazyPagesFactory { - return &lazyPagesFactory{factory: factory} -} - -func newPageCollections(m *pageMap) *PageCollections { +func newPageFinder(m *pageMap) *pageFinder { if m == nil { panic("must provide a pageMap") } + c := &pageFinder{pageMap: m} + return c +} - c := &PageCollections{pageMap: m} +// getPageRef resolves a Page from ref/relRef, with a slightly more comprehensive +// search path than getPage. 
+func (c *pageFinder) getPageRef(context page.Page, ref string) (page.Page, error) {
+	n, err := c.getContentNode(context, true, ref)
+	if err != nil {
+		return nil, err
+	}
-	c.pages = newLazyPagesFactory(func() page.Pages {
-		return m.createListAllPages()
-	})
+	if p, ok := n.(page.Page); ok {
+		return p, nil
+	}
+	return nil, nil
+}
-	c.regularPages = newLazyPagesFactory(func() page.Pages {
-		return c.findPagesByKindIn(kinds.KindPage, c.pages.get())
-	})
+func (c *pageFinder) getPage(context page.Page, ref string) (page.Page, error) {
+	n, err := c.getContentNode(context, false, filepath.ToSlash(ref))
+	if err != nil {
+		return nil, err
+	}
+	if p, ok := n.(page.Page); ok {
+		return p, nil
+	}
+	return nil, nil
+}
-	return c
+// Only used in tests.
+func (c *pageFinder) getPageOldVersion(kind string, sections ...string) page.Page {
+	refs := append([]string{kind}, path.Join(sections...))
+	p, _ := c.getPageForRefs(refs...)
+	return p
 }
 
 // This is an adapter func for the old API with Kind as first argument.
 // This is invoked when you do .Site.GetPage. We drop the Kind and fails
 // if there are more than 2 arguments, which would be ambiguous.
-func (c *PageCollections) getPageOldVersion(ref ...string) (page.Page, error) {
+func (c *pageFinder) getPageForRefs(ref ...string) (page.Page, error) {
 	var refs []string
 	for _, r := range ref {
 		// A common construct in the wild is
@@ -141,201 +115,156 @@ func (c *PageCollections) getPageOldVersion(ref ...string) (page.Page, error) {
 		key = "/" + key
 	}
-	return c.getPageNew(nil, key)
+	return c.getPage(nil, key)
 }
-// Only used in tests.
-func (c *PageCollections) getPage(typ string, sections ...string) page.Page {
-	refs := append([]string{typ}, path.Join(sections...))
-	p, _ := c.getPageOldVersion(refs...)
-	return p
-}
+const defaultContentExt = ".md"
-// getPageRef resolves a Page from ref/relRef, with a slightly more comprehensive
-// search path than getPageNew.
-func (c *PageCollections) getPageRef(context page.Page, ref string) (page.Page, error) {
-	n, err := c.getContentNode(context, true, ref)
-	if err != nil || n == nil || n.p == nil {
-		return nil, err
-	}
-	return n.p, nil
-}
-
-func (c *PageCollections) getPageNew(context page.Page, ref string) (page.Page, error) {
-	n, err := c.getContentNode(context, false, ref)
-	if err != nil || n == nil || n.p == nil {
-		return nil, err
-	}
-	return n.p, nil
-}
-
-func (c *PageCollections) getSectionOrPage(ref string) (*contentNode, string) {
-	var n *contentNode
-
-	pref := helpers.AddTrailingSlash(ref)
-	s, v, found := c.pageMap.sections.LongestPrefix(pref)
-
-	if found {
-		n = v.(*contentNode)
+func (c *pageFinder) getContentNode(context page.Page, isReflink bool, ref string) (contentNodeI, error) {
+	inRef := ref
+	if ref == "" {
+		ref = "/"
 	}
-	if found && s == pref {
-		// A section
-		return n, ""
+	if paths.HasExt(ref) {
+		return c.getContentNodeForRef(context, isReflink, true, inRef, ref)
 	}
-	m := c.pageMap
-
-	filename := strings.TrimPrefix(strings.TrimPrefix(ref, s), "/")
-	langSuffix := "." + m.s.Lang()
-
-	// Trim both extension and any language code.
-	name := paths.PathNoExt(filename)
-	name = strings.TrimSuffix(name, langSuffix)
-
-	// These are reserved bundle names and will always be stored by their owning
-	// folder name.
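// [Editor's note, not part of the patch] For extension-less refs, the new
// getContentNode (continued further down in this hunk) first expands the ref
// into candidate content paths using defaultContentExt (".md") before looking
// them up in the page tree. A standalone sketch of that expansion, assuming the
// same rules as the added code below:
//
//	func candidateRefs(ref string) []string {
//		if ref == "/" {
//			return []string{"/_index.md"}
//		}
//		if strings.HasSuffix(ref, "/index") {
//			return []string{ref + "/index.md", ref + ".md"}
//		}
//		return []string{ref + ".md"}
//	}
//
// So "/docs" is tried as "/docs.md", while "/mysect/index" is tried both as a
// leaf bundle named "index" ("/mysect/index/index.md") and as "/mysect/index.md".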
- name = strings.TrimSuffix(name, "/index") - name = strings.TrimSuffix(name, "/_index") + var refs []string - if !found { - return nil, name + // We are always looking for a content file and having an extension greatly simplifies the code that follows, + // even in the case where the extension does not match this one. + if ref == "/" { + refs = append(refs, "/_index"+defaultContentExt) + } else if strings.HasSuffix(ref, "/index") { + refs = append(refs, ref+"/index"+defaultContentExt) + refs = append(refs, ref+defaultContentExt) + } else { + refs = append(refs, ref+defaultContentExt) } - // Check if it's a section with filename provided. - if !n.p.File().IsZero() && n.p.File().LogicalName() == filename { - return n, name + for _, ref := range refs { + n, err := c.getContentNodeForRef(context, isReflink, false, inRef, ref) + if n != nil || err != nil { + return n, err + } } - return m.getPage(s, name), name + return nil, nil } -// For Ref/Reflink and .Site.GetPage do simple name lookups for the potentially ambiguous myarticle.md and /myarticle.md, -// but not when we get ./myarticle*, section/myarticle. -func shouldDoSimpleLookup(ref string) bool { - if ref[0] == '.' { - return false - } +func (c *pageFinder) getContentNodeForRef(context page.Page, isReflink, hadExtension bool, inRef, ref string) (contentNodeI, error) { + s := c.pageMap.s + contentPathParser := s.Conf.PathParser() - slashCount := strings.Count(ref, "/") + if context != nil && !strings.HasPrefix(ref, "/") { + // Try the page-relative path first. + // Branch pages: /mysection, "./mypage" => /mysection/mypage + // Regular pages: /mysection/mypage.md, Path=/mysection/mypage, "./someotherpage" => /mysection/mypage/../someotherpage + // Regular leaf bundles: /mysection/mypage/index.md, Path=/mysection/mypage, "./someotherpage" => /mysection/mypage/../someotherpage + // Given the above, for regular pages we use the containing folder. + var baseDir string + if pi := context.PathInfo(); pi != nil { + if pi.IsBranchBundle() || (hadExtension) { + baseDir = pi.Dir() + } else { + baseDir = pi.ContainerDir() + } + } - if slashCount > 1 { - return false - } + rel := path.Join(baseDir, inRef) - return slashCount == 0 || ref[0] == '/' -} + if !hadExtension && !paths.HasExt(rel) { + // See comment above. + rel += defaultContentExt + } -func (c *PageCollections) getContentNode(context page.Page, isReflink bool, ref string) (*contentNode, error) { - ref = filepath.ToSlash(strings.ToLower(strings.TrimSpace(ref))) + relPath := contentPathParser.Parse(files.ComponentFolderContent, rel) - if ref == "" { - ref = "/" - } - - inRef := ref - navUp := strings.HasPrefix(ref, "..") - var doSimpleLookup bool - if isReflink || context == nil { - doSimpleLookup = shouldDoSimpleLookup(ref) - } + n, err := c.getContentNodeFromPath(relPath, ref) + if n != nil || err != nil { + return n, err + } - if context != nil && !strings.HasPrefix(ref, "/") { - // Try the page-relative path. - var base string - if context.File().IsZero() { - base = context.SectionsPath() - } else { - meta := context.File().FileInfo().Meta() - base = filepath.ToSlash(filepath.Dir(meta.Path)) - if meta.Classifier == files.ContentClassLeaf { - // Bundles are stored in subfolders e.g. blog/mybundle/index.md, - // so if the user has not explicitly asked to go up, - // look on the "blog" level. 
- if !navUp { - base = path.Dir(base) - } + if hadExtension && context.File() != nil { + if n, err := c.getContentNodeFromRefReverseLookup(inRef, context.File().FileInfo()); n != nil || err != nil { + return n, err } } - ref = path.Join("/", strings.ToLower(base), ref) + } - if !strings.HasPrefix(ref, "/") { - ref = "/" + ref + if strings.HasPrefix(ref, ".") { + // Page relative, no need to look further. + return nil, nil } - m := c.pageMap + refPath := contentPathParser.Parse(files.ComponentFolderContent, ref) - // It's either a section, a page in a section or a taxonomy node. - // Start with the most likely: - n, name := c.getSectionOrPage(ref) - if n != nil { - return n, nil - } + n, err := c.getContentNodeFromPath(refPath, ref) - if !strings.HasPrefix(inRef, "/") { - // Many people will have "post/foo.md" in their content files. - if n, _ := c.getSectionOrPage("/" + inRef); n != nil { - return n, nil - } + if n != nil || err != nil { + return n, err } - // Check if it's a taxonomy node - pref := helpers.AddTrailingSlash(ref) - s, v, found := m.taxonomies.LongestPrefix(pref) - - if found { - if !m.onSameLevel(pref, s) { - return nil, nil + if hadExtension && s.home != nil && s.home.File() != nil { + if n, err := c.getContentNodeFromRefReverseLookup(inRef, s.home.File().FileInfo()); n != nil || err != nil { + return n, err } - return v.(*contentNode), nil } - getByName := func(s string) (*contentNode, error) { - n := m.pageReverseIndex.Get(s) - if n != nil { - if n == ambiguousContentNode { - return nil, fmt.Errorf("page reference %q is ambiguous", ref) - } - return n, nil + var doSimpleLookup bool + if isReflink || context == nil { + slashCount := strings.Count(inRef, "/") + if slashCount <= 1 { + doSimpleLookup = slashCount == 0 || ref[0] == '/' } + } + if !doSimpleLookup { return nil, nil } - var module string - if context != nil && !context.File().IsZero() { - module = context.File().FileInfo().Meta().Module + n = c.pageMap.pageReverseIndex.Get(refPath.BaseNameNoIdentifier()) + if n == ambiguousContentNode { + return nil, fmt.Errorf("page reference %q is ambiguous", inRef) } - if module == "" && !c.pageMap.s.home.File().IsZero() { - module = c.pageMap.s.home.File().FileInfo().Meta().Module - } + return n, nil +} - if module != "" { - n, err := getByName(module + ref) - if err != nil { - return nil, err - } - if n != nil { - return n, nil - } +func (c *pageFinder) getContentNodeFromRefReverseLookup(ref string, fi hugofs.FileMetaInfo) (contentNodeI, error) { + s := c.pageMap.s + meta := fi.Meta() + dir := meta.Filename + if !fi.IsDir() { + dir = filepath.Dir(meta.Filename) } - if !doSimpleLookup { - return nil, nil + realFilename := filepath.Join(dir, ref) + + pcs, err := s.BaseFs.Content.ReverseLookup(realFilename) + if err != nil { + return nil, err } - // Ref/relref supports this potentially ambiguous lookup. - return getByName(path.Base(name)) + // There may be multiple matches, but we will only use the first one. 
+ for _, pc := range pcs { + pi := s.Conf.PathParser().Parse(pc.Component, pc.Path) + if n := c.pageMap.treePages.Get(pi.Base()); n != nil { + return n, nil + } + } + return nil, nil } -func (*PageCollections) findPagesByKindIn(kind string, inPages page.Pages) page.Pages { - var pages page.Pages - for _, p := range inPages { - if p.Kind() == kind { - pages = append(pages, p) - } +func (c *pageFinder) getContentNodeFromPath(refPath *paths.Path, ref string) (contentNodeI, error) { + s := refPath.Base() + + n := c.pageMap.treePages.Get(s) + if n != nil { + return n, nil } - return pages + + return nil, nil } diff --git a/hugolib/pagecollections_test.go b/hugolib/pagecollections_test.go index b11fc9899ad..8fd4f07395c 100644 --- a/hugolib/pagecollections_test.go +++ b/hugolib/pagecollections_test.go @@ -63,12 +63,12 @@ func BenchmarkGetPage(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - home, _ := s.getPageNew(nil, "/") + home, _ := s.getPage(nil, "/") if home == nil { b.Fatal("Home is nil") } - p, _ := s.getPageNew(nil, pagePaths[i]) + p, _ := s.getPage(nil, pagePaths[i]) if p == nil { b.Fatal("Section is nil") } @@ -107,7 +107,7 @@ func TestBenchmarkGetPageRegular(t *testing.T) { for i := 0; i < 10; i++ { pp := path.Join("/", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", i)) - page, _ := s.getPageNew(nil, pp) + page, _ := s.getPage(nil, pp) c.Assert(page, qt.Not(qt.IsNil), qt.Commentf(pp)) } } @@ -127,7 +127,7 @@ func BenchmarkGetPageRegular(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - page, _ := s.getPageNew(nil, pagePaths[i]) + page, _ := s.getPage(nil, pagePaths[i]) c.Assert(page, qt.Not(qt.IsNil)) } }) @@ -147,7 +147,7 @@ func BenchmarkGetPageRegular(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - page, _ := s.getPageNew(pages[i], pagePaths[i]) + page, _ := s.getPage(pages[i], pagePaths[i]) c.Assert(page, qt.Not(qt.IsNil)) } }) @@ -226,7 +226,7 @@ func TestGetPage(t *testing.T) { s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Configs: configs}, BuildCfg{SkipRender: true}) - sec3, err := s.getPageNew(nil, "/sect3") + sec3, err := s.getPage(nil, "/sect3") c.Assert(err, qt.IsNil) c.Assert(sec3, qt.Not(qt.IsNil)) @@ -313,15 +313,36 @@ func TestGetPage(t *testing.T) { } } - // test new internal Site.getPageNew + // test new internal Site.getPage for _, ref := range test.pathVariants { - page2, err := s.getPageNew(test.context, ref) + page2, err := s.getPage(test.context, ref) test.check(page2, err, errorMsg, c) } }) } } +// #11664 +func TestGetPageIndexIndex(t *testing.T) { + files := ` +-- hugo.toml -- +disableKinds = ["taxonomy", "term"] +-- content/mysect/index/index.md -- +--- +title: "Mysect Index" +--- +-- layouts/index.html -- +GetPage 1: {{ with site.GetPage "mysect/index/index.md" }}{{ .Title }}|{{ .RelPermalink }}|{{ .Path }}{{ end }}| +GetPage 2: {{ with site.GetPage "mysect/index" }}{{ .Title }}|{{ .RelPermalink }}|{{ .Path }}{{ end }}| +` + + b := Test(t, files) + b.AssertFileContent("public/index.html", + "GetPage 1: Mysect Index|/mysect/index/|/mysect/index|", + "GetPage 2: Mysect Index|/mysect/index/|/mysect/index|", + ) +} + // https://github.com/gohugoio/hugo/issues/6034 func TestGetPageRelative(t *testing.T) { b := newTestSitesBuilder(t) @@ -348,6 +369,172 @@ NOT FOUND b.AssertFileContent("public/who/index.html", `NOT FOUND`) } +func TestGetPageIssue11883(t *testing.T) { + files := ` +-- hugo.toml -- +-- p1/index.md -- +--- +title: p1 +--- +-- p1/p1.xyz -- +xyz. +-- layouts/index.html -- +Home. 
{{ with .Page.GetPage "p1.xyz" }}{{ else }}OK 1{{ end }} {{ with .Site.GetPage "p1.xyz" }}{{ else }}OK 2{{ end }} +` + + b := Test(t, files) + b.AssertFileContent("public/index.html", "Home. OK 1 OK 2") +} + +func TestGetPageBundleToRegular(t *testing.T) { + files := ` +-- hugo.toml -- +-- content/s1/p1/index.md -- +--- +title: p1 +--- +-- content/s1/p2.md -- +--- +title: p2 +--- +-- layouts/_default/single.html -- +{{ with .GetPage "p2" }} + OK: {{ .LinkTitle }} +{{ else }} + Unable to get p2. +{{ end }} +` + + b := Test(t, files) + b.AssertFileContent("public/s1/p1/index.html", "OK: p2") + b.AssertFileContent("public/s1/p2/index.html", "OK: p2") +} + +func TestPageGetPageVariations(t *testing.T) { + files := ` +-- hugo.toml -- +-- content/s1/p1/index.md -- +--- +title: p1 +--- +-- content/s1/p2.md -- +--- +title: p2 +--- +-- content/s2/p3/index.md -- +--- +title: p3 +--- +-- content/p2.md -- +--- +title: p2_root +--- +-- layouts/index.html -- +/s1/p2.md: {{ with .GetPage "/s1/p2.md" }}{{ .Title }}{{ end }}| +/s1/p2: {{ with .GetPage "/s1/p2" }}{{ .Title }}{{ end }}| +/s1/p1/index.md: {{ with .GetPage "/s1/p1/index.md" }}{{ .Title }}{{ end }}| +/s1/p1: {{ with .GetPage "/s1/p1" }}{{ .Title }}{{ end }}| +-- layouts/_default/single.html -- +../p2: {{ with .GetPage "../p2" }}{{ .Title }}{{ end }}| +../p2.md: {{ with .GetPage "../p2.md" }}{{ .Title }}{{ end }}| +p1/index.md: {{ with .GetPage "p1/index.md" }}{{ .Title }}{{ end }}| +../s2/p3/index.md: {{ with .GetPage "../s2/p3/index.md" }}{{ .Title }}{{ end }}| +` + + b := Test(t, files) + + b.AssertFileContent("public/index.html", ` +/s1/p2.md: p2| +/s1/p2: p2| +/s1/p1/index.md: p1| +/s1/p1: p1| +`) + + b.AssertFileContent("public/s1/p1/index.html", ` +../p2: p2_root| +../p2.md: p2| + +`) + + b.AssertFileContent("public/s1/p2/index.html", ` +../p2: p2_root| +../p2.md: p2_root| +p1/index.md: p1| +../s2/p3/index.md: p3| + +`) +} + +func TestPageGetPageMountsReverseLookup(t *testing.T) { + tempDir := t.TempDir() + + files := ` +-- README.md -- +--- +title: README +--- +-- blog/b1.md -- +--- +title: b1 +--- +{{< ref "../docs/d1.md" >}} +-- blog/b2/index.md -- +--- +title: b2 +--- +{{< ref "../../docs/d1.md" >}} +-- docs/d1.md -- +--- +title: d1 +--- +-- hugo.toml -- +baseURL = "https://example.com/" +[module] +[[module.mounts]] +source = "layouts" +target = "layouts" +[[module.mounts]] +source = "README.md" +target = "content/_index.md" +[[module.mounts]] +source = "blog" +target = "content/posts" +[[module.mounts]] +source = "docs" +target = "content/mydocs" +-- layouts/shortcodes/ref.html -- +{{ $ref := .Get 0 }} +.Page.GetPage({{ $ref }}).Title: {{ with .Page.GetPage $ref }}{{ .Title }}{{ end }}| +-- layouts/index.html -- +Home. +/blog/b1.md: {{ with .GetPage "/blog/b1.md" }}{{ .Title }}{{ end }}| +/blog/b2/index.md: {{ with .GetPage "/blog/b2/index.md" }}{{ .Title }}{{ end }}| +/docs/d1.md: {{ with .GetPage "/docs/d1.md" }}{{ .Title }}{{ end }}| +/README.md: {{ with .GetPage "/README.md" }}{{ .Title }}{{ end }}| +-- layouts/_default/single.html -- +Single. 
+/README.md: {{ with .GetPage "/README.md" }}{{ .Title }}{{ end }}| +{{ .Content }} + + +` + b := Test(t, files, TestOptWithConfig(func(cfg *IntegrationTestConfig) { cfg.WorkingDir = tempDir })) + + b.AssertFileContent("public/index.html", + ` +/blog/b1.md: b1| +/blog/b2/index.md: b2| +/docs/d1.md: d1| +/README.md: README +`, + ) + + b.AssertFileContent("public/mydocs/d1/index.html", `README.md: README|`) + + b.AssertFileContent("public/posts/b1/index.html", `.Page.GetPage(../docs/d1.md).Title: d1|`) + b.AssertFileContent("public/posts/b2/index.html", `.Page.GetPage(../../docs/d1.md).Title: d1|`) +} + // https://github.com/gohugoio/hugo/issues/7016 func TestGetPageMultilingual(t *testing.T) { b := newTestSitesBuilder(t) @@ -386,15 +573,6 @@ NOT FOUND b.AssertFileContent("public/en/index.html", `NOT FOUND`) } -func TestShouldDoSimpleLookup(t *testing.T) { - c := qt.New(t) - - c.Assert(shouldDoSimpleLookup("foo.md"), qt.Equals, true) - c.Assert(shouldDoSimpleLookup("/foo.md"), qt.Equals, true) - c.Assert(shouldDoSimpleLookup("./foo.md"), qt.Equals, false) - c.Assert(shouldDoSimpleLookup("docs/foo.md"), qt.Equals, false) -} - func TestRegularPagesRecursive(t *testing.T) { b := newTestSitesBuilder(t) @@ -449,5 +627,4 @@ RegularPagesRecursive: {{ range .RegularPagesRecursive }}{{ .Kind }}:{{ .RelPerm }).Build() b.AssertFileContent("public/index.html", `RegularPagesRecursive: page:/p1/|page:/post/p2/||End.`) - } diff --git a/hugolib/pages_capture.go b/hugolib/pages_capture.go index c57c707de1e..acdc674e63d 100644 --- a/hugolib/pages_capture.go +++ b/hugolib/pages_capture.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. +// Copyright 2021 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
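// [Editor's note, not part of the patch] The rewritten collector in the hunks
// below feeds discovered content files to a bounded worker group via the
// rungroup package, replacing the old single-pass bundle classification. A
// reduced sketch of that pattern as it appears in Collect further down, with
// the handler body trimmed to a comment:
//
//	g := rungroup.Run[hugofs.FileMetaInfo](ctx, rungroup.Config[hugofs.FileMetaInfo]{
//		NumWorkers: numWorkers,
//		Handle: func(ctx context.Context, fi hugofs.FileMetaInfo) error {
//			// The real handler adds the file to the page map (c.m.AddFi) and
//			// falls back to copying plain HTML documents out as static files.
//			return nil
//		},
//	})
//	// Files are handed to the group while walking the content filesystem, and
//	// any worker error is picked up with g.Wait() at the end of Collect.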
@@ -15,190 +15,188 @@ package hugolib import ( "context" + "errors" "fmt" - pth "path" + "os" "path/filepath" - "reflect" - - "github.com/gohugoio/hugo/common/herrors" - "github.com/gohugoio/hugo/common/loggers" - "github.com/gohugoio/hugo/common/maps" - + "strings" + "sync" + "sync/atomic" + "time" + + "github.com/bep/logg" + "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/common/rungroup" + "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/parser/pageparser" - - "github.com/gohugoio/hugo/hugofs/files" + "github.com/spf13/afero" "github.com/gohugoio/hugo/source" + "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/hugofs" - "github.com/spf13/afero" -) - -const ( - walkIsRootFileMetaKey = "walkIsRootFileMetaKey" ) func newPagesCollector( + ctx context.Context, + h *HugoSites, sp *source.SourceSpec, - contentMap *pageMaps, logger loggers.Logger, - contentTracker *contentChangeMap, - proc pagesCollectorProcessorProvider, filenames ...string) *pagesCollector { + infoLogger logg.LevelLogger, + m *pageMap, + ids []pathChange, +) *pagesCollector { return &pagesCollector{ - fs: sp.SourceFs, - contentMap: contentMap, - proc: proc, + ctx: ctx, + h: h, + fs: sp.BaseFs.Content.Fs, + m: m, sp: sp, logger: logger, - filenames: filenames, - tracker: contentTracker, + infoLogger: infoLogger, + ids: ids, + seenDirs: make(map[string]bool), } } -type contentDirKey struct { - dirname string - filename string - tp bundleDirType -} - -type fileinfoBundle struct { - header hugofs.FileMetaInfo - resources []hugofs.FileMetaInfo -} - -func (b *fileinfoBundle) containsResource(name string) bool { - for _, r := range b.resources { - if r.Name() == name { - return true - } - } - - return false -} - -type pageBundles map[string]*fileinfoBundle - type pagesCollector struct { - sp *source.SourceSpec - fs afero.Fs - logger loggers.Logger + ctx context.Context + h *HugoSites + sp *source.SourceSpec + logger loggers.Logger + infoLogger logg.LevelLogger - contentMap *pageMaps + m *pageMap - // Ordered list (bundle headers first) used in partial builds. - filenames []string + fs afero.Fs - // Content files tracker used in partial builds. - tracker *contentChangeMap + // List of paths that have changed. Used in partial builds. + ids []pathChange + seenDirs map[string]bool - proc pagesCollectorProcessorProvider + g rungroup.Group[hugofs.FileMetaInfo] } -// isCascadingEdit returns whether the dir represents a cascading edit. -// That is, if a front matter cascade section is removed, added or edited. -// If this is the case we must re-evaluate its descendants. -func (c *pagesCollector) isCascadingEdit(dir contentDirKey) (bool, string) { - // This is either a section or a taxonomy node. Find it. - prefix := cleanTreeKey(dir.dirname) - - section := "/" - var isCascade bool - - c.contentMap.walkBranchesPrefix(prefix, func(s string, n *contentNode) bool { - if n.fi == nil || dir.filename != n.fi.Meta().Filename { - return false - } - - f, err := n.fi.Meta().Open() - if err != nil { - // File may have been removed, assume a cascading edit. - // Some false positives is not too bad. 
- isCascade = true - return true - } - - pf, err := pageparser.ParseFrontMatterAndContent(f) - f.Close() - if err != nil { - isCascade = true - return true - } - - if n.p == nil || n.p.bucket == nil { - return true - } +func (c *pagesCollector) copyFile(fim hugofs.FileMetaInfo) error { + meta := fim.Meta() + f, err := meta.Open() + if err != nil { + return fmt.Errorf("copyFile: failed to open: %w", err) + } - section = s + s := c.m.s - maps.PrepareParams(pf.FrontMatter) - cascade1, ok := pf.FrontMatter["cascade"] - hasCascade := n.p.bucket.cascade != nil && len(n.p.bucket.cascade) > 0 - if !ok { - isCascade = hasCascade + target := filepath.Join(s.PathSpec.GetTargetLanguageBasePath(), meta.PathInfo.Path()) - return true - } - - if !hasCascade { - isCascade = true - return true - } + defer f.Close() - for _, v := range n.p.bucket.cascade { - isCascade = !reflect.DeepEqual(cascade1, v) - if isCascade { - break - } - } + fs := s.PublishFsStatic - return true - }) + s.PathSpec.ProcessingStats.Incr(&s.PathSpec.ProcessingStats.Files) - return isCascade, section + return helpers.WriteToDisk(filepath.Clean(target), f, fs) } -// Collect. +// Collect collects content by walking the file system and storing +// it in the content tree. +// It may be restricted by filenames set on the collector (partial build). func (c *pagesCollector) Collect() (collectErr error) { - c.proc.Start(context.Background()) + var ( + numWorkers = c.h.numWorkers + numFilesProcessedTotal atomic.Uint64 + numFilesProcessedLast uint64 + fileBatchTimer = time.Now() + fileBatchTimerMu sync.Mutex + ) + + l := c.infoLogger.WithField("substep", "collect") + + logFilesProcessed := func(force bool) { + fileBatchTimerMu.Lock() + if force || time.Since(fileBatchTimer) > 3*time.Second { + numFilesProcessedBatch := numFilesProcessedTotal.Load() - numFilesProcessedLast + numFilesProcessedLast = numFilesProcessedTotal.Load() + loggers.TimeTrackf(l, fileBatchTimer, + logg.Fields{ + logg.Field{Name: "files", Value: numFilesProcessedBatch}, + logg.Field{Name: "files_total", Value: numFilesProcessedTotal.Load()}, + }, + "", + ) + fileBatchTimer = time.Now() + } + fileBatchTimerMu.Unlock() + } + defer func() { - err := c.proc.Wait() - if collectErr == nil { - collectErr = err - } + logFilesProcessed(true) }() - if len(c.filenames) == 0 { - // Collect everything. - collectErr = c.collectDir("", false, nil) - } else { - for _, pm := range c.contentMap.pmaps { - pm.cfg.isRebuild = true - } - dirs := make(map[contentDirKey]bool) - for _, filename := range c.filenames { - dir, btype := c.tracker.resolveAndRemove(filename) - dirs[contentDirKey{dir, filename, btype}] = true - } - - for dir := range dirs { - for _, pm := range c.contentMap.pmaps { - pm.s.ResourceSpec.DeleteBySubstring(dir.dirname) + c.g = rungroup.Run[hugofs.FileMetaInfo](c.ctx, rungroup.Config[hugofs.FileMetaInfo]{ + NumWorkers: numWorkers, + Handle: func(ctx context.Context, fi hugofs.FileMetaInfo) error { + if err := c.m.AddFi(fi); err != nil { + if errors.Is(err, pageparser.ErrPlainHTMLDocumentsNotSupported) { + // Reclassify this as a static file. 
+ if err := c.copyFile(fi); err != nil { + return err + } + } else { + return hugofs.AddFileInfoToError(err, fi, c.fs) + } } + numFilesProcessedTotal.Add(1) + if numFilesProcessedTotal.Load()%1000 == 0 { + logFilesProcessed(false) + } + return nil + }, + }) - switch dir.tp { - case bundleLeaf: - collectErr = c.collectDir(dir.dirname, true, nil) - case bundleBranch: - isCascading, section := c.isCascadingEdit(dir) - - if isCascading { - c.contentMap.deleteSection(section) - } - collectErr = c.collectDir(dir.dirname, !isCascading, nil) - default: + if c.ids == nil { + // Collect everything. + collectErr = c.collectDir(nil, false, nil) + } else { + for _, s := range c.h.Sites { + s.pageMap.cfg.isRebuild = true + } + + for _, id := range c.ids { + if id.p.IsLeafBundle() { + collectErr = c.collectDir( + id.p, + false, + func(fim hugofs.FileMetaInfo) bool { + return true + }, + ) + } else if id.p.IsBranchBundle() { + collectErr = c.collectDir( + id.p, + false, + func(fim hugofs.FileMetaInfo) bool { + if fim.IsDir() { + return true + } + fimp := fim.Meta().PathInfo + if fimp == nil { + return false + } + + return strings.HasPrefix(fimp.Path(), paths.AddTrailingSlash(id.p.Dir())) + }, + ) + } else { // We always start from a directory. - collectErr = c.collectDir(dir.dirname, true, func(fim hugofs.FileMetaInfo) bool { - return dir.filename == fim.Meta().Filename + collectErr = c.collectDir(id.p, id.isDir, func(fim hugofs.FileMetaInfo) bool { + if id.delete || id.isDir { + if id.isDir { + return strings.HasPrefix(fim.Meta().PathInfo.Path(), paths.AddTrailingSlash(id.p.Path())) + } + + return id.p.Dir() == fim.Meta().PathInfo.Dir() + } + return id.p.Path() == fim.Meta().PathInfo.Path() }) } @@ -209,160 +207,51 @@ func (c *pagesCollector) Collect() (collectErr error) { } - return -} - -func (c *pagesCollector) isBundleHeader(fi hugofs.FileMetaInfo) bool { - class := fi.Meta().Classifier - return class == files.ContentClassLeaf || class == files.ContentClassBranch -} - -func (c *pagesCollector) getLang(fi hugofs.FileMetaInfo) string { - lang := fi.Meta().Lang - if lang != "" { - return lang - } - return c.sp.Cfg.DefaultContentLanguage() -} - -func (c *pagesCollector) addToBundle(info hugofs.FileMetaInfo, btyp bundleDirType, bundles pageBundles) error { - getBundle := func(lang string) *fileinfoBundle { - return bundles[lang] + werr := c.g.Wait() + if collectErr == nil { + collectErr = werr } - cloneBundle := func(lang string) *fileinfoBundle { - // Every bundled content file needs a content file header. - // Use the default content language if found, else just - // pick one. - var ( - source *fileinfoBundle - found bool - ) - - source, found = bundles[c.sp.Cfg.DefaultContentLanguage()] - if !found { - for _, b := range bundles { - source = b - break - } - } - - if source == nil { - panic(fmt.Sprintf("no source found, %d", len(bundles))) - } - - clone := c.cloneFileInfo(source.header) - clone.Meta().Lang = lang - - return &fileinfoBundle{ - header: clone, - } - } + return +} - lang := c.getLang(info) - bundle := getBundle(lang) - isBundleHeader := c.isBundleHeader(info) - if bundle != nil && isBundleHeader { - // index.md file inside a bundle, see issue 6208. 
- info.Meta().Classifier = files.ContentClassContent - isBundleHeader = false - } - classifier := info.Meta().Classifier - isContent := classifier == files.ContentClassContent - if bundle == nil { - if isBundleHeader { - bundle = &fileinfoBundle{header: info} - bundles[lang] = bundle +func (c *pagesCollector) collectDir(dirPath *paths.Path, isDir bool, inFilter func(fim hugofs.FileMetaInfo) bool) error { + var dpath string + if dirPath != nil { + if isDir { + dpath = filepath.FromSlash(dirPath.Path()) } else { - if btyp == bundleBranch { - // No special logic for branch bundles. - // Every language needs its own _index.md file. - // Also, we only clone bundle headers for lonesome, bundled, - // content files. - return c.handleFiles(info) - } - - if isContent { - bundle = cloneBundle(lang) - bundles[lang] = bundle - } + dpath = filepath.FromSlash(dirPath.Dir()) } } - if !isBundleHeader && bundle != nil { - bundle.resources = append(bundle.resources, info) - } - - if classifier == files.ContentClassFile { - translations := info.Meta().Translations - - for lang, b := range bundles { - if !stringSliceContains(lang, translations...) && !b.containsResource(info.Name()) { - - // Clone and add it to the bundle. - clone := c.cloneFileInfo(info) - clone.Meta().Lang = lang - b.resources = append(b.resources, clone) - } - } + if c.seenDirs[dpath] { + return nil } + c.seenDirs[dpath] = true - return nil -} - -func (c *pagesCollector) cloneFileInfo(fi hugofs.FileMetaInfo) hugofs.FileMetaInfo { - return hugofs.NewFileMetaInfo(fi, hugofs.NewFileMeta()) -} - -func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func(fim hugofs.FileMetaInfo) bool) error { - fi, err := c.fs.Stat(dirname) + root, err := c.fs.Stat(dpath) if err != nil { - if herrors.IsNotExist(err) { - // May have been deleted. + if os.IsNotExist(err) { return nil } return err } - handleDir := func( - btype bundleDirType, - dir hugofs.FileMetaInfo, - path string, - readdir []hugofs.FileMetaInfo) error { - if btype > bundleNot && c.tracker != nil { - c.tracker.add(path, btype) - } - - if btype == bundleBranch { - if err := c.handleBundleBranch(readdir); err != nil { - return err - } - // A branch bundle is only this directory level, so keep walking. - return nil - } else if btype == bundleLeaf { - if err := c.handleBundleLeaf(dir, path, readdir); err != nil { - return err - } - - return nil - } - - if err := c.handleFiles(readdir...); err != nil { - return err - } + rootm := root.(hugofs.FileMetaInfo) - return nil + if err := c.collectDirDir(dpath, rootm, inFilter); err != nil { + return err } - filter := func(fim hugofs.FileMetaInfo) bool { - if fim.Meta().SkipDir { - return false - } + return nil +} +func (c *pagesCollector) collectDirDir(path string, root hugofs.FileMetaInfo, inFilter func(fim hugofs.FileMetaInfo) bool) error { + filter := func(fim hugofs.FileMetaInfo) bool { if c.sp.IgnoreFile(fim.Meta().Filename) { return false } - if inFilter != nil { return inFilter(fim) } @@ -370,83 +259,63 @@ func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func( } preHook := func(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) ([]hugofs.FileMetaInfo, error) { - var btype bundleDirType - filtered := readdir[:0] for _, fi := range readdir { if filter(fi) { filtered = append(filtered, fi) - - if c.tracker != nil { - // Track symlinks. 
- c.tracker.addSymbolicLinkMapping(fi) - } } } - walkRoot := dir.Meta().IsRootFile readdir = filtered + if len(readdir) == 0 { + return nil, nil + } - // We merge language directories, so there can be duplicates, but they - // will be ordered, most important first. - var duplicates []int - seen := make(map[string]bool) - - for i, fi := range readdir { - + // Pick the first regular file. + var first hugofs.FileMetaInfo + for _, fi := range readdir { if fi.IsDir() { continue } + first = fi + break + } - meta := fi.Meta() - meta.IsRootFile = walkRoot - class := meta.Classifier - translationBase := meta.TranslationBaseNameWithExt - key := pth.Join(meta.Lang, translationBase) - - if seen[key] { - duplicates = append(duplicates, i) - continue - } - seen[key] = true + if first == nil { + // Only dirs, keep walking. + return readdir, nil + } - var thisBtype bundleDirType + // Any bundle file will always be first. + firstPi := first.Meta().PathInfo + if firstPi == nil { + panic(fmt.Sprintf("collectDirDir: no path info for %q", first.Meta().Filename)) + } - switch class { - case files.ContentClassLeaf: - thisBtype = bundleLeaf - case files.ContentClassBranch: - thisBtype = bundleBranch + if firstPi.IsLeafBundle() { + if err := c.handleBundleLeaf(dir, first, path, readdir); err != nil { + return nil, err } + return nil, filepath.SkipDir + } - // Folders with both index.md and _index.md type of files have - // undefined behaviour and can never work. - // The branch variant will win because of sort order, but log - // a warning about it. - if thisBtype > bundleNot && btype > bundleNot && thisBtype != btype { - c.logger.Warnf("Content directory %q have both index.* and _index.* files, pick one.", dir.Meta().Filename) - // Reclassify it so it will be handled as a content file inside the - // section, which is in line with the <= 0.55 behaviour. - meta.Classifier = files.ContentClassContent - } else if thisBtype > bundleNot { - btype = thisBtype + for _, fi := range readdir { + if fi.IsDir() { + continue } - } - - if len(duplicates) > 0 { - for i := len(duplicates) - 1; i >= 0; i-- { - idx := duplicates[i] - readdir = append(readdir[:idx], readdir[idx+1:]...) + meta := fi.Meta() + pi := meta.PathInfo + if pi == nil { + panic(fmt.Sprintf("no path info for %q", meta.Filename)) } - } - err := handleDir(btype, dir, path, readdir) - if err != nil { - return nil, err - } + if meta.Lang == "" { + panic("lang not set") + } - if btype == bundleLeaf || partial { - return nil, filepath.SkipDir + if err := c.g.Enqueue(fi); err != nil { + return nil, err + } } // Keep walking. @@ -454,126 +323,56 @@ func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func( } var postHook hugofs.WalkHook - if c.tracker != nil { - postHook = func(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) ([]hugofs.FileMetaInfo, error) { - if c.tracker == nil { - // Nothing to do. - return readdir, nil - } - - return readdir, nil - } - } - - wfn := func(path string, info hugofs.FileMetaInfo, err error) error { - if err != nil { - return err - } + wfn := func(path string, fi hugofs.FileMetaInfo) error { return nil } - fim := fi.(hugofs.FileMetaInfo) - // Make sure the pages in this directory gets re-rendered, - // even in fast render mode. 
- fim.Meta().IsRootFile = true - - w := hugofs.NewWalkway(hugofs.WalkwayConfig{ - Fs: c.fs, - Logger: c.logger, - Root: dirname, - Info: fim, - HookPre: preHook, - HookPost: postHook, - WalkFn: wfn, - }) + w := hugofs.NewWalkway( + hugofs.WalkwayConfig{ + Logger: c.logger, + Root: path, + Info: root, + Fs: c.fs, + HookPre: preHook, + HookPost: postHook, + WalkFn: wfn, + }) return w.Walk() } -func (c *pagesCollector) handleBundleBranch(readdir []hugofs.FileMetaInfo) error { - // Maps bundles to its language. - bundles := pageBundles{} - - var contentFiles []hugofs.FileMetaInfo - - for _, fim := range readdir { - - if fim.IsDir() { - continue +func (c *pagesCollector) handleBundleLeaf(dir, bundle hugofs.FileMetaInfo, inPath string, readdir []hugofs.FileMetaInfo) error { + bundlePi := bundle.Meta().PathInfo + walk := func(path string, info hugofs.FileMetaInfo) error { + if info.IsDir() { + return nil } - meta := fim.Meta() + pi := info.Meta().PathInfo - switch meta.Classifier { - case files.ContentClassContent: - contentFiles = append(contentFiles, fim) - default: - if err := c.addToBundle(fim, bundleBranch, bundles); err != nil { - return err + if info != bundle { + // Everything inside a leaf bundle is a Resource, + // even the content pages. + // Note that we do allow index.md as page resources, but not in the bundle root. + if !pi.IsLeafBundle() || pi.Dir() != bundlePi.Dir() { + paths.ModifyPathBundleTypeResource(pi) } } - } - - // Make sure the section is created before its pages. - if err := c.proc.Process(bundles); err != nil { - return err - } - - return c.handleFiles(contentFiles...) -} - -func (c *pagesCollector) handleBundleLeaf(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) error { - // Maps bundles to its language. - bundles := pageBundles{} - - walk := func(path string, info hugofs.FileMetaInfo, err error) error { - if err != nil { - return err - } - if info.IsDir() { - return nil - } - - return c.addToBundle(info, bundleLeaf, bundles) + return c.g.Enqueue(info) } // Start a new walker from the given path. - w := hugofs.NewWalkway(hugofs.WalkwayConfig{ - Root: path, - Fs: c.fs, - Logger: c.logger, - Info: dir, - DirEntries: readdir, - WalkFn: walk, - }) - - if err := w.Walk(); err != nil { - return err - } - - return c.proc.Process(bundles) -} - -func (c *pagesCollector) handleFiles(fis ...hugofs.FileMetaInfo) error { - for _, fi := range fis { - if fi.IsDir() { - continue - } + w := hugofs.NewWalkway( + hugofs.WalkwayConfig{ + Root: inPath, + Fs: c.fs, + Logger: c.logger, + Info: dir, + DirEntries: readdir, + WalkFn: walk, + }) - if err := c.proc.Process(fi); err != nil { - return err - } - } - return nil -} - -func stringSliceContains(k string, values ...string) bool { - for _, v := range values { - if k == v { - return true - } - } - return false + return w.Walk() } diff --git a/hugolib/pages_capture_test.go b/hugolib/pages_capture_test.go deleted file mode 100644 index c771d30eee9..00000000000 --- a/hugolib/pages_capture_test.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package hugolib - -import ( - "context" - "fmt" - "path/filepath" - "testing" - - qt "github.com/frankban/quicktest" - "github.com/gohugoio/hugo/common/loggers" - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/config/testconfig" - "github.com/gohugoio/hugo/source" - "github.com/spf13/afero" -) - -func TestPagesCapture(t *testing.T) { - - c := qt.New(t) - - afs := afero.NewMemMapFs() - - writeFile := func(filename string) { - c.Assert(afero.WriteFile(afs, filepath.Join("content", filepath.FromSlash(filename)), []byte(fmt.Sprintf("content-%s", filename)), 0755), qt.IsNil) - } - - writeFile("_index.md") - writeFile("logo.png") - writeFile("root.md") - writeFile("blog/index.md") - writeFile("blog/hello.md") - writeFile("blog/images/sunset.png") - writeFile("pages/page1.md") - writeFile("pages/page2.md") - - cfg := config.New() - d := testconfig.GetTestDeps(afs, cfg) - sourceSpec := source.NewSourceSpec(d.PathSpec, nil, d.BaseFs.Content.Fs) - - t.Run("Collect", func(t *testing.T) { - c := qt.New(t) - proc := &testPagesCollectorProcessor{} - coll := newPagesCollector(sourceSpec, nil, loggers.NewDefault(), nil, proc) - c.Assert(coll.Collect(), qt.IsNil) - // 2 bundles, 3 pages. - c.Assert(len(proc.items), qt.Equals, 5) - }) - -} - -type testPagesCollectorProcessor struct { - items []any - waitErr error -} - -func (proc *testPagesCollectorProcessor) Process(item any) error { - proc.items = append(proc.items, item) - return nil -} - -func (proc *testPagesCollectorProcessor) Start(ctx context.Context) context.Context { - return ctx -} - -func (proc *testPagesCollectorProcessor) Wait() error { return proc.waitErr } diff --git a/hugolib/pages_language_merge_test.go b/hugolib/pages_language_merge_test.go index 55241d30667..8a5d6c184b9 100644 --- a/hugolib/pages_language_merge_test.go +++ b/hugolib/pages_language_merge_test.go @@ -70,8 +70,8 @@ func TestMergeLanguages(t *testing.T) { c.Assert(len(firstNN.Sites()), qt.Equals, 4) c.Assert(firstNN.Sites().First().Language().Lang, qt.Equals, "en") - nnBundle := nnSite.getPage("page", "bundle") - enBundle := enSite.getPage("page", "bundle") + nnBundle := nnSite.getPageOldVersion("page", "bundle") + enBundle := enSite.getPageOldVersion("page", "bundle") c.Assert(len(enBundle.Resources()), qt.Equals, 6) c.Assert(len(nnBundle.Resources()), qt.Equals, 2) diff --git a/hugolib/pages_process.go b/hugolib/pages_process.go deleted file mode 100644 index b0c04244beb..00000000000 --- a/hugolib/pages_process.go +++ /dev/null @@ -1,203 +0,0 @@ -// Copyright 2019 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package hugolib - -import ( - "context" - "fmt" - "path/filepath" - - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/source" - - "github.com/gohugoio/hugo/hugofs/files" - "golang.org/x/sync/errgroup" - - "github.com/gohugoio/hugo/common/herrors" - "github.com/gohugoio/hugo/hugofs" -) - -func newPagesProcessor(h *HugoSites, sp *source.SourceSpec) *pagesProcessor { - procs := make(map[string]pagesCollectorProcessorProvider) - for _, s := range h.Sites { - procs[s.Lang()] = &sitePagesProcessor{ - m: s.pageMap, - errorSender: s.h, - itemChan: make(chan interface{}, config.GetNumWorkerMultiplier()*2), - } - } - return &pagesProcessor{ - procs: procs, - } -} - -type pagesCollectorProcessorProvider interface { - Process(item any) error - Start(ctx context.Context) context.Context - Wait() error -} - -type pagesProcessor struct { - // Per language/Site - procs map[string]pagesCollectorProcessorProvider -} - -func (proc *pagesProcessor) Process(item any) error { - switch v := item.(type) { - // Page bundles mapped to their language. - case pageBundles: - for _, vv := range v { - proc.getProcFromFi(vv.header).Process(vv) - } - case hugofs.FileMetaInfo: - proc.getProcFromFi(v).Process(v) - default: - panic(fmt.Sprintf("unrecognized item type in Process: %T", item)) - - } - - return nil -} - -func (proc *pagesProcessor) Start(ctx context.Context) context.Context { - for _, p := range proc.procs { - ctx = p.Start(ctx) - } - return ctx -} - -func (proc *pagesProcessor) Wait() error { - var err error - for _, p := range proc.procs { - if e := p.Wait(); e != nil { - err = e - } - } - return err -} - -func (proc *pagesProcessor) getProcFromFi(fi hugofs.FileMetaInfo) pagesCollectorProcessorProvider { - if p, found := proc.procs[fi.Meta().Lang]; found { - return p - } - return defaultPageProcessor -} - -type nopPageProcessor int - -func (nopPageProcessor) Process(item any) error { - return nil -} - -func (nopPageProcessor) Start(ctx context.Context) context.Context { - return context.Background() -} - -func (nopPageProcessor) Wait() error { - return nil -} - -var defaultPageProcessor = new(nopPageProcessor) - -type sitePagesProcessor struct { - m *pageMap - errorSender herrors.ErrorSender - - ctx context.Context - itemChan chan any - itemGroup *errgroup.Group -} - -func (p *sitePagesProcessor) Process(item any) error { - select { - case <-p.ctx.Done(): - return nil - default: - p.itemChan <- item - } - return nil -} - -func (p *sitePagesProcessor) Start(ctx context.Context) context.Context { - p.itemGroup, ctx = errgroup.WithContext(ctx) - p.ctx = ctx - p.itemGroup.Go(func() error { - for item := range p.itemChan { - if err := p.doProcess(item); err != nil { - return err - } - } - return nil - }) - return ctx -} - -func (p *sitePagesProcessor) Wait() error { - close(p.itemChan) - return p.itemGroup.Wait() -} - -func (p *sitePagesProcessor) copyFile(fim hugofs.FileMetaInfo) error { - meta := fim.Meta() - f, err := meta.Open() - if err != nil { - return fmt.Errorf("copyFile: failed to open: %w", err) - } - - s := p.m.s - - target := filepath.Join(s.PathSpec.GetTargetLanguageBasePath(), meta.Path) - - defer f.Close() - - fs := s.PublishFsStatic - - return s.publish(&s.PathSpec.ProcessingStats.Files, target, f, fs) -} - -func (p *sitePagesProcessor) doProcess(item any) error { - m := p.m - switch v := item.(type) { - case *fileinfoBundle: - if err := m.AddFilesBundle(v.header, 
v.resources...); err != nil { - return err - } - case hugofs.FileMetaInfo: - if p.shouldSkip(v) { - return nil - } - meta := v.Meta() - - classifier := meta.Classifier - switch classifier { - case files.ContentClassContent: - if err := m.AddFilesBundle(v); err != nil { - return err - } - case files.ContentClassFile: - if err := p.copyFile(v); err != nil { - return err - } - default: - panic(fmt.Sprintf("invalid classifier: %q", classifier)) - } - default: - panic(fmt.Sprintf("unrecognized item type in Process: %T", item)) - } - return nil -} - -func (p *sitePagesProcessor) shouldSkip(fim hugofs.FileMetaInfo) bool { - return p.m.s.conf.IsLangDisabled(fim.Meta().Lang) -} diff --git a/hugolib/paths/paths.go b/hugolib/paths/paths.go index 83d5921e088..397dba3f809 100644 --- a/hugolib/paths/paths.go +++ b/hugolib/paths/paths.go @@ -87,12 +87,13 @@ func (p *Paths) AllModules() modules.Modules { } // GetBasePath returns any path element in baseURL if needed. +// The path returned will have a leading, but no trailing slash. func (p *Paths) GetBasePath(isRelativeURL bool) string { if isRelativeURL && p.Cfg.CanonifyURLs() { // The baseURL will be prepended later. return "" } - return p.Cfg.BaseURL().BasePath + return p.Cfg.BaseURL().BasePathNoTrailingSlash } func (p *Paths) Lang() string { diff --git a/hugolib/rebuild_test.go b/hugolib/rebuild_test.go new file mode 100644 index 00000000000..9fbef8e3b1c --- /dev/null +++ b/hugolib/rebuild_test.go @@ -0,0 +1,1256 @@ +package hugolib + +import ( + "fmt" + "path/filepath" + "strings" + "testing" + "time" + + "github.com/fortytw2/leaktest" + qt "github.com/frankban/quicktest" + "github.com/gohugoio/hugo/common/types" + "github.com/gohugoio/hugo/htesting" + "github.com/gohugoio/hugo/resources/resource_transformers/tocss/dartsass" + "github.com/gohugoio/hugo/resources/resource_transformers/tocss/scss" +) + +const rebuildFilesSimple = ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy", "sitemap", "robotstxt", "404"] +disableLiveReload = true +[outputs] +home = ["html"] +section = ["html"] +page = ["html"] +-- content/mysection/_index.md -- +--- +title: "My Section" +--- +-- content/mysection/mysectionbundle/index.md -- +--- +title: "My Section Bundle" +--- +My Section Bundle Content. +-- content/mysection/mysectionbundle/mysectionbundletext.txt -- +My Section Bundle Text 2 Content. +-- content/mysection/mysectionbundle/mysectionbundlecontent.md -- +--- +title: "My Section Bundle Content" +--- +My Section Bundle Content. +-- content/mysection/_index.md -- +--- +title: "My Section" +--- +-- content/mysection/mysectiontext.txt -- +-- content/_index.md -- +--- +title: "Home" +--- +Home Content. +-- content/hometext.txt -- +Home Text Content. +-- layouts/_default/single.html -- +Single: {{ .Title }}|{{ .Content }}$ +Resources: {{ range $i, $e := .Resources }}{{ $i }}:{{ .RelPermalink }}|{{ .Content }}|{{ end }}$ +Len Resources: {{ len .Resources }}| +-- layouts/_default/list.html -- +List: {{ .Title }}|{{ .Content }}$ +Len Resources: {{ len .Resources }}| +Resources: {{ range $i, $e := .Resources }}{{ $i }}:{{ .RelPermalink }}|{{ .Content }}|{{ end }}$ +-- layouts/shortcodes/foo.html -- +Foo. + +` + +func TestRebuildEditTextFileInLeafBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/mysection/mysectionbundle/index.html", + "Resources: 0:/mysection/mysectionbundle/mysectionbundletext.txt|My Section Bundle Text 2 Content.|1:|My Section Bundle Content.
\n|$") + + b.EditFileReplaceAll("content/mysection/mysectionbundle/mysectionbundletext.txt", "Content.", "Content Edited.").Build() + b.AssertFileContent("public/mysection/mysectionbundle/index.html", + "Text 2 Content Edited") + b.AssertRenderCountPage(1) + b.AssertRenderCountContent(1) +} + +func TestRebuildEditTextFileInHomeBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/index.html", "Home Content.") + b.AssertFileContent("public/index.html", "Home Text Content.") + + b.EditFileReplaceAll("content/hometext.txt", "Content.", "Content Edited.").Build() + b.AssertFileContent("public/index.html", "Home Content.") + b.AssertFileContent("public/index.html", "Home Text Content Edited.") + b.AssertRenderCountPage(1) + b.AssertRenderCountContent(1) +} + +func TestRebuildEditTextFileInBranchBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/mysection/index.html", "My Section") + + b.EditFileReplaceAll("content/mysection/mysectiontext.txt", "Content.", "Content Edited.").Build() + b.AssertFileContent("public/mysection/index.html", "My Section") + b.AssertRenderCountPage(1) + b.AssertRenderCountContent(1) +} + +func TestRebuildRenameTextFileInLeafBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/mysection/mysectionbundle/index.html", "My Section Bundle Text 2 Content.") + + b.RenameFile("content/mysection/mysectionbundle/mysectionbundletext.txt", "content/mysection/mysectionbundle/mysectionbundletext2.txt").Build() + b.AssertFileContent("public/mysection/mysectionbundle/index.html", "mysectionbundletext2", "My Section Bundle Text 2 Content.") + b.AssertRenderCountPage(3) + b.AssertRenderCountContent(3) +} + +func TestRebuildRenameTextFileInBranchBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/mysection/index.html", "My Section") + + b.RenameFile("content/mysection/mysectiontext.txt", "content/mysection/mysectiontext2.txt").Build() + b.AssertFileContent("public/mysection/index.html", "mysectiontext2", "My Section") + b.AssertRenderCountPage(2) + b.AssertRenderCountContent(2) +} + +func TestRebuildRenameTextFileInHomeBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.AssertFileContent("public/index.html", "Home Text Content.") + + b.RenameFile("content/hometext.txt", "content/hometext2.txt").Build() + b.AssertFileContent("public/index.html", "hometext2", "Home Text Content.") + b.AssertRenderCountPage(2) +} + +func TestRebuildRenameDirectoryWithLeafBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.RenameDir("content/mysection/mysectionbundle", "content/mysection/mysectionbundlerenamed").Build() + b.AssertFileContent("public/mysection/mysectionbundlerenamed/index.html", "My Section Bundle") + b.AssertRenderCountPage(1) +} + +func TestRebuildRenameDirectoryWithBranchBundle(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + b.RenameDir("content/mysection", "content/mysectionrenamed").Build() + b.AssertFileContent("public/mysectionrenamed/index.html", "My Section") + b.AssertFileContent("public/mysectionrenamed/mysectionbundle/index.html", "My Section Bundle") + b.AssertFileContent("public/mysectionrenamed/mysectionbundle/mysectionbundletext.txt", "My Section Bundle Text 2 Content.") + b.AssertRenderCountPage(2) +} + +func TestRebuildRenameDirectoryWithRegularPageUsedInHome(t *testing.T) { + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableLiveReload = true 
+-- content/foo/p1.md -- +--- +title: "P1" +--- +-- layouts/index.html -- +Pages: {{ range .Site.RegularPages }}{{ .RelPermalink }}|{{ end }}$ +` + b := TestRunning(t, files) + + b.AssertFileContent("public/index.html", "Pages: /foo/p1/|$") + + b.RenameDir("content/foo", "content/bar").Build() + + b.AssertFileContent("public/index.html", "Pages: /bar/p1/|$") +} + +func TestRebuildAddRegularFileRegularPageUsedInHomeMultilingual(t *testing.T) { + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableLiveReload = true +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +[languages.fr] +weight = 3 +[languages.a] +weight = 4 +[languages.b] +weight = 5 +[languages.c] +weight = 6 +[languages.d] +weight = 7 +[languages.e] +weight = 8 +[languages.f] +weight = 9 +[languages.g] +weight = 10 +[languages.h] +weight = 11 +[languages.i] +weight = 12 +[languages.j] +weight = 13 +-- content/foo/_index.md -- +-- content/foo/data.txt -- +-- content/foo/p1.md -- +-- content/foo/p1.nn.md -- +-- content/foo/p1.fr.md -- +-- content/foo/p1.a.md -- +-- content/foo/p1.b.md -- +-- content/foo/p1.c.md -- +-- content/foo/p1.d.md -- +-- content/foo/p1.e.md -- +-- content/foo/p1.f.md -- +-- content/foo/p1.g.md -- +-- content/foo/p1.h.md -- +-- content/foo/p1.i.md -- +-- content/foo/p1.j.md -- +-- layouts/index.html -- +RegularPages: {{ range .Site.RegularPages }}{{ .RelPermalink }}|{{ end }}$ +` + b := TestRunning(t, files) + + b.AssertFileContent("public/index.html", "RegularPages: /foo/p1/|$") + b.AssertFileContent("public/nn/index.html", "RegularPages: /nn/foo/p1/|$") + b.AssertFileContent("public/i/index.html", "RegularPages: /i/foo/p1/|$") + + b.AddFiles("content/foo/p2.md", ``).Build() + + b.AssertFileContent("public/index.html", "RegularPages: /foo/p1/|/foo/p2/|$") + b.AssertFileContent("public/fr/index.html", "RegularPages: /fr/foo/p1/|$") + + b.AddFiles("content/foo/p2.fr.md", ``).Build() + b.AssertFileContent("public/fr/index.html", "RegularPages: /fr/foo/p1/|/fr/foo/p2/|$") + + b.AddFiles("content/foo/p2.i.md", ``).Build() + b.AssertFileContent("public/i/index.html", "RegularPages: /i/foo/p1/|/i/foo/p2/|$") +} + +func TestRebuildRenameDirectoryWithBranchBundleFastRender(t *testing.T) { + recentlyVisited := types.NewEvictingStringQueue(10).Add("/a/b/c/") + b := TestRunning(t, rebuildFilesSimple, func(cfg *IntegrationTestConfig) { cfg.BuildCfg = BuildCfg{RecentlyVisited: recentlyVisited} }) + b.RenameDir("content/mysection", "content/mysectionrenamed").Build() + b.AssertFileContent("public/mysectionrenamed/index.html", "My Section") + b.AssertFileContent("public/mysectionrenamed/mysectionbundle/index.html", "My Section Bundle") + b.AssertFileContent("public/mysectionrenamed/mysectionbundle/mysectionbundletext.txt", "My Section Bundle Text 2 Content.") + b.AssertRenderCountPage(2) +} + +func TestRebuilErrorRecovery(t *testing.T) { + b := TestRunning(t, rebuildFilesSimple) + _, err := b.EditFileReplaceAll("content/mysection/mysectionbundle/index.md", "My Section Bundle Content.", "My Section Bundle Content\n\n\n\n{{< foo }}.").BuildE() + + b.Assert(err, qt.Not(qt.IsNil)) + b.Assert(err.Error(), qt.Contains, filepath.FromSlash(`"/content/mysection/mysectionbundle/index.md:8:9": unrecognized character`)) + + // Fix the error + b.EditFileReplaceAll("content/mysection/mysectionbundle/index.md", "{{< foo }}", "{{< foo >}}").Build() +} + +func TestRebuildScopedToOutputFormat(t *testing.T) { + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", 
"taxonomy", "sitemap", "robotstxt", "404"] +disableLiveReload = true +-- content/p1.md -- +--- +title: "P1" +outputs: ["html", "json"] +--- +P1 Content. + +{{< myshort >}} +-- layouts/_default/single.html -- +Single HTML: {{ .Title }}|{{ .Content }}| +-- layouts/_default/single.json -- +Single JSON: {{ .Title }}|{{ .Content }}| +-- layouts/shortcodes/myshort.html -- +My short. +` + b := Test(t, files, TestOptRunning()) + b.AssertRenderCountPage(3) + b.AssertRenderCountContent(1) + b.AssertFileContent("public/p1/index.html", "Single HTML: P1|P1 Content.
\n") + b.AssertFileContent("public/p1/index.json", "Single JSON: P1|P1 Content.
\n") + b.EditFileReplaceAll("layouts/_default/single.html", "Single HTML", "Single HTML Edited").Build() + b.AssertFileContent("public/p1/index.html", "Single HTML Edited: P1|P1 Content.
\n") + b.AssertRenderCountPage(1) + + // Edit shortcode. Note that this is reused across all output formats. + b.EditFileReplaceAll("layouts/shortcodes/myshort.html", "My short", "My short edited").Build() + b.AssertFileContent("public/p1/index.html", "My short edited") + b.AssertFileContent("public/p1/index.json", "My short edited") + b.AssertRenderCountPage(3) // rss (uses .Content) + 2 single pages. +} + +func TestRebuildBaseof(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +title = "Hugo Site" +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +disableLiveReload = true +-- layouts/_default/baseof.html -- +Baseof: {{ .Title }}| +{{ block "main" . }}default{{ end }} +-- layouts/index.html -- +{{ define "main" }} +Home: {{ .Title }}|{{ .Content }}| +{{ end }} +` + b := Test(t, files, TestOptRunning()) + b.AssertFileContent("public/index.html", "Baseof: Hugo Site|", "Home: Hugo Site||") + b.EditFileReplaceFunc("layouts/_default/baseof.html", func(s string) string { + return strings.Replace(s, "Baseof", "Baseof Edited", 1) + }).Build() + b.AssertFileContent("public/index.html", "Baseof Edited: Hugo Site|", "Home: Hugo Site||") +} + +func TestRebuildSingleWithBaseof(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +title = "Hugo Site" +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +disableLiveReload = true +-- content/p1.md -- +--- +title: "P1" +--- +P1 Content. +-- layouts/_default/baseof.html -- +Baseof: {{ .Title }}| +{{ block "main" . }}default{{ end }} +-- layouts/index.html -- +Home. +-- layouts/_default/single.html -- +{{ define "main" }} +Single: {{ .Title }}|{{ .Content }}| +{{ end }} +` + b := Test(t, files, TestOptRunning()) + b.AssertFileContent("public/p1/index.html", "Baseof: P1|\n\nSingle: P1|P1 Content.
\n|") + b.EditFileReplaceFunc("layouts/_default/single.html", func(s string) string { + return strings.Replace(s, "Single", "Single Edited", 1) + }).Build() + b.AssertFileContent("public/p1/index.html", "Baseof: P1|\n\nSingle Edited: P1|P1 Content.
\n|") +} + +func TestRebuildFromString(t *testing.T) { + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy", "sitemap", "robotstxt", "404"] +disableLiveReload = true +-- content/p1.md -- +--- +title: "P1" +layout: "l1" +--- +P1 Content. +-- content/p2.md -- +--- +title: "P2" +layout: "l2" +--- +P2 Content. +-- assets/mytext.txt -- +My Text +-- layouts/_default/l1.html -- +{{ $r := partial "get-resource.html" . }} +L1: {{ .Title }}|{{ .Content }}|R: {{ $r.Content }}| +-- layouts/_default/l2.html -- +L2. +-- layouts/partials/get-resource.html -- +{{ $mytext := resources.Get "mytext.txt" }} +{{ $txt := printf "Text: %s" $mytext.Content }} +{{ $r := resources.FromString "r.txt" $txt }} +{{ return $r }} + +` + b := TestRunning(t, files) + + b.AssertFileContent("public/p1/index.html", "L1: P1|P1 Content.
\n|R: Text: My Text|") + + b.EditFileReplaceAll("assets/mytext.txt", "My Text", "My Text Edited").Build() + + b.AssertFileContent("public/p1/index.html", "L1: P1|P1 Content.
\n|R: Text: My Text Edited|") + + b.AssertRenderCountPage(1) +} + +func TestRebuildDeeplyNestedLink(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +disableKinds = ["term", "taxonomy", "sitemap", "robotstxt", "404"] +disableLiveReload = true +-- content/s/p1.md -- +--- +title: "P1" +--- +-- content/s/p2.md -- +--- +title: "P2" +--- +-- content/s/p3.md -- +--- +title: "P3" +--- +-- content/s/p4.md -- +--- +title: "P4" +--- +-- content/s/p5.md -- +--- +title: "P5" +--- +-- content/s/p6.md -- +--- +title: "P6" +--- +-- content/s/p7.md -- +--- +title: "P7" +--- +-- layouts/_default/list.html -- +List. +-- layouts/_default/single.html -- +Single. +-- layouts/_default/single.html -- +Next: {{ with .PrevInSection }}{{ .Title }}{{ end }}| +Prev: {{ with .NextInSection }}{{ .Title }}{{ end }}| + + +` + + b := TestRunning(t, files) + + b.AssertFileContent("public/s/p1/index.html", "Next: P2|") + b.EditFileReplaceAll("content/s/p7.md", "P7", "P7 Edited").Build() + b.AssertFileContent("public/s/p6/index.html", "Next: P7 Edited|") +} + +func TestRebuildVariations(t *testing.T) { + // t.Parallel() not supported, see https://github.com/fortytw2/leaktest/issues/4 + // This leaktest seems to be a little bit shaky on Travis. + if !htesting.IsCI() { + defer leaktest.CheckTimeout(t, 10*time.Second)() + } + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disdableKinds = ["term", "taxonomy"] +disableLiveReload = true +defaultContentLanguage = "nn" +paginate = 20 +[security] +enableInlineShortcodes = true +[languages] +[languages.en] +weight = 1 +[languages.nn] +weight = 2 +-- content/mysect/p1/index.md -- +--- +title: "P1" +--- +P1 Content. +{{< include "mysect/p2" >}} +§§§go { page="mysect/p3" } +hello +§§§ + +{{< foo.inline >}}Foo{{< /foo.inline >}} +-- content/mysect/p2/index.md -- +--- +title: "P2" +--- +P2 Content. +-- content/mysect/p3/index.md -- +--- +title: "P3" +--- +P3 Content. +-- content/mysect/sub/_index.md -- +-- content/mysect/sub/p4/index.md -- +--- +title: "P4" +--- +P4 Content. +-- content/mysect/sub/p5/index.md -- +--- +title: "P5" +lastMod: 2019-03-02 +--- +P5 Content. +-- content/myothersect/_index.md -- +--- +cascade: +- _target: + cascadeparam: "cascadevalue" +--- +-- content/myothersect/sub/_index.md -- +-- content/myothersect/sub/p6/index.md -- +--- +title: "P6" +--- +P6 Content. +-- content/translations/p7.en.md -- +--- +title: "P7 EN" +--- +P7 EN Content. +-- content/translations/p7.nn.md -- +--- +title: "P7 NN" +--- +P7 NN Content. +-- layouts/index.html -- +Home: {{ .Title }}|{{ .Content }}| +RegularPages: {{ range .RegularPages }}{{ .RelPermalink }}|{{ end }}$ +Len RegularPagesRecursive: {{ len .RegularPagesRecursive }} +Site.Lastmod: {{ .Site.Lastmod.Format "2006-01-02" }}| +Paginate: {{ range (.Paginate .Site.RegularPages).Pages }}{{ .RelPermalink }}|{{ .Title }}|{{ end }}$ +-- layouts/_default/single.html -- +Single: {{ .Title }}|{{ .Content }}| +Single Partial Cached: {{ partialCached "pcached" . }}| +Page.Lastmod: {{ .Lastmod.Format "2006-01-02" }}| +Cascade param: {{ .Params.cascadeparam }}| +-- layouts/_default/list.html -- +List: {{ .Title }}|{{ .Content }}| +RegularPages: {{ range .RegularPages }}{{ .Title }}|{{ end }}$ +Len RegularPagesRecursive: {{ len .RegularPagesRecursive }} +RegularPagesRecursive: {{ range .RegularPagesRecursive }}{{ .RelPermalink }}|{{ end }}$ +List Partial P1: {{ partial "p1" . 
}}| +Page.Lastmod: {{ .Lastmod.Format "2006-01-02" }}| +Cascade param: {{ .Params.cascadeparam }}| +-- layouts/partials/p1.html -- +Partial P1. +-- layouts/partials/pcached.html -- +Partial Pcached. +-- layouts/shortcodes/include.html -- +{{ $p := site.GetPage (.Get 0)}} +{{ with $p }} +Shortcode Include: {{ .Title }}| +{{ end }} +Shortcode .Page.Title: {{ .Page.Title }}| +Shortcode Partial P1: {{ partial "p1" . }}| +-- layouts/_default/_markup/render-codeblock.html -- +{{ $p := site.GetPage (.Attributes.page)}} +{{ with $p }} +Codeblock Include: {{ .Title }}| +{{ end }} + + + +` + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + Running: true, + BuildCfg: BuildCfg{ + testCounters: &buildCounters{}, + }, + // Verbose: true, + // LogLevel: logg.LevelTrace, + }, + ).Build() + + // When running the server, this is done on shutdown. + // Do this here to satisfy the leak detector above. + defer func() { + b.Assert(b.H.Close(), qt.IsNil) + }() + + contentRenderCount := b.counters.contentRenderCounter.Load() + pageRenderCount := b.counters.pageRenderCounter.Load() + + b.Assert(contentRenderCount > 0, qt.IsTrue) + b.Assert(pageRenderCount > 0, qt.IsTrue) + + // Test cases: + // - Edit content file direct + // - Edit content file transitive shortcode + // - Edit content file transitive render hook + // - Rename one languge version of a content file + // - Delete content file, check site.RegularPages and section.RegularPagesRecursive (length) + // - Add content file (see above). + // - Edit shortcode + // - Edit inline shortcode + // - Edit render hook + // - Edit partial used in template + // - Edit partial used in shortcode + // - Edit partial cached. + // - Edit lastMod date in content file, check site.Lastmod. + editFile := func(filename string, replacementFunc func(s string) string) { + b.EditFileReplaceFunc(filename, replacementFunc).Build() + b.Assert(b.counters.contentRenderCounter.Load() < contentRenderCount, qt.IsTrue, qt.Commentf("count %d < %d", b.counters.contentRenderCounter.Load(), contentRenderCount)) + b.Assert(b.counters.pageRenderCounter.Load() < pageRenderCount, qt.IsTrue, qt.Commentf("count %d < %d", b.counters.pageRenderCounter.Load(), pageRenderCount)) + } + + b.AssertFileContent("public/index.html", "RegularPages: $", "Len RegularPagesRecursive: 7", "Site.Lastmod: 2019-03-02") + + b.AssertFileContent("public/mysect/p1/index.html", + "Single: P1|P1 Content.", + "Shortcode Include: P2|", + "Codeblock Include: P3|") + + editFile("content/mysect/p1/index.md", func(s string) string { + return strings.ReplaceAll(s, "P1", "P1 Edited") + }) + + b.AssertFileContent("public/mysect/p1/index.html", "Single: P1 Edited|
P1 Edited Content.") + b.AssertFileContent("public/index.html", "RegularPages: $", "Len RegularPagesRecursive: 7", "Paginate: /mysect/sub/p5/|P5|/mysect/p1/|P1 Edited") + b.AssertFileContent("public/mysect/index.html", "RegularPages: P1 Edited|P2|P3|$", "Len RegularPagesRecursive: 5") + + // p2 is included in p1 via shortcode. + editFile("content/mysect/p2/index.md", func(s string) string { + return strings.ReplaceAll(s, "P2", "P2 Edited") + }) + + b.AssertFileContent("public/mysect/p1/index.html", "Shortcode Include: P2 Edited|") + + // p3 is included in p1 via codeblock hook. + editFile("content/mysect/p3/index.md", func(s string) string { + return strings.ReplaceAll(s, "P3", "P3 Edited") + }) + + b.AssertFileContent("public/mysect/p1/index.html", "Codeblock Include: P3 Edited|") + + // Remove a content file in a nested section. + b.RemoveFiles("content/mysect/sub/p4/index.md").Build() + b.AssertFileContent("public/mysect/index.html", "RegularPages: P1 Edited|P2 Edited|P3 Edited", "Len RegularPagesRecursive: 4") + b.AssertFileContent("public/mysect/sub/index.html", "RegularPages: P5|$", "RegularPagesRecursive: 1") + + // Rename one of the translations. + b.AssertFileContent("public/translations/index.html", "RegularPagesRecursive: /translations/p7/") + b.AssertFileContent("public/en/translations/index.html", "RegularPagesRecursive: /en/translations/p7/") + b.RenameFile("content/translations/p7.nn.md", "content/translations/p7rename.nn.md").Build() + b.AssertFileContent("public/translations/index.html", "RegularPagesRecursive: /translations/p7rename/") + b.AssertFileContent("public/en/translations/index.html", "RegularPagesRecursive: /en/translations/p7/") + + // Edit shortcode + editFile("layouts/shortcodes/include.html", func(s string) string { + return s + "\nShortcode Include Edited." + }) + b.AssertFileContent("public/mysect/p1/index.html", "Shortcode Include Edited.") + + // Edit render hook + editFile("layouts/_default/_markup/render-codeblock.html", func(s string) string { + return s + "\nCodeblock Include Edited." + }) + b.AssertFileContent("public/mysect/p1/index.html", "Codeblock Include Edited.") + + // Edit partial p1 + editFile("layouts/partials/p1.html", func(s string) string { + return strings.Replace(s, "Partial P1", "Partial P1 Edited", 1) + }) + b.AssertFileContent("public/mysect/index.html", "List Partial P1: Partial P1 Edited.") + b.AssertFileContent("public/mysect/p1/index.html", "Shortcode Partial P1: Partial P1 Edited.") + + // Edit partial cached. + editFile("layouts/partials/pcached.html", func(s string) string { + return strings.Replace(s, "Partial Pcached", "Partial Pcached Edited", 1) + }) + b.AssertFileContent("public/mysect/p1/index.html", "Pcached Edited.") + + // Edit lastMod date in content file, check site.Lastmod. + editFile("content/mysect/sub/p5/index.md", func(s string) string { + return strings.Replace(s, "2019-03-02", "2020-03-10", 1) + }) + b.AssertFileContent("public/index.html", "Site.Lastmod: 2020-03-10|") + b.AssertFileContent("public/mysect/index.html", "Page.Lastmod: 2020-03-10|") + + // Adjust the date back a few days. + editFile("content/mysect/sub/p5/index.md", func(s string) string { + return strings.Replace(s, "2020-03-10", "2019-03-08", 1) + }) + b.AssertFileContent("public/mysect/index.html", "Page.Lastmod: 2019-03-08|") + b.AssertFileContent("public/index.html", "Site.Lastmod: 2019-03-08|") + + // Check cascade mods. 
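	// The section fixture content/myothersect/_index.md in this test declares, in outline:
	//
	//	---
	//	cascade:
	//	- _target:
	//	  cascadeparam: "cascadevalue"
	//	---
	//
	// The param is inherited by every descendant page, so an edit to that front matter
	// must re-render the whole subtree; that is what the assertions and edits below verify.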
+ b.AssertFileContent("public/myothersect/index.html", "Cascade param: cascadevalue|") + b.AssertFileContent("public/myothersect/sub/index.html", "Cascade param: cascadevalue|") + b.AssertFileContent("public/myothersect/sub/p6/index.html", "Cascade param: cascadevalue|") + + editFile("content/myothersect/_index.md", func(s string) string { + return strings.Replace(s, "cascadevalue", "cascadevalue edited", 1) + }) + b.AssertFileContent("public/myothersect/index.html", "Cascade param: cascadevalue edited|") + b.AssertFileContent("public/myothersect/sub/p6/index.html", "Cascade param: cascadevalue edited|") + + // Repurpose the cascadeparam to set the title. + editFile("content/myothersect/_index.md", func(s string) string { + return strings.Replace(s, "cascadeparam:", "title:", 1) + }) + b.AssertFileContent("public/myothersect/sub/index.html", "Cascade param: |", "List: cascadevalue edited|") + + // Revert it. + editFile("content/myothersect/_index.md", func(s string) string { + return strings.Replace(s, "title:", "cascadeparam:", 1) + }) + b.AssertFileContent("public/myothersect/sub/index.html", "Cascade param: cascadevalue edited|", "List: |") +} + +func TestRebuildVariationsJSNoneFingerprinted(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +disableKinds = ["term", "taxonomy", "sitemap", "robotsTXT", "404", "rss"] +disableLiveReload = true +-- content/p1/index.md -- +--- +title: "P1" +--- +P1. +-- content/p2/index.md -- +--- +title: "P2" +--- +P2. +-- content/p3/index.md -- +--- +title: "P3" +--- +P3. +-- content/p4/index.md -- +--- +title: "P4" +--- +P4. +-- assets/main.css -- +body { + background: red; +} +-- layouts/default/list.html -- +List. +-- layouts/_default/single.html -- +Single. +{{ $css := resources.Get "main.css" | minify }} +RelPermalink: {{ $css.RelPermalink }}| + +` + + b := TestRunning(t, files) + + b.AssertFileContent("public/p1/index.html", "RelPermalink: /main.min.css|") + b.AssertFileContent("public/main.min.css", "body{background:red}") + + b.EditFileReplaceAll("assets/main.css", "red", "blue") + b.RemoveFiles("content/p2/index.md") + b.RemoveFiles("content/p3/index.md") + b.Build() + + b.AssertFileContent("public/main.min.css", "body{background:blue}") +} + +func TestRebuildVariationsJSInNestedCachedPartialFingerprinted(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +disableKinds = ["term", "taxonomy", "sitemap", "robotsTXT", "404", "rss"] +disableLiveReload = true +-- content/p1/index.md -- +--- +title: "P1" +--- +P1. +-- content/p2/index.md -- +--- +title: "P2" +--- +P2. +-- content/p3/index.md -- +--- +title: "P3" +--- +P3. +-- content/p4/index.md -- +--- +title: "P4" +--- +P4. +-- assets/js/main.js -- +console.log("Hello"); +-- layouts/_default/list.html -- +List. {{ partial "head.html" . }}$ +-- layouts/_default/single.html -- +Single. {{ partial "head.html" . }}$ +-- layouts/partials/head.html -- +{{ partialCached "js.html" . 
}}$ +-- layouts/partials/js.html -- +{{ $js := resources.Get "js/main.js" | js.Build | fingerprint }} +RelPermalink: {{ $js.RelPermalink }}| +` + + b := TestRunning(t, files) + + b.AssertFileContent("public/p1/index.html", "/js/main.712a50b59d0f0dedb4e3606eaa3860b1f1a5305f6c42da30a2985e473ba314eb.js") + b.AssertFileContent("public/index.html", "/js/main.712a50b59d0f0dedb4e3606eaa3860b1f1a5305f6c42da30a2985e473ba314eb.js") + + b.EditFileReplaceAll("assets/js/main.js", "Hello", "Hello is Edited").Build() + + for i := 1; i < 5; i++ { + b.AssertFileContent(fmt.Sprintf("public/p%d/index.html", i), "/js/main.6535698cec9a21875f40ae03e96f30c4bee41a01e979224761e270b9034b2424.js") + } + + b.AssertFileContent("public/index.html", "/js/main.6535698cec9a21875f40ae03e96f30c4bee41a01e979224761e270b9034b2424.js") +} + +func TestRebuildVariationsJSInNestedPartialFingerprintedInBase(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com/" +disableKinds = ["term", "taxonomy", "sitemap", "robotsTXT", "404", "rss"] +disableLiveReload = true +-- assets/js/main.js -- +console.log("Hello"); +-- layouts/_default/baseof.html -- +Base. {{ partial "common/head.html" . }}$ +{{ block "main" . }}default{{ end }} +-- layouts/_default/list.html -- +{{ define "main" }}main{{ end }} +-- layouts/partials/common/head.html -- +{{ partial "myfiles/js.html" . }}$ +-- layouts/partials/myfiles/js.html -- +{{ $js := resources.Get "js/main.js" | js.Build | fingerprint }} +RelPermalink: {{ $js.RelPermalink }}| +` + + b := TestRunning(t, files) + + b.AssertFileContent("public/index.html", "/js/main.712a50b59d0f0dedb4e3606eaa3860b1f1a5305f6c42da30a2985e473ba314eb.js") + + b.EditFileReplaceAll("assets/js/main.js", "Hello", "Hello is Edited").Build() + + b.AssertFileContent("public/index.html", "/js/main.6535698cec9a21875f40ae03e96f30c4bee41a01e979224761e270b9034b2424.js") +} + +func TestRebuildVariationsJSBundled(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy", "sitemap", "robotsTXT", "404", "rss"] +disableLiveReload = true +-- content/_index.md -- +--- +title: "Home" +--- +-- content/p1.md -- +--- +title: "P1" +layout: "main" +--- +-- content/p2.md -- +--- +title: "P2" +--- +{{< jsfingerprinted >}} +-- content/p3.md -- +--- +title: "P3" +layout: "plain" +--- +{{< jsfingerprinted >}} +-- content/main.js -- +console.log("Hello"); +-- content/foo.js -- +console.log("Foo"); +-- layouts/index.html -- +Home. +{{ $js := site.Home.Resources.Get "main.js" }} +{{ with $js }} + +{{ end }} +-- layouts/_default/single.html -- +Single. Deliberately no .Content in here. +-- layouts/_default/plain.html -- +Content: {{ .Content }}| +-- layouts/_default/main.html -- +{{ $js := site.Home.Resources.Get "main.js" }} +{{ with $js }} + +{{ end }} +-- layouts/shortcodes/jsfingerprinted.html -- +{{ $js := site.Home.Resources.Get "foo.js" | fingerprint }} + +` + + testCounters := &buildCounters{} + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + Running: true, + // LogLevel: logg.LevelTrace, + // Verbose: true, + BuildCfg: BuildCfg{ + testCounters: testCounters, + }, + }, + ).Build() + + b.AssertFileContent("public/index.html", ``) + b.AssertFileContent("public/p1/index.html", "") + b.AssertFileContent("public/p2/index.html", "Single. 
Deliberately no .Content in here.") + b.AssertFileContent("public/p3/index.html", "foo.57b4465b908531b43d4e4680ab1063d856b475cb1ae81ad43e0064ecf607bec1.js") + b.AssertRenderCountPage(4) + + // Edit JS file. + b.EditFileReplaceFunc("content/main.js", func(s string) string { + return strings.Replace(s, "Hello", "Hello is Edited", 1) + }).Build() + + b.AssertFileContent("public/p1/index.html", "") + // The p1 (the one inlining the JS) should be rebuilt. + b.AssertRenderCountPage(2) + // But not the content file. + b.AssertRenderCountContent(0) + + // This is included with RelPermalink in a shortcode used in p3, but it's fingerprinted + // so we need to rebuild on change. + b.EditFileReplaceFunc("content/foo.js", func(s string) string { + return strings.Replace(s, "Foo", "Foo Edited", 1) + }).Build() + + // Verify that the hash has changed. + b.AssertFileContent("public/p3/index.html", "foo.3a332a088521231e5fc9bd22f15e0ccf507faa7b373fbff22959005b9a80481c.js") + + b.AssertRenderCountPage(1) + b.AssertRenderCountContent(1) +} + +func TestRebuildEditData(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +disableLiveReload = true +[security] +enableInlineShortcodes=true +-- data/mydata.yaml -- +foo: bar +-- content/_index.md -- +--- +title: "Home" +--- +{{< data "mydata.foo" >}}} +-- content/p1.md -- +--- +title: "P1" +--- + +Foo inline: {{< foo.inline >}}{{ site.Data.mydata.foo }}|{{< /foo.inline >}} +-- layouts/shortcodes/data.html -- +{{ $path := split (.Get 0) "." }} +{{ $data := index site.Data $path }} +Foo: {{ $data }}| +-- layouts/index.html -- +Content: {{ .Content }}| +-- layouts/_default/single.html -- +Single: {{ .Content }}| +` + b := TestRunning(t, files) + + b.AssertFileContent("public/index.html", "Foo: bar|") + b.AssertFileContent("public/p1/index.html", "Foo inline: bar|") + b.EditFileReplaceFunc("data/mydata.yaml", func(s string) string { + return strings.Replace(s, "bar", "bar edited", 1) + }).Build() + b.AssertFileContent("public/index.html", "Foo: bar edited|") + b.AssertFileContent("public/p1/index.html", "Foo inline: bar edited|") +} + +func TestRebuildEditHomeContent(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableLiveReload = true +-- content/_index.md -- +--- +title: "Home" +--- +Home. +-- layouts/index.html -- +Content: {{ .Content }} +` + b := TestRunning(t, files) + + b.AssertFileContent("public/index.html", "Content:
Home.
") + b.EditFileReplaceAll("content/_index.md", "Home.", "Home").Build() + b.AssertFileContent("public/index.html", "Content:Home
") +} + +func TestRebuildVariationsAssetsJSImport(t *testing.T) { + t.Parallel() + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +disableLiveReload = true +-- layouts/index.html -- +Home. {{ now }} +{{ with (resources.Get "js/main.js" | js.Build | fingerprint) }} + +{{ end }} +-- assets/js/lib/foo.js -- +export function foo() { + console.log("Foo"); +} +-- assets/js/main.js -- +import { foo } from "./lib/foo.js"; +console.log("Hello"); +foo(); +` + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + Running: true, + // LogLevel: logg.LevelTrace, + NeedsOsFS: true, + }, + ).Build() + + b.AssertFileContent("public/index.html", "Home.", "Hello", "Foo") + // Edit the imported file. + b.EditFileReplaceAll("assets/js/lib/foo.js", "Foo", "Foo Edited").Build() + b.AssertFileContent("public/index.html", "Home.", "Hello", "Foo Edited") +} + +func TestRebuildVariationsAssetsPostCSSImport(t *testing.T) { + if !htesting.IsCI() { + t.Skip("skip CI only") + } + + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy", "sitemap", "rss"] +disableLiveReload = true +-- assets/css/lib/foo.css -- +body { + background: red; +} +-- assets/css/main.css -- +@import "lib/foo.css"; +-- package.json -- +{ + "devDependencies": { + "postcss-cli": "^9.0.1" + } +} +-- content/p1.md -- +--- +title: "P1" +--- +-- content/p2.md -- +--- +title: "P2" +layout: "foo" +--- +{{< fingerprinted >}} +-- content/p3.md -- +--- +title: "P3" +layout: "foo" +--- +{{< notfingerprinted >}} +-- layouts/shortcodes/fingerprinted.html -- +Fingerprinted. +{{ $opts := dict "inlineImports" true "noMap" true }} +{{ with (resources.Get "css/main.css" | postCSS $opts | fingerprint) }} + +{{ end }} +-- layouts/shortcodes/notfingerprinted.html -- +Fingerprinted. +{{ $opts := dict "inlineImports" true "noMap" true }} +{{ with (resources.Get "css/main.css" | postCSS $opts) }} + +{{ end }} +-- layouts/index.html -- +Home. +{{ $opts := dict "inlineImports" true "noMap" true }} +{{ with (resources.Get "css/main.css" | postCSS $opts) }} + +{{ end }} +-- layouts/_default/foo.html -- +Foo. +{{ .Title }}|{{ .Content }}| +-- layouts/_default/single.html -- +Single. +{{ $opts := dict "inlineImports" true "noMap" true }} +{{ with (resources.Get "css/main.css" | postCSS $opts) }} + +{{ end }} +` + + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + Running: true, + NeedsOsFS: true, + NeedsNpmInstall: true, + // LogLevel: logg.LevelTrace, + }, + ).Build() + + b.AssertFileContent("public/index.html", "Home.", "") + b.AssertFileContent("public/p1/index.html", "Single.", "/css/main.css") + b.AssertRenderCountPage(4) + + // Edit the imported file. + b.EditFileReplaceFunc("assets/css/lib/foo.css", func(s string) string { + return strings.Replace(s, "red", "blue", 1) + }).Build() + + b.AssertRenderCountPage(4) + + b.AssertFileContent("public/index.html", "Home.", "") +} + +func TestRebuildVariationsAssetsSassImport(t *testing.T) { + if !htesting.IsCI() { + t.Skip("skip CI only") + } + + filesTemplate := ` +-- hugo.toml -- +baseURL = "https://example.com" +disableKinds = ["term", "taxonomy"] +disableLiveReload = true +-- assets/css/lib/foo.scss -- +body { + background: red; +} +-- assets/css/main.scss -- +@import "lib/foo"; +-- layouts/index.html -- +Home. 
+{{ $opts := dict "transpiler" "TRANSPILER" }} +{{ with (resources.Get "css/main.scss" | toCSS $opts) }} + +{{ end }} +` + + runTest := func(transpiler string) { + t.Run(transpiler, func(t *testing.T) { + files := strings.Replace(filesTemplate, "TRANSPILER", transpiler, 1) + b := NewIntegrationTestBuilder( + IntegrationTestConfig{ + T: t, + TxtarString: files, + Running: true, + NeedsOsFS: true, + }, + ).Build() + + b.AssertFileContent("public/index.html", "Home.", "background: red") + + // Edit the imported file. + b.EditFileReplaceFunc("assets/css/lib/foo.scss", func(s string) string { + return strings.Replace(s, "red", "blue", 1) + }).Build() + + b.AssertFileContent("public/index.html", "Home.", "background: blue") + }) + } + + if scss.Supports() { + runTest("libsass") + } + + if dartsass.Supports() { + runTest("dartsass") + } +} + +func benchmarkFilesEdit(count int) string { + files := ` +-- hugo.toml -- +baseURL = "https://example.com" +disdableKinds = ["term", "taxonomy"] +disableLiveReload = true +-- layouts/_default/single.html -- +Single: {{ .Title }}|{{ .Content }}| +-- layouts/_default/list.html -- +List: {{ .Title }}|{{ .Content }}| +-- content/mysect/_index.md -- +--- +title: "My Sect" +--- + ` + + contentTemplate := ` +--- +title: "P%d" +--- +P%d Content. +` + + for i := 0; i < count; i++ { + files += fmt.Sprintf("-- content/mysect/p%d/index.md --\n%s", i, fmt.Sprintf(contentTemplate, i, i)) + } + + return files +} + +func BenchmarkRebuildContentFileChange(b *testing.B) { + files := benchmarkFilesEdit(500) + + cfg := IntegrationTestConfig{ + T: b, + TxtarString: files, + Running: true, + // Verbose: true, + // LogLevel: logg.LevelInfo, + } + builders := make([]*IntegrationTestBuilder, b.N) + + for i := range builders { + builders[i] = NewIntegrationTestBuilder(cfg) + builders[i].Build() + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + bb := builders[i] + bb.EditFileReplaceFunc("content/mysect/p123/index.md", func(s string) string { + return s + "... Edited" + }).Build() + // fmt.Println(bb.LogString()) + } +} diff --git a/hugolib/rendershortcodes_test.go b/hugolib/rendershortcodes_test.go index c6fa711cce7..d0bc0546c41 100644 --- a/hugolib/rendershortcodes_test.go +++ b/hugolib/rendershortcodes_test.go @@ -1,4 +1,4 @@ -// Copyright 2023 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -63,7 +63,7 @@ Fragments: {{ .Fragments.Identifiers }}| HasShortcode Level 1: {{ .HasShortcode "include" }}| HasShortcode Level 2: {{ .HasShortcode "withmarkdown" }}| HasShortcode Level 3: {{ .HasShortcode "level3" }}| -HasSHortcode not found: {{ .HasShortcode "notfound" }}| +HasShortcode not found: {{ .HasShortcode "notfound" }}| Content: {{ .Content }}| ` @@ -79,11 +79,8 @@ Content: {{ .Content }}| "HasShortcode Level 1: true|", "HasShortcode Level 2: true|", "HasShortcode Level 3: true|", - "HasSHortcode not found: false|", + "HasShortcode not found: false|", ) - - // TODO1 more assertions. - } func TestRenderShortcodesNestedMultipleOutputFormatTemplates(t *testing.T) { @@ -130,7 +127,6 @@ JSON: {{ .Content }} b.AssertFileContent("public/p1/index.html", "Myshort HTML") b.AssertFileContent("public/p1/index.json", "Myshort JSON") - } func TestRenderShortcodesEditNested(t *testing.T) { @@ -159,27 +155,12 @@ title: "p2" Myshort Original. 
-- layouts/_default/single.html -- {{ .Content }} - - - ` - - b := NewIntegrationTestBuilder( - IntegrationTestConfig{ - T: t, - TxtarString: files, - Running: true, - }, - ).Build() - + b := TestRunning(t, files) b.AssertFileContent("public/p1/index.html", "Myshort Original.") - b.EditFileReplace("layouts/shortcodes/myshort.html", func(s string) string { - return "Myshort Edited." - }) - b.Build() + b.EditFileReplaceAll("layouts/shortcodes/myshort.html", "Original", "Edited").Build() b.AssertFileContent("public/p1/index.html", "Myshort Edited.") - } func TestRenderShortcodesEditIncludedPage(t *testing.T) { @@ -223,10 +204,9 @@ Myshort Original. b.AssertFileContent("public/p1/index.html", "Original") - b.EditFileReplace("content/p2.md", func(s string) string { + b.EditFileReplaceFunc("content/p2.md", func(s string) string { return strings.Replace(s, "Original", "Edited", 1) }) b.Build() b.AssertFileContent("public/p1/index.html", "Edited") - } diff --git a/hugolib/renderstring_test.go b/hugolib/renderstring_test.go index e0a4cd0369a..40980bdcb4c 100644 --- a/hugolib/renderstring_test.go +++ b/hugolib/renderstring_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 The Hugo Authors. All rights reserved. +// Copyright 2024 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -121,7 +121,6 @@ HasShortcode: foo:{{ .HasShortcode "foo" }}:false ` t.Run("Basic", func(t *testing.T) { - b := NewIntegrationTestBuilder( IntegrationTestConfig{ T: t, @@ -139,11 +138,9 @@ HasShortcode: mark2:true:true HasShortcode: foo:false:false Page Type: *hugolib.pageForShortcode`, ) - }) t.Run("Edit shortcode", func(t *testing.T) { - b := NewIntegrationTestBuilder( IntegrationTestConfig{ T: t, @@ -157,7 +154,6 @@ Page Type: *hugolib.pageForShortcode`, b.AssertFileContent("public/p1/index.html", `Edit shortcode`, ) - }) } @@ -189,7 +185,6 @@ Page Kind: home Has myshort: true Has other: false `) - } func TestRenderStringWithShortcodeIssue10654(t *testing.T) { diff --git a/hugolib/resource_chain_test.go b/hugolib/resource_chain_test.go index 17c3b2f0cbc..1365db72c67 100644 --- a/hugolib/resource_chain_test.go +++ b/hugolib/resource_chain_test.go @@ -36,11 +36,10 @@ func TestResourceChainBasic(t *testing.T) { failIfHandler := func(h http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/fail.jpg" { - http.Error(w, "{ msg: failed }", 501) + http.Error(w, "{ msg: failed }", http.StatusNotImplemented) return } h.ServeHTTP(w, r) - }) } ts := httptest.NewServer( @@ -89,7 +88,7 @@ FAILED REMOTE ERROR DETAILS CONTENT: {{ with $failedImg.Err }}|{{ . }}|{{ with . fs := b.Fs.Source imageDir := filepath.Join("assets", "images") - b.Assert(os.MkdirAll(imageDir, 0777), qt.IsNil) + b.Assert(os.MkdirAll(imageDir, 0o777), qt.IsNil) src, err := os.Open("testdata/sunset.jpg") b.Assert(err, qt.IsNil) out, err := fs.Create(filepath.Join(imageDir, "sunset.jpg")) @@ -101,18 +100,18 @@ FAILED REMOTE ERROR DETAILS CONTENT: {{ with $failedImg.Err }}|{{ . }}|{{ with . 
b.Running() for i := 0; i < 2; i++ { - + b.Logf("Test run %d", i) b.Build(BuildCfg{}) b.AssertFileContent("public/index.html", fmt.Sprintf(` -SUNSET: images/sunset.jpg|/images/sunset.a9bf1d944e19c0f382e0d8f51de690f7d0bc8fa97390c4242a86c3e5c0737e71.jpg|900|90587 -FIT: images/sunset.jpg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_fit_q75_box.jpg|200 +SUNSET: /images/sunset.jpg|/images/sunset.a9bf1d944e19c0f382e0d8f51de690f7d0bc8fa97390c4242a86c3e5c0737e71.jpg|900|90587 +FIT: /images/sunset.jpg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_fit_q75_box.jpg|200 CSS integrity Data first: sha256-od9YaHw8nMOL8mUy97Sy8sKwMV3N4hI3aVmZXATxH+8= /styles.min.a1df58687c3c9cc38bf26532f7b4b2f2c2b0315dcde212376959995c04f11fef.css CSS integrity Data last: /styles2.min.1cfc52986836405d37f9998a63fd6dd8608e8c410e5e3db1daaa30f78bc273ba.css sha256-HPxSmGg2QF03+ZmKY/1t2GCOjEEOXj2x2qow94vCc7o= -SUNSET REMOTE: sunset_%[1]s.jpg|/sunset_%[1]s.a9bf1d944e19c0f382e0d8f51de690f7d0bc8fa97390c4242a86c3e5c0737e71.jpg|900|90587 -FIT REMOTE: sunset_%[1]s.jpg|/sunset_%[1]s_hu59e56ffff1bc1d8d122b1403d34e039f_0_200x200_fit_q75_box.jpg|200 +SUNSET REMOTE: /sunset_%[1]s.jpg|/sunset_%[1]s.a9bf1d944e19c0f382e0d8f51de690f7d0bc8fa97390c4242a86c3e5c0737e71.jpg|900|90587 +FIT REMOTE: /sunset_%[1]s.jpg|/sunset_%[1]s_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_fit_q75_box.jpg|200 REMOTE NOT FOUND: OK LOCAL NOT FOUND: OK PRINT PROTOCOL ERROR DETAILS: Err: error calling resources.GetRemote: Get "gopher://example.org": unsupported protocol scheme "gopher"|| @@ -125,9 +124,9 @@ FAILED REMOTE ERROR DETAILS CONTENT: |failed to fetch remote resource: Not Imple b.AssertFileContent("public/styles.min.a1df58687c3c9cc38bf26532f7b4b2f2c2b0315dcde212376959995c04f11fef.css", "body{background-color:#add8e6}") b.AssertFileContent("public//styles2.min.1cfc52986836405d37f9998a63fd6dd8608e8c410e5e3db1daaa30f78bc273ba.css", "body{background-color:orange}") - b.EditFiles("page1.md", ` + b.EditFiles("content/_index.md", ` --- -title: "Page 1 edit" +title: "Home edit" summary: "Edited summary" --- @@ -135,9 +134,6 @@ Edited content. `) - b.Assert(b.Fs.WorkingDirWritable.Remove("public"), qt.IsNil) - b.H.ResourceSpec.ClearCaches() - } } @@ -147,7 +143,9 @@ func TestResourceChainPostProcess(t *testing.T) { rnd := rand.New(rand.NewSource(time.Now().UnixNano())) b := newTestSitesBuilder(t) - b.WithConfigFile("toml", `[minify] + b.WithConfigFile("toml", ` +disableLiveReload = true +[minify] minifyOutput = true [minify.tdewolff] [minify.tdewolff.html] @@ -184,7 +182,7 @@ End.`) b.AssertFileContent("public/index.html", `Start. HELLO: /hello.min.a2d1cb24f24b322a7dad520414c523e9.html|Integrity: md5-otHLJPJLMip9rVIEFMUj6Q==|MediaType: text/html -HELLO2: Name: hello.html|Content:List 1
\necho "foo";\n
")
-
}
func TestShortcodeHighlightDeindent(t *testing.T) {
@@ -1041,7 +1037,6 @@ title: "p1"
`)
-
}
// Issue 10236.
@@ -1073,7 +1068,6 @@ Title: {{ .Get "title" | safeHTML }}
).Build()
b.AssertFileContent("public/p1/index.html", `Title: Steve "Francia".`)
-
}
// Issue 10391.
@@ -1166,7 +1160,6 @@ C'est un test
).Build()
b.AssertFileContent("public/fr/p2/index.html", `plus-dinformations`)
-
}
// Issue 10671.
@@ -1281,5 +1274,4 @@ Hello.
).Build()
b.AssertFileContent("public/p1/index.html", "Hello.")
-
}
diff --git a/hugolib/site.go b/hugolib/site.go
index c682eebc99b..312f6b97f1c 100644
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -1,4 +1,4 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -19,18 +19,18 @@ import (
"io"
"mime"
"net/url"
- "path"
"path/filepath"
"runtime"
"sort"
"strings"
+ "sync"
"time"
"github.com/bep/logg"
- "github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/common/htime"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/hugolib/doctree"
"golang.org/x/text/unicode/norm"
"github.com/gohugoio/hugo/common/paths"
@@ -41,11 +41,6 @@ import (
"github.com/gohugoio/hugo/markup/converter"
- "github.com/gohugoio/hugo/hugofs/files"
- hglob "github.com/gohugoio/hugo/hugofs/glob"
-
- "github.com/gohugoio/hugo/common/maps"
-
"github.com/gohugoio/hugo/common/text"
"github.com/gohugoio/hugo/publisher"
@@ -55,19 +50,14 @@ import (
"github.com/gohugoio/hugo/resources/kinds"
"github.com/gohugoio/hugo/resources/page"
- "github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/lazy"
"github.com/fsnotify/fsnotify"
bp "github.com/gohugoio/hugo/bufferpool"
- "github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/navigation"
"github.com/gohugoio/hugo/output"
- "github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/tpl"
-
- "github.com/spf13/afero"
)
func (s *Site) Taxonomies() page.TaxonomyList {
@@ -75,25 +65,32 @@ func (s *Site) Taxonomies() page.TaxonomyList {
return s.taxonomies
}
-type taxonomiesConfig map[string]string
+type (
+ taxonomiesConfig map[string]string
+ taxonomiesConfigValues struct {
+ views []viewName
+ viewsByTreeKey map[string]viewName
+ }
+)
-func (t taxonomiesConfig) Values() []viewName {
- var vals []viewName
+func (t taxonomiesConfig) Values() taxonomiesConfigValues {
+ var views []viewName
for k, v := range t {
- vals = append(vals, viewName{singular: k, plural: v})
+ views = append(views, viewName{singular: k, plural: v, pluralTreeKey: cleanTreeKey(v)})
}
- sort.Slice(vals, func(i, j int) bool {
- return vals[i].plural < vals[j].plural
+ sort.Slice(views, func(i, j int) bool {
+ return views[i].plural < views[j].plural
})
- return vals
-}
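+ // Index the views by their plural tree key for fast lookup.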
+ viewsByTreeKey := make(map[string]viewName)
+ for _, v := range views {
+ viewsByTreeKey[v.pluralTreeKey] = v
+ }
-type siteConfigHolder struct {
- sitemap config.SitemapConfig
- taxonomiesConfig taxonomiesConfig
- timeout time.Duration
- hasCJKLanguage bool
+ return taxonomiesConfigValues{
+ views: views,
+ viewsByTreeKey: viewsByTreeKey,
+ }
}
// Lazily loaded site dependencies.
@@ -111,15 +108,6 @@ func (init *siteInit) Reset() {
init.taxonomies.Reset()
}
-func (s *Site) initInit(ctx context.Context, init *lazy.Init, pctx pageContext) bool {
- _, err := init.Do(ctx)
-
- if err != nil {
- s.h.FatalError(pctx.wrapError(err))
- }
- return err == nil
-}
-
func (s *Site) prepareInits() {
s.init = &siteInit{}
@@ -153,11 +141,6 @@ func (s *Site) prepareInits() {
})
s.init.prevNextInSection = init.Branch(func(context.Context) (any, error) {
- var sections page.Pages
- s.home.treeRef.m.collectSectionsRecursiveIncludingSelf(pageMapQuery{Prefix: s.home.treeRef.key}, func(n *contentNode) {
- sections = append(sections, n.p)
- })
-
setNextPrev := func(pas page.Pages) {
for i, p := range pas {
np, ok := p.(nextPrevInSectionProvider)
@@ -183,40 +166,35 @@ func (s *Site) prepareInits() {
}
}
- for _, sect := range sections {
- treeRef := sect.(treeRefProvider).getTreeRef()
-
- var pas page.Pages
- treeRef.m.collectPages(pageMapQuery{Prefix: treeRef.key + cmBranchSeparator}, func(c *contentNode) {
- pas = append(pas, c.p)
- })
- page.SortByDefault(pas)
+ sections := s.pageMap.getPagesInSection(
+ pageMapQueryPagesInSection{
+ pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{
+ Path: "",
+ KeyPart: "sectionorhome",
+ Include: pagePredicates.KindSection.Or(pagePredicates.KindHome),
+ },
+ IncludeSelf: true,
+ Recursive: true,
+ },
+ )
- setNextPrev(pas)
+ for _, section := range sections {
+ setNextPrev(section.RegularPages())
}
- // The root section only goes one level down.
- treeRef := s.home.getTreeRef()
-
- var pas page.Pages
- treeRef.m.collectPages(pageMapQuery{Prefix: treeRef.key + cmBranchSeparator}, func(c *contentNode) {
- pas = append(pas, c.p)
- })
- page.SortByDefault(pas)
-
- setNextPrev(pas)
-
return nil, nil
})
s.init.menus = init.Branch(func(context.Context) (any, error) {
- s.assembleMenus()
- return nil, nil
+ err := s.assembleMenus()
+ return nil, err
})
- s.init.taxonomies = init.Branch(func(context.Context) (any, error) {
- err := s.pageMap.assembleTaxonomies()
- return nil, err
+ s.init.taxonomies = init.Branch(func(ctx context.Context) (any, error) {
+ if err := s.pageMap.CreateSiteTaxonomies(ctx); err != nil {
+ return nil, err
+ }
+ return s.taxonomies, nil
})
}
@@ -232,20 +210,25 @@ func (s *Site) Menus() navigation.Menus {
func (s *Site) initRenderFormats() {
formatSet := make(map[string]bool)
formats := output.Formats{}
- rssDisabled := !s.conf.IsKindEnabled("rss")
- s.pageMap.pageTrees.WalkRenderable(func(s string, n *contentNode) bool {
- for _, f := range n.p.m.configuredOutputFormats {
- if rssDisabled && f.Name == "rss" {
- // legacy
- continue
- }
- if !formatSet[f.Name] {
- formats = append(formats, f)
- formatSet[f.Name] = true
+
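+ // Walk all pages and collect their configured output formats.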
+ w := &doctree.NodeShiftTreeWalker[contentNodeI]{
+ Tree: s.pageMap.treePages,
+ Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
+ if p, ok := n.(*pageState); ok {
+ for _, f := range p.m.configuredOutputFormats {
+ if !formatSet[f.Name] {
+ formats = append(formats, f)
+ formatSet[f.Name] = true
+ }
+ }
}
- }
- return false
- })
+ return false, nil
+ },
+ }
+
+ if err := w.Walk(context.TODO()); err != nil {
+ panic(err)
+ }
// Add the per kind configured output formats
for _, kind := range kinds.AllKindsInPages {
@@ -275,10 +258,6 @@ func (s *Site) Languages() langs.Languages {
return s.h.Configs.Languages
}
-func (s *Site) isEnabled(kind string) bool {
- return s.conf.IsKindEnabled(kind)
-}
-
type siteRefLinker struct {
s *Site
@@ -303,7 +282,7 @@ func (s siteRefLinker) logNotFound(ref, what string, p page.Page, position text.
} else if p == nil {
s.errorLogger.Logf("[%s] REF_NOT_FOUND: Ref %q: %s", s.s.Lang(), ref, what)
} else {
- s.errorLogger.Logf("[%s] REF_NOT_FOUND: Ref %q from page %q: %s", s.s.Lang(), ref, p.Pathc(), what)
+ s.errorLogger.Logf("[%s] REF_NOT_FOUND: Ref %q from page %q: %s", s.s.Lang(), ref, p.Path(), what)
}
}
@@ -391,8 +370,26 @@ func (s *Site) watching() bool {
}
type whatChanged struct {
- source bool
- files map[string]bool
+ mu sync.Mutex
+
+ contentChanged bool
+ identitySet identity.Identities
+}
+
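+// Add records the given identities as changed; the mutex makes it safe for concurrent use.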
+func (w *whatChanged) Add(ids ...identity.Identity) {
+ w.mu.Lock()
+ defer w.mu.Unlock()
+
+ for _, id := range ids {
+ w.identitySet[id] = true
+ }
+}
+
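+// Changes returns the collected identities; it is safe to call on a nil receiver.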
+func (w *whatChanged) Changes() []identity.Identity {
+ if w == nil || w.identitySet == nil {
+ return nil
+ }
+ return w.identitySet.AsSlice()
}
// RegisterMediaTypes will register the Site's media types in the mime
@@ -405,10 +402,10 @@ func (s *Site) RegisterMediaTypes() {
}
}
-func (s *Site) filterFileEvents(events []fsnotify.Event) []fsnotify.Event {
- var filtered []fsnotify.Event
+func (h *HugoSites) fileEventsFilter(events []fsnotify.Event) []fsnotify.Event {
seen := make(map[fsnotify.Event]bool)
+ n := 0
for _, ev := range events {
// Avoid processing the same event twice.
if seen[ev] {
@@ -416,17 +413,7 @@ func (s *Site) filterFileEvents(events []fsnotify.Event) []fsnotify.Event {
}
seen[ev] = true
- if s.SourceSpec.IgnoreFile(ev.Name) {
- continue
- }
-
- // Throw away any directories
- isRegular, err := s.SourceSpec.IsRegularSourceFile(ev.Name)
- if err != nil && herrors.IsNotExist(err) && (ev.Op&fsnotify.Remove == fsnotify.Remove || ev.Op&fsnotify.Rename == fsnotify.Rename) {
- // Force keep of event
- isRegular = true
- }
- if !isRegular {
+ if h.SourceSpec.IgnoreFile(ev.Name) {
continue
}
@@ -434,23 +421,22 @@ func (s *Site) filterFileEvents(events []fsnotify.Event) []fsnotify.Event {
ev.Name = norm.NFC.String(ev.Name)
}
- filtered = append(filtered, ev)
+ events[n] = ev
+ n++
}
-
- return filtered
+ return events[:n]
}
-func (s *Site) translateFileEvents(events []fsnotify.Event) []fsnotify.Event {
- var filtered []fsnotify.Event
-
+func (h *HugoSites) fileEventsTranslate(events []fsnotify.Event) []fsnotify.Event {
eventMap := make(map[string][]fsnotify.Event)
// We often get a Remove etc. followed by a Create, a Create followed by a Write.
- // Remove the superfluous events to mage the update logic simpler.
+ // Remove the superfluous events to make the update logic simpler.
for _, ev := range events {
eventMap[ev.Name] = append(eventMap[ev.Name], ev)
}
+ n := 0
for _, ev := range events {
mapped := eventMap[ev.Name]
@@ -472,236 +458,77 @@ func (s *Site) translateFileEvents(events []fsnotify.Event) []fsnotify.Event {
}
}
- filtered = append(filtered, kept)
+ events[n] = kept
+ n++
}
- return filtered
+ return events
}
-// reBuild partially rebuilds a site given the filesystem events.
-// It returns whatever the content source was changed.
-// TODO(bep) clean up/rewrite this method.
-func (s *Site) processPartial(config *BuildCfg, init func(config *BuildCfg) error, events []fsnotify.Event) error {
- events = s.filterFileEvents(events)
- events = s.translateFileEvents(events)
-
- changeIdentities := make(identity.Identities)
-
- s.Log.Debugf("Rebuild for events %q", events)
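+// fileEventsContentPaths prunes the given path changes: files below changed directories,
+// files below leaf bundles, and files in the same folder as branch bundles are dropped.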
+func (h *HugoSites) fileEventsContentPaths(p []pathChange) []pathChange {
+ var bundles []pathChange
+ var dirs []pathChange
+ var regular []pathChange
- h := s.h
-
- // First we need to determine what changed
-
- var (
- sourceChanged = []fsnotify.Event{}
- sourceReallyChanged = []fsnotify.Event{}
- contentFilesChanged []string
-
- tmplChanged bool
- tmplAdded bool
- dataChanged bool
- i18nChanged bool
-
- sourceFilesChanged = make(map[string]bool)
- )
-
- var cacheBusters []func(string) bool
- bcfg := s.conf.Build
-
- for _, ev := range events {
- component, relFilename := s.BaseFs.MakePathRelative(ev.Name)
- if relFilename != "" {
- p := hglob.NormalizePath(path.Join(component, relFilename))
- g, err := bcfg.MatchCacheBuster(s.Log, p)
- if err == nil && g != nil {
- cacheBusters = append(cacheBusters, g)
- }
- }
-
- id, found := s.eventToIdentity(ev)
- if found {
- changeIdentities[id] = id
-
- switch id.Type {
- case files.ComponentFolderContent:
- s.Log.Println("Source changed", ev)
- sourceChanged = append(sourceChanged, ev)
- case files.ComponentFolderLayouts:
- tmplChanged = true
- if !s.Tmpl().HasTemplate(id.Path) {
- tmplAdded = true
- }
- if tmplAdded {
- s.Log.Println("Template added", ev)
- } else {
- s.Log.Println("Template changed", ev)
- }
-
- case files.ComponentFolderData:
- s.Log.Println("Data changed", ev)
- dataChanged = true
- case files.ComponentFolderI18n:
- s.Log.Println("i18n changed", ev)
- i18nChanged = true
-
- }
+ var others []pathChange
+ for _, p := range p {
+ if p.isDir {
+ dirs = append(dirs, p)
+ } else {
+ others = append(others, p)
}
}
- changed := &whatChanged{
- source: len(sourceChanged) > 0,
- files: sourceFilesChanged,
- }
-
- config.whatChanged = changed
-
- if err := init(config); err != nil {
- return err
- }
-
- var cacheBusterOr func(string) bool
- if len(cacheBusters) > 0 {
- cacheBusterOr = func(s string) bool {
- for _, cb := range cacheBusters {
- if cb(s) {
- return true
+ // Remove all files below a changed directory.
+ if len(dirs) > 0 {
+ n := 0
+ for _, d := range dirs {
+ dir := d.p.Path() + "/"
+ for _, o := range others {
+ if !strings.HasPrefix(o.p.Path(), dir) {
+ others[n] = o
+ n++
}
}
- return false
- }
- }
-
- // These in memory resource caches will be rebuilt on demand.
- if len(cacheBusters) > 0 {
- s.h.ResourceSpec.ResourceCache.DeleteMatches(cacheBusterOr)
- }
-
- if tmplChanged || i18nChanged {
- s.h.init.Reset()
- var prototype *deps.Deps
- for i, s := range s.h.Sites {
- if err := s.Deps.Compile(prototype); err != nil {
- return err
- }
- if i == 0 {
- prototype = s.Deps
- }
- }
- }
-
- if dataChanged {
- s.h.init.data.Reset()
- }
-
- for _, ev := range sourceChanged {
- removed := false
-
- if ev.Op&fsnotify.Remove == fsnotify.Remove {
- removed = true
- }
-
- // Some editors (Vim) sometimes issue only a Rename operation when writing an existing file
- // Sometimes a rename operation means that file has been renamed other times it means
- // it's been updated
- if ev.Op&fsnotify.Rename == fsnotify.Rename {
- // If the file is still on disk, it's only been updated, if it's not, it's been moved
- if ex, err := afero.Exists(s.Fs.Source, ev.Name); !ex || err != nil {
- removed = true
- }
- }
-
- if removed && files.IsContentFile(ev.Name) {
- h.removePageByFilename(ev.Name)
- }
-
- sourceReallyChanged = append(sourceReallyChanged, ev)
- sourceFilesChanged[ev.Name] = true
- }
- if config.ErrRecovery || tmplAdded || dataChanged {
- h.resetPageState()
- } else {
- h.resetPageStateFromEvents(changeIdentities)
- }
-
- if len(sourceReallyChanged) > 0 || len(contentFilesChanged) > 0 {
- var filenamesChanged []string
- for _, e := range sourceReallyChanged {
- filenamesChanged = append(filenamesChanged, e.Name)
}
- if len(contentFilesChanged) > 0 {
- filenamesChanged = append(filenamesChanged, contentFilesChanged...)
- }
-
- filenamesChanged = helpers.UniqueStringsReuse(filenamesChanged)
-
- if err := s.readAndProcessContent(*config, filenamesChanged...); err != nil {
- return err
- }
-
+ others = others[:n]
}
- return nil
-}
-
-func (s *Site) process(config BuildCfg) (err error) {
- if err = s.readAndProcessContent(config); err != nil {
- err = fmt.Errorf("readAndProcessContent: %w", err)
- return
- }
- return err
-}
-
-func (s *Site) render(ctx *siteRenderContext) (err error) {
- if err := page.Clear(); err != nil {
- return err
- }
-
- if ctx.outIdx == 0 {
- // Note that even if disableAliases is set, the aliases themselves are
- // preserved on page. The motivation with this is to be able to generate
- // 301 redirects in a .htacess file and similar using a custom output format.
- if !s.conf.DisableAliases {
- // Aliases must be rendered before pages.
- // Some sites, Hugo docs included, have faulty alias definitions that point
- // to itself or another real page. These will be overwritten in the next
- // step.
- if err = s.renderAliases(); err != nil {
- return
- }
+ for _, p := range others {
+ if p.p.IsBundle() {
+ bundles = append(bundles, p)
+ } else {
+ regular = append(regular, p)
}
}
- if err = s.renderPages(ctx); err != nil {
- return
- }
-
- if ctx.outIdx == 0 {
- if err = s.renderSitemap(); err != nil {
- return
- }
+ // Remove any files below leaf bundles.
+ // Remove any files in the same folder as branch bundles.
+ var keepers []pathChange
- if ctx.multihost {
- if err = s.renderRobotsTXT(); err != nil {
- return
+ for _, o := range regular {
+ keep := true
+ for _, b := range bundles {
+ prefix := b.p.Base() + "/"
+ if b.p.IsLeafBundle() && strings.HasPrefix(o.p.Path(), prefix) {
+ keep = false
+ break
+ } else if b.p.IsBranchBundle() && o.p.Dir() == b.p.Dir() {
+ keep = false
+ break
}
}
- if err = s.render404(); err != nil {
- return
+ if keep {
+ keepers = append(keepers, o)
}
}
- if !ctx.renderSingletonPages() {
- return
- }
+ keepers = append(dirs, keepers...)
+ keepers = append(bundles, keepers...)
- if err = s.renderMainLanguageRedirect(); err != nil {
- return
- }
-
- return
+ return keepers
}
// HomeAbsURL is a convenience method giving the absolute URL to the home page.
@@ -723,47 +550,20 @@ func (s *Site) SitemapAbsURL() string {
return p
}
-func (s *Site) eventToIdentity(e fsnotify.Event) (identity.PathIdentity, bool) {
- for _, fs := range s.BaseFs.SourceFilesystems.FileSystems() {
- if p := fs.Path(e.Name); p != "" {
- return identity.NewPathIdentity(fs.Name, filepath.ToSlash(p)), true
- }
- }
- return identity.PathIdentity{}, false
-}
-
-func (s *Site) readAndProcessContent(buildConfig BuildCfg, filenames ...string) error {
- if s.Deps == nil {
- panic("nil deps on site")
- }
-
- sourceSpec := source.NewSourceSpec(s.PathSpec, buildConfig.ContentInclusionFilter, s.BaseFs.Content.Fs)
-
- proc := newPagesProcessor(s.h, sourceSpec)
-
- c := newPagesCollector(sourceSpec, s.h.getContentMaps(), s.Log, s.h.ContentChanges, proc, filenames...)
-
- if err := c.Collect(); err != nil {
- return err
- }
-
- return nil
-}
-
func (s *Site) createNodeMenuEntryURL(in string) string {
if !strings.HasPrefix(in, "/") {
return in
}
// make it match the nodes
menuEntryURL := in
- menuEntryURL = helpers.SanitizeURLKeepTrailingSlash(s.s.PathSpec.URLize(menuEntryURL))
+ menuEntryURL = s.s.PathSpec.URLize(menuEntryURL)
if !s.conf.CanonifyURLs {
menuEntryURL = paths.AddContextRoot(s.s.PathSpec.Cfg.BaseURL().String(), menuEntryURL)
}
return menuEntryURL
}
-func (s *Site) assembleMenus() {
+func (s *Site) assembleMenus() error {
s.menus = make(navigation.Menus)
type twoD struct {
@@ -775,14 +575,9 @@ func (s *Site) assembleMenus() {
// add menu entries from config to flat hash
for name, menu := range s.conf.Menus.Config {
for _, me := range menu {
- if types.IsNil(me.Page) {
- if me.PageRef != "" {
- // Try to resolve the page.
- p, _ := s.getPageNew(nil, me.PageRef)
- if !types.IsNil(p) {
- navigation.SetPageValues(me, p)
- }
- }
+ if types.IsNil(me.Page) && me.PageRef != "" {
+ // Try to resolve the page.
+ me.Page, _ = s.getPage(nil, me.PageRef)
}
// If page is still nill, we must make sure that we have a URL that considers baseURL etc.
@@ -797,37 +592,32 @@ func (s *Site) assembleMenus() {
sectionPagesMenu := s.conf.SectionPagesMenu
if sectionPagesMenu != "" {
- s.pageMap.sections.Walk(func(s string, v any) bool {
- p := v.(*contentNode).p
- if p.IsHome() {
- return false
+ if err := s.pageMap.forEachPage(pagePredicates.ShouldListGlobal, func(p *pageState) (bool, error) {
+ if p.IsHome() || !p.m.shouldBeCheckedForMenuDefinitions() {
+ return false, nil
}
- // From Hugo 0.22 we have nested sections, but until we get a
- // feel of how that would work in this setting, let us keep
- // this menu for the top level only.
+ // The section pages menus are attached to the top level section.
id := p.Section()
if _, ok := flat[twoD{sectionPagesMenu, id}]; ok {
- return false
+ return false, nil
}
-
me := navigation.MenuEntry{
MenuConfig: navigation.MenuConfig{
Identifier: id,
Name: p.LinkTitle(),
Weight: p.Weight(),
},
+ Page: p,
}
navigation.SetPageValues(&me, p)
flat[twoD{sectionPagesMenu, me.KeyName()}] = &me
-
- return false
- })
+ return false, nil
+ }); err != nil {
+ return err
+ }
}
-
// Add menu entries provided by pages
- s.pageMap.pageTrees.WalkRenderable(func(ss string, n *contentNode) bool {
- p := n.p
-
+ if err := s.pageMap.forEachPage(pagePredicates.ShouldListGlobal, func(p *pageState) (bool, error) {
for name, me := range p.pageMenus.menus() {
if _, ok := flat[twoD{name, me.KeyName()}]; ok {
err := p.wrapError(fmt.Errorf("duplicate menu entry with identifier %q in menu %q", me.KeyName(), name))
@@ -836,9 +626,10 @@ func (s *Site) assembleMenus() {
}
flat[twoD{name, me.KeyName()}] = me
}
-
- return false
- })
+ return false, nil
+ }); err != nil {
+ return err
+ }
// Create Children Menus First
for _, e := range flat {
@@ -871,6 +662,8 @@ func (s *Site) assembleMenus() {
s.menus[menu.MenuName] = s.menus[menu.MenuName].Add(e)
}
}
+
+ return nil
}
// get any language code to prefix the target file path with.
@@ -893,39 +686,12 @@ func (s *Site) getLanguagePermalinkLang(alwaysInSubDir bool) string {
}
return s.GetLanguagePrefix()
-
-}
-
-func (s *Site) getTaxonomyKey(key string) string {
- if s.conf.DisablePathToLower {
- return s.PathSpec.MakePath(key)
- }
- return strings.ToLower(s.PathSpec.MakePath(key))
}
// Prepare site for a new full build.
func (s *Site) resetBuildState(sourceChanged bool) {
s.relatedDocsHandler = s.relatedDocsHandler.Clone()
s.init.Reset()
-
- if sourceChanged {
- s.pageMap.contentMap.pageReverseIndex.Reset()
- s.PageCollections = newPageCollections(s.pageMap)
- s.pageMap.withEveryBundlePage(func(p *pageState) bool {
- p.pagePages = &pagePages{}
- if p.bucket != nil {
- p.bucket.pagesMapBucketPages = &pagesMapBucketPages{}
- }
- p.parent = nil
- p.Scratcher = maps.NewScratcher()
- return false
- })
- } else {
- s.pageMap.withEveryBundlePage(func(p *pageState) bool {
- p.Scratcher = maps.NewScratcher()
- return false
- })
- }
}
func (s *Site) errorCollator(results <-chan error, errs chan<- error) {
@@ -947,7 +713,7 @@ func (s *Site) errorCollator(results <-chan error, errs chan<- error) {
// as possible for existing sites. Most sites will use {{ .Site.GetPage "section" "my/section" }},
// i.e. 2 arguments, so we test for that.
func (s *Site) GetPage(ref ...string) (page.Page, error) {
- p, err := s.s.getPageOldVersion(ref...)
+ p, err := s.s.getPageForRefs(ref...)
if p == nil {
// The nil struct has meaning in some situations, mostly to avoid breaking
@@ -959,22 +725,6 @@ func (s *Site) GetPage(ref ...string) (page.Page, error) {
return p, err
}
-func (s *Site) GetPageWithTemplateInfo(info tpl.Info, ref ...string) (page.Page, error) {
- p, err := s.GetPage(ref...)
- if p != nil {
- // Track pages referenced by templates/shortcodes
- // when in server mode.
- if im, ok := info.(identity.Manager); ok {
- im.Add(p)
- }
- }
- return p, err
-}
-
-func (s *Site) permalink(link string) string {
- return s.PathSpec.PermalinkForBaseURL(link, s.PathSpec.Cfg.BaseURL().String())
-}
-
func (s *Site) absURLPath(targetPath string) string {
var path string
if s.conf.RelativeURLs {
@@ -990,46 +740,23 @@ func (s *Site) absURLPath(targetPath string) string {
return path
}
-func (s *Site) lookupLayouts(layouts ...string) tpl.Template {
- for _, l := range layouts {
- if templ, found := s.Tmpl().Lookup(l); found {
- return templ
- }
- }
-
- return nil
-}
-
-func (s *Site) renderAndWriteXML(ctx context.Context, statCounter *uint64, name string, targetPath string, d any, templ tpl.Template) error {
- renderBuffer := bp.GetBuffer()
- defer bp.PutBuffer(renderBuffer)
-
- if err := s.renderForTemplate(ctx, name, "", d, renderBuffer, templ); err != nil {
- return err
- }
-
- pd := publisher.Descriptor{
- Src: renderBuffer,
- TargetPath: targetPath,
- StatCounter: statCounter,
- // For the minification part of XML,
- // we currently only use the MIME type.
- OutputFormat: output.RSSFormat,
- AbsURLPath: s.absURLPath(targetPath),
- }
-
- return s.publisher.Publish(pd)
-}
+const (
+ pageDependencyScopeDefault int = iota
+ pageDependencyScopeGlobal
+)
-func (s *Site) renderAndWritePage(statCounter *uint64, name string, targetPath string, p *pageState, templ tpl.Template) error {
- s.h.IncrPageRender()
+func (s *Site) renderAndWritePage(statCounter *uint64, name string, targetPath string, p *pageState, d any, templ tpl.Template) error {
+ s.h.buildCounters.pageRenderCounter.Add(1)
renderBuffer := bp.GetBuffer()
defer bp.PutBuffer(renderBuffer)
of := p.outputFormat()
- ctx := tpl.SetPageInContext(context.Background(), p)
+ p.incrRenderState()
+
+ ctx := tpl.Context.Page.Set(context.Background(), p)
+ ctx = tpl.Context.DependencyManagerScopedProvider.Set(ctx, p)
- if err := s.renderForTemplate(ctx, p.Kind(), of.Name, p, renderBuffer, templ); err != nil {
+ if err := s.renderForTemplate(ctx, p.Kind(), of.Name, d, renderBuffer, templ); err != nil {
return err
}
@@ -1078,7 +805,6 @@ var infoOnMissingLayout = map[string]bool{
// where ITEM is the thing being hooked.
type hookRendererTemplate struct {
templateHandler tpl.TemplateHandler
- identity.SearchProvider
templ tpl.Template
resolvePosition func(ctx any) text.Position
}
@@ -1119,92 +845,17 @@ func (s *Site) renderForTemplate(ctx context.Context, name, outputFormat string,
return
}
-func (s *Site) lookupTemplate(layouts ...string) (tpl.Template, bool) {
- for _, l := range layouts {
- if templ, found := s.Tmpl().Lookup(l); found {
- return templ, true
- }
- }
-
- return nil, false
-}
-
-func (s *Site) publish(statCounter *uint64, path string, r io.Reader, fs afero.Fs) (err error) {
- s.PathSpec.ProcessingStats.Incr(statCounter)
-
- return helpers.WriteToDisk(filepath.Clean(path), r, fs)
-}
-
-func (s *Site) kindFromFileInfoOrSections(fi *fileInfo, sections []string) string {
- if fi.TranslationBaseName() == "_index" {
- if fi.Dir() == "" {
- return kinds.KindHome
- }
-
- return s.kindFromSections(sections)
-
- }
-
- return kinds.KindPage
-}
-
-func (s *Site) kindFromSections(sections []string) string {
- if len(sections) == 0 {
- return kinds.KindHome
- }
-
- return s.kindFromSectionPath(path.Join(sections...))
-}
-
-func (s *Site) kindFromSectionPath(sectionPath string) string {
- var taxonomiesConfig taxonomiesConfig = s.conf.Taxonomies
- for _, plural := range taxonomiesConfig {
- if plural == sectionPath {
- return kinds.KindTaxonomy
- }
-
- if strings.HasPrefix(sectionPath, plural) {
- return kinds.KindTerm
- }
-
- }
-
- return kinds.KindSection
-}
-
-func (s *Site) newPage(
- n *contentNode,
- parentbBucket *pagesMapBucket,
- kind, title string,
- sections ...string) *pageState {
- m := map[string]any{}
- if title != "" {
- m["title"] = title
- }
-
- p, err := newPageFromMeta(
- n,
- parentbBucket,
- m,
- &pageMeta{
- s: s,
- kind: kind,
- sections: sections,
- })
- if err != nil {
- panic(err)
- }
-
- return p
-}
-
func (s *Site) shouldBuild(p page.Page) bool {
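+ // Pages of disabled kinds are never built.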
+ if !s.conf.IsKindEnabled(p.Kind()) {
+ return false
+ }
return shouldBuild(s.Conf.BuildFuture(), s.Conf.BuildExpired(),
s.Conf.BuildDrafts(), p.Draft(), p.PublishDate(), p.ExpiryDate())
}
func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bool,
- publishDate time.Time, expiryDate time.Time) bool {
+ publishDate time.Time, expiryDate time.Time,
+) bool {
if !(buildDrafts || !Draft) {
return false
}
@@ -1217,3 +868,38 @@ func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bo
}
return true
}
+
+func (s *Site) render(ctx *siteRenderContext) (err error) {
+ if err := page.Clear(); err != nil {
+ return err
+ }
+
+ if ctx.outIdx == 0 {
+ // Note that even if disableAliases is set, the aliases themselves are
+ // preserved on page. The motivation with this is to be able to generate
+ // 301 redirects in a .htaccess file and similar using a custom output format.
+ if !s.conf.DisableAliases {
+ // Aliases must be rendered before pages.
+ // Some sites, Hugo docs included, have faulty alias definitions that point
+ // to themselves or another real page. These will be overwritten in the next
+ // step.
+ if err = s.renderAliases(); err != nil {
+ return
+ }
+ }
+ }
+
+ if err = s.renderPages(ctx); err != nil {
+ return
+ }
+
+ if !ctx.shouldRenderStandalonePage("") {
+ return
+ }
+
+ if err = s.renderMainLanguageRedirect(); err != nil {
+ return
+ }
+
+ return
+}
diff --git a/hugolib/site_benchmark_new_test.go b/hugolib/site_benchmark_new_test.go
index 5f4d3f11786..023d8e4d5f9 100644
--- a/hugolib/site_benchmark_new_test.go
+++ b/hugolib/site_benchmark_new_test.go
@@ -420,11 +420,11 @@ baseURL = "https://example.com"
createContent := func(dir, name string) {
var content string
if strings.Contains(name, "_index") {
- content = pageContent(1)
+ // Empty
} else {
content = pageContentWithCategory(1, fmt.Sprintf("category%d", r.Intn(5)+1))
- sb.WithContent(filepath.Join("content", dir, name), content)
}
+ sb.WithContent(filepath.Join("content", dir, name), content)
}
for level := 1; level <= r.Intn(5)+1; level++ {
@@ -454,6 +454,9 @@ baseURL = "https://example.com"
func TestBenchmarkSite(b *testing.T) {
benchmarks := getBenchmarkSiteTestCases()
for _, bm := range benchmarks {
+ if bm.name != "Deep content tree" {
+ continue
+ }
b.Run(bm.name, func(b *testing.T) {
s := bm.create(b)
@@ -478,13 +481,13 @@ title: %s
Edited!!`, p.Title()))
- counters := &testCounters{}
+ counters := &buildCounters{}
b.Build(BuildCfg{testCounters: counters})
// We currently rebuild all the language versions of the same content file.
// We could probably optimize that case, but it's not trivial.
- b.Assert(int(counters.contentRenderCounter), qt.Equals, 4)
+ b.Assert(int(counters.contentRenderCounter.Load()), qt.Equals, 4)
b.AssertFileContent("public"+p.RelPermalink()+"index.html", "Edited!!")
}
@@ -534,7 +537,7 @@ func BenchmarkSiteNew(b *testing.B) {
panic("infinite loop")
}
p = pages[rnd.Intn(len(pages))]
- if !p.File().IsZero() {
+ if p.File() != nil {
break
}
}
diff --git a/hugolib/site_new.go b/hugolib/site_new.go
index da9d19f218f..ddf45c28696 100644
--- a/hugolib/site_new.go
+++ b/hugolib/site_new.go
@@ -1,4 +1,4 @@
-// Copyright 2023 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -22,8 +22,8 @@ import (
"sort"
"time"
- radix "github.com/armon/go-radix"
"github.com/bep/logg"
+ "github.com/gohugoio/hugo/cache/dynacache"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/common/maps"
@@ -31,6 +31,7 @@ import (
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/config/allconfig"
"github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/hugolib/doctree"
"github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/langs/i18n"
@@ -39,9 +40,9 @@ import (
"github.com/gohugoio/hugo/navigation"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/publisher"
- "github.com/gohugoio/hugo/resources/kinds"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/page/pagemeta"
+ "github.com/gohugoio/hugo/resources/page/siteidentities"
"github.com/gohugoio/hugo/resources/resource"
"github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/tpl/tplimpl"
@@ -50,8 +51,10 @@ import (
var _ page.Site = (*Site)(nil)
type Site struct {
- conf *allconfig.Config
- language *langs.Language
+ conf *allconfig.Config
+ language *langs.Language
+ languagei int
+ pageMap *pageMap
// The owning container.
h *HugoSites
@@ -59,12 +62,10 @@ type Site struct {
*deps.Deps
// Page navigation.
- *PageCollections
+ *pageFinder
taxonomies page.TaxonomyList
menus navigation.Menus
- siteBucket *pagesMapBucket
-
// Shortcut to the home page. Note that this may be nil if
// home page, for some odd reason, is disabled.
home *pageState
@@ -93,7 +94,7 @@ type Site struct {
func (s *Site) Debug() {
fmt.Println("Debugging site", s.Lang(), "=>")
- fmt.Println(s.pageMap.testDump())
+ // fmt.Println(s.pageMap.testDump())
}
// NewHugoSites creates HugoSites from the given config.
@@ -127,10 +128,13 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) {
logger = loggers.New(logOpts)
}
+ memCache := dynacache.New(dynacache.Options{Running: conf.Running(), Log: logger})
+
firstSiteDeps := &deps.Deps{
Fs: cfg.Fs,
Log: logger,
Conf: conf,
+ MemCache: memCache,
TemplateProvider: tplimpl.DefaultTemplateProvider,
TranslationProvider: i18n.NewTranslationProvider(),
}
@@ -142,14 +146,40 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) {
confm := cfg.Configs
var sites []*Site
+ ns := &contentNodeShifter{
+ numLanguages: len(confm.Languages),
+ }
+
+ treeConfig := doctree.Config[contentNodeI]{
+ Shifter: ns,
+ }
+
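+ // The page trees are shared across all language sites.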
+ pageTrees := &pageTrees{
+ treePages: doctree.New(
+ treeConfig,
+ ),
+ treeResources: doctree.New(
+ treeConfig,
+ ),
+ treeTaxonomyEntries: doctree.NewTreeShiftTree[*weightedContentNode](doctree.DimensionLanguage.Index(), len(confm.Languages)),
+ }
+
+ pageTrees.treePagesResources = doctree.WalkableTrees[contentNodeI]{
+ pageTrees.treePages,
+ pageTrees.treeResources,
+ }
+
+ pageTrees.resourceTrees = doctree.MutableTrees{
+ pageTrees.treeResources,
+ }
+
for i, confp := range confm.ConfigLangs() {
language := confp.Language()
- if confp.IsLangDisabled(language.Lang) {
+ if language.Disabled {
continue
}
k := language.Lang
conf := confm.LanguageConfigMap[k]
-
frontmatterHandler, err := pagemeta.NewFrontmatterHandler(firstSiteDeps.Log, conf.Frontmatter)
if err != nil {
return nil, err
@@ -158,11 +188,9 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) {
langs.SetParams(language, conf.Params)
s := &Site{
- conf: conf,
- language: language,
- siteBucket: &pagesMapBucket{
- cascade: conf.Cascade.Config,
- },
+ conf: conf,
+ language: language,
+ languagei: i,
frontmatterHandler: frontmatterHandler,
}
@@ -177,20 +205,9 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) {
s.Deps = d
}
- // Site deps start.
- var taxonomiesConfig taxonomiesConfig = conf.Taxonomies
- pm := &pageMap{
- contentMap: newContentMap(contentMapConfig{
- lang: k,
- taxonomyConfig: taxonomiesConfig.Values(),
- taxonomyDisabled: !conf.IsKindEnabled(kinds.KindTerm),
- taxonomyTermDisabled: !conf.IsKindEnabled(kinds.KindTaxonomy),
- pageDisabled: !conf.IsKindEnabled(kinds.KindPage),
- }),
- s: s,
- }
+ s.pageMap = newPageMap(i, s, memCache, pageTrees)
- s.PageCollections = newPageCollections(pm)
+ s.pageFinder = newPageFinder(s.pageMap)
s.siteRefLinker, err = newSiteRefLinker(s)
if err != nil {
return nil, err
@@ -217,17 +234,26 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) {
return nil, errors.New("no sites to build")
}
- // Sort the sites by language weight (if set) or lang.
+ // Pull the default content language to the top, then sort the sites by language weight (if set) or lang.
+ defaultContentLanguage := confm.Base.DefaultContentLanguage
sort.Slice(sites, func(i, j int) bool {
li := sites[i].language
lj := sites[j].language
+ if li.Lang == defaultContentLanguage {
+ return true
+ }
+
+ if lj.Lang == defaultContentLanguage {
+ return false
+ }
+
if li.Weight != lj.Weight {
return li.Weight < lj.Weight
}
return li.Lang < lj.Lang
})
- h, err := newHugoSitesNew(cfg, firstSiteDeps, sites)
+ h, err := newHugoSites(cfg, firstSiteDeps, pageTrees, sites)
if err == nil && h == nil {
panic("hugo: newHugoSitesNew returned nil error and nil HugoSites")
}
@@ -235,29 +261,33 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) {
return h, err
}
-func newHugoSitesNew(cfg deps.DepsCfg, d *deps.Deps, sites []*Site) (*HugoSites, error) {
+func newHugoSites(cfg deps.DepsCfg, d *deps.Deps, pageTrees *pageTrees, sites []*Site) (*HugoSites, error) {
numWorkers := config.GetNumWorkerMultiplier()
- if numWorkers > len(sites) {
- numWorkers = len(sites)
- }
- var workers *para.Workers
- if numWorkers > 1 {
- workers = para.New(numWorkers)
+ numWorkersSite := numWorkers
+ if numWorkersSite > len(sites) {
+ numWorkersSite = len(sites)
}
+ workersSite := para.New(numWorkersSite)
h := &HugoSites{
- Sites: sites,
- Deps: sites[0].Deps,
- Configs: cfg.Configs,
- workers: workers,
- numWorkers: numWorkers,
+ Sites: sites,
+ Deps: sites[0].Deps,
+ Configs: cfg.Configs,
+ workersSite: workersSite,
+ numWorkersSites: numWorkers,
+ numWorkers: numWorkers,
+ pageTrees: pageTrees,
+ cachePages: dynacache.GetOrCreatePartition[string,
+ page.Pages](d.MemCache, "/pags/all",
+ dynacache.OptionsPartition{Weight: 10, ClearWhen: dynacache.ClearOnRebuild},
+ ),
+ translationKeyPages: maps.NewSliceCache[page.Page](),
currentSite: sites[0],
skipRebuildForFilenames: make(map[string]bool),
init: &hugoSitesInit{
- data: lazy.New(),
- layouts: lazy.New(),
- gitInfo: lazy.New(),
- translations: lazy.New(),
+ data: lazy.New(),
+ layouts: lazy.New(),
+ gitInfo: lazy.New(),
},
}
@@ -304,18 +334,8 @@ func newHugoSitesNew(cfg deps.DepsCfg, d *deps.Deps, sites []*Site) (*HugoSites,
donec: make(chan bool),
}
- // Only needed in server mode.
- if cfg.Configs.Base.Internal.Watch {
- h.ContentChanges = &contentChangeMap{
- pathSpec: h.PathSpec,
- symContent: make(map[string]map[string]bool),
- leafBundles: radix.New(),
- branchBundles: make(map[string]bool),
- }
- }
-
h.init.data.Add(func(context.Context) (any, error) {
- err := h.loadData(h.PathSpec.BaseFs.Data.Dirs)
+ err := h.loadData()
if err != nil {
return nil, fmt.Errorf("failed to load data: %w", err)
}
@@ -331,15 +351,6 @@ func newHugoSitesNew(cfg deps.DepsCfg, d *deps.Deps, sites []*Site) (*HugoSites,
return nil, nil
})
- h.init.translations.Add(func(context.Context) (any, error) {
- if len(h.Sites) > 1 {
- allTranslations := pagesToTranslationsMap(h.Sites)
- assignTranslationsToPages(allTranslations, h.Sites)
- }
-
- return nil, nil
- })
-
h.init.gitInfo.Add(func(context.Context) (any, error) {
err := h.loadGitInfo()
if err != nil {
@@ -422,10 +433,16 @@ func (s *Site) BaseURL() string {
}
// Returns the last modification date of the content.
+// Deprecated: Use .Lastmod instead.
func (s *Site) LastChange() time.Time {
return s.lastmod
}
+// Returns the last modification date of the content.
+func (s *Site) Lastmod() time.Time {
+ return s.lastmod
+}
+
// Returns the Params configured for this site.
func (s *Site) Params() maps.Params {
return s.conf.Params
@@ -480,12 +497,55 @@ func (s *Site) LanguagePrefix() string {
return "/" + prefix
}
-// Returns the identity of this site.
-// This is for internal use only.
-func (s *Site) GetIdentity() identity.Identity {
- return identity.KeyValueIdentity{Key: "site", Value: s.Lang()}
-}
-
func (s *Site) Site() page.Site {
return page.WrapSite(s)
}
+
+func (s *Site) ForEeachIdentityByName(name string, f func(identity.Identity) bool) {
+ if id, found := siteidentities.FromString(name); found {
+ if f(id) {
+ return
+ }
+ }
+}
+
+// Pages returns all pages.
+// This is for the current language only.
+func (s *Site) Pages() page.Pages {
+ return s.pageMap.getPagesInSection(
+ pageMapQueryPagesInSection{
+ pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{
+ Path: "",
+ KeyPart: "global",
+ Include: pagePredicates.ShouldListGlobal,
+ },
+ Recursive: true,
+ IncludeSelf: true,
+ },
+ )
+}
+
+// RegularPages returns all the regular pages.
+// This is for the current language only.
+func (s *Site) RegularPages() page.Pages {
+ return s.pageMap.getPagesInSection(
+ pageMapQueryPagesInSection{
+ pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{
+ Path: "",
+ KeyPart: "global",
+ Include: pagePredicates.ShouldListGlobal.And(pagePredicates.KindPage),
+ },
+ Recursive: true,
+ },
+ )
+}
+
+// AllPages returns all pages for all sites.
+func (s *Site) AllPages() page.Pages {
+ return s.h.Pages()
+}
+
+// AllRegularPages returns all regular pages for all sites.
+func (s *Site) AllRegularPages() page.Pages {
+ return s.h.RegularPages()
+}
diff --git a/hugolib/site_output.go b/hugolib/site_output.go
index d6f55cbdd9b..2744c01338e 100644
--- a/hugolib/site_output.go
+++ b/hugolib/site_output.go
@@ -42,7 +42,7 @@ func createDefaultOutputFormats(allFormats output.Formats) map[string]output.For
// Below are for consistency. They are currently not used during rendering.
kinds.KindSitemap: {sitemapOut},
kinds.KindRobotsTXT: {robotsOut},
- kinds.Kind404: {htmlOut},
+ kinds.KindStatus404: {htmlOut},
}
// May be disabled
diff --git a/hugolib/site_output_test.go b/hugolib/site_output_test.go
index c2a14c3ebc6..9bcb13ea491 100644
--- a/hugolib/site_output_test.go
+++ b/hugolib/site_output_test.go
@@ -142,7 +142,7 @@ Len Pages: {{ .Kind }} {{ len .Site.RegularPages }} Page Number: {{ .Paginator.P
s := b.H.Sites[0]
b.Assert(s.language.Lang, qt.Equals, "en")
- home := s.getPage(kinds.KindHome)
+ home := s.getPageOldVersion(kinds.KindHome)
b.Assert(home, qt.Not(qt.IsNil))
@@ -314,7 +314,7 @@ baseName = "customdelimbase"
th.assertFileContent("public/nosuffixbase", "no suffix")
th.assertFileContent("public/customdelimbase_del", "custom delim")
- home := s.getPage(kinds.KindHome)
+ home := s.getPageOldVersion(kinds.KindHome)
c.Assert(home, qt.Not(qt.IsNil))
outputs := home.OutputFormats()
@@ -383,7 +383,7 @@ func TestCreateSiteOutputFormats(t *testing.T) {
c.Assert(outputs[kinds.KindRSS], deepEqualsOutputFormats, output.Formats{output.RSSFormat})
c.Assert(outputs[kinds.KindSitemap], deepEqualsOutputFormats, output.Formats{output.SitemapFormat})
c.Assert(outputs[kinds.KindRobotsTXT], deepEqualsOutputFormats, output.Formats{output.RobotsTxtFormat})
- c.Assert(outputs[kinds.Kind404], deepEqualsOutputFormats, output.Formats{output.HTMLFormat})
+ c.Assert(outputs[kinds.KindStatus404], deepEqualsOutputFormats, output.Formats{output.HTMLFormat})
})
// Issue #4528
diff --git a/hugolib/site_render.go b/hugolib/site_render.go
index 43371b44da9..379dd6e867b 100644
--- a/hugolib/site_render.go
+++ b/hugolib/site_render.go
@@ -20,23 +20,21 @@ import (
"strings"
"sync"
- "github.com/gohugoio/hugo/output/layouts"
+ "github.com/gohugoio/hugo/hugolib/doctree"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/tpl"
- "errors"
-
- "github.com/gohugoio/hugo/output"
-
"github.com/gohugoio/hugo/resources/kinds"
"github.com/gohugoio/hugo/resources/page"
- "github.com/gohugoio/hugo/resources/page/pagemeta"
)
type siteRenderContext struct {
cfg *BuildCfg
+ // languageIdx is the zero based index of the site.
+ languageIdx int
+
// Zero based index for all output formats combined.
sitesOutIdx int
@@ -47,20 +45,24 @@ type siteRenderContext struct {
multihost bool
}
-// Whether to render 404.html, robotsTXT.txt which usually is rendered
-// once only in the site root.
-func (s siteRenderContext) renderSingletonPages() bool {
- if s.multihost {
+// Whether to render 404.html, robots.txt and similar.
+// These are usually rendered once in the root of public.
+func (s siteRenderContext) shouldRenderStandalonePage(kind string) bool {
+ if s.multihost || kind == kinds.KindSitemap {
// 1 per site
return s.outIdx == 0
}
- // 1 for all sites
- return s.sitesOutIdx == 0
+ if kind == kinds.KindStatus404 {
+ // 1 for all output formats
+ return s.outIdx == 0
+ }
+
+ // 1 for all sites and output formats.
+ return s.languageIdx == 0 && s.outIdx == 0
}
-// renderPages renders pages each corresponding to a markdown file.
-// TODO(bep np doc
+// renderPages renders pages concurrently.
func (s *Site) renderPages(ctx *siteRenderContext) error {
numWorkers := config.GetNumWorkerMultiplier()
@@ -79,18 +81,26 @@ func (s *Site) renderPages(ctx *siteRenderContext) error {
cfg := ctx.cfg
- s.pageMap.pageTrees.Walk(func(ss string, n *contentNode) bool {
-
- if cfg.shouldRender(n.p) {
- select {
- case <-s.h.Done():
- return true
- default:
- pages <- n.p
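+ // Walk the page tree and feed every renderable page to the render workers.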
+ w := &doctree.NodeShiftTreeWalker[contentNodeI]{
+ Tree: s.pageMap.treePages,
+ Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
+ if p, ok := n.(*pageState); ok {
+ if cfg.shouldRender(p) {
+ select {
+ case <-s.h.Done():
+ return true, nil
+ default:
+ pages <- p
+ }
+ }
}
- }
- return false
- })
+ return false, nil
+ },
+ }
+
+ if err := w.Walk(context.Background()); err != nil {
+ return err
+ }
close(pages)
@@ -110,10 +120,15 @@ func pageRenderer(
s *Site,
pages <-chan *pageState,
results chan<- error,
- wg *sync.WaitGroup) {
+ wg *sync.WaitGroup,
+) {
defer wg.Done()
for p := range pages {
+ if p.m.isStandalone() && !ctx.shouldRenderStandalonePage(p.Kind()) {
+ continue
+ }
+
if p.m.buildConfig.PublishResources {
if err := p.renderResources(); err != nil {
s.SendError(p.errorf(err, "failed to render page resources"))
@@ -133,13 +148,33 @@ func pageRenderer(
}
if !found {
- s.logMissingLayout("", p.Layout(), p.Kind(), p.f.Name)
+ s.Log.Trace(
+ func() string {
+ return fmt.Sprintf("no layout for kind %q found", p.Kind())
+ },
+ )
+ // Don't emit warning for missing 404 etc. pages.
+ if !p.m.isStandalone() {
+ s.logMissingLayout("", p.Layout(), p.Kind(), p.f.Name)
+ }
continue
}
targetPath := p.targetPaths().TargetFilename
- if err := s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "page "+p.Title(), targetPath, p, templ); err != nil {
+ s.Log.Trace(
+ func() string {
+ return fmt.Sprintf("rendering outputFormat %q kind %q using layout %q to %q", p.pageOutput.f.Name, p.Kind(), templ.Name(), targetPath)
+ },
+ )
+
+ var d any = p
+ switch p.Kind() {
+ case kinds.KindSitemapIndex:
+ d = s.h.Sites
+ }
+
+ if err := s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "page "+p.Title(), targetPath, p, d, templ); err != nil {
results <- err
}
@@ -205,6 +240,7 @@ func (s *Site) renderPaginator(p *pageState, templ tpl.Template) error {
if err := s.writeDestAlias(targetPaths.TargetFilename, p.Permalink(), f, p); err != nil {
return err
}
+
}
// Render pages for the rest
@@ -217,7 +253,7 @@ func (s *Site) renderPaginator(p *pageState, templ tpl.Template) error {
if err := s.renderAndWritePage(
&s.PathSpec.ProcessingStats.PaginatorPages,
p.Title(),
- targetPaths.TargetFilename, p, templ); err != nil {
+ targetPaths.TargetFilename, p, p, templ); err != nil {
return err
}
@@ -226,160 +262,72 @@ func (s *Site) renderPaginator(p *pageState, templ tpl.Template) error {
return nil
}
-func (s *Site) render404() error {
- p, err := newPageStandalone(&pageMeta{
- s: s,
- kind: kinds.Kind404,
- urlPaths: pagemeta.URLPath{
- URL: "404.html",
- },
- },
- output.HTMLFormat,
- )
- if err != nil {
- return err
- }
-
- if !p.render {
- return nil
- }
-
- var d layouts.LayoutDescriptor
- d.Kind = kinds.Kind404
-
- templ, found, err := s.Tmpl().LookupLayout(d, output.HTMLFormat)
- if err != nil {
- return err
- }
- if !found {
- return nil
- }
-
- targetPath := p.targetPaths().TargetFilename
-
- if targetPath == "" {
- return errors.New("failed to create targetPath for 404 page")
- }
-
- return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "404 page", targetPath, p, templ)
-}
-
-func (s *Site) renderSitemap() error {
- p, err := newPageStandalone(&pageMeta{
- s: s,
- kind: kinds.KindSitemap,
- urlPaths: pagemeta.URLPath{
- URL: s.conf.Sitemap.Filename,
- },
- },
- output.HTMLFormat,
- )
- if err != nil {
- return err
- }
-
- if !p.render {
- return nil
- }
-
- targetPath := p.targetPaths().TargetFilename
- ctx := tpl.SetPageInContext(context.Background(), p)
-
- if targetPath == "" {
- return errors.New("failed to create targetPath for sitemap")
- }
-
- templ := s.lookupLayouts("sitemap.xml", "_default/sitemap.xml", "_internal/_default/sitemap.xml")
-
- return s.renderAndWriteXML(ctx, &s.PathSpec.ProcessingStats.Sitemaps, "sitemap", targetPath, p, templ)
-}
-
-func (s *Site) renderRobotsTXT() error {
- if !s.conf.EnableRobotsTXT && s.isEnabled(kinds.KindRobotsTXT) {
- return nil
- }
-
- p, err := newPageStandalone(&pageMeta{
- s: s,
- kind: kinds.KindRobotsTXT,
- urlPaths: pagemeta.URLPath{
- URL: "robots.txt",
- },
- },
- output.RobotsTxtFormat)
- if err != nil {
- return err
- }
-
- if !p.render {
- return nil
- }
-
- templ := s.lookupLayouts("robots.txt", "_default/robots.txt", "_internal/_default/robots.txt")
-
- return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "Robots Txt", p.targetPaths().TargetFilename, p, templ)
-}
-
// renderAliases renders shell pages that simply have a redirect in the header.
func (s *Site) renderAliases() error {
- var err error
- s.pageMap.pageTrees.WalkLinkable(func(ss string, n *contentNode) bool {
- p := n.p
- if len(p.Aliases()) == 0 {
- return false
- }
-
- pathSeen := make(map[string]bool)
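+ // Walk all pages and write redirect files for the aliases of each HTML output format.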
+ w := &doctree.NodeShiftTreeWalker[contentNodeI]{
+ Tree: s.pageMap.treePages,
+ Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
+ p := n.(*pageState)
+
+ // We cannot alias a page that's not rendered.
+ if p.m.noLink() {
+ return false, nil
+ }
- for _, of := range p.OutputFormats() {
- if !of.Format.IsHTML {
- continue
+ if len(p.Aliases()) == 0 {
+ return false, nil
}
- f := of.Format
+ pathSeen := make(map[string]bool)
+ for _, of := range p.OutputFormats() {
+ if !of.Format.IsHTML {
+ continue
+ }
- if pathSeen[f.Path] {
- continue
- }
- pathSeen[f.Path] = true
+ f := of.Format
- plink := of.Permalink()
+ if pathSeen[f.Path] {
+ continue
+ }
+ pathSeen[f.Path] = true
- for _, a := range p.Aliases() {
- isRelative := !strings.HasPrefix(a, "/")
+ plink := of.Permalink()
- if isRelative {
- // Make alias relative, where "." will be on the
- // same directory level as the current page.
- basePath := path.Join(p.targetPaths().SubResourceBaseLink, "..")
- a = path.Join(basePath, a)
+ for _, a := range p.Aliases() {
+ isRelative := !strings.HasPrefix(a, "/")
- } else {
- // Make sure AMP and similar doesn't clash with regular aliases.
- a = path.Join(f.Path, a)
- }
+ if isRelative {
+ // Make alias relative, where "." will be on the
+ // same directory level as the current page.
+ basePath := path.Join(p.targetPaths().SubResourceBaseLink, "..")
+ a = path.Join(basePath, a)
- if s.conf.C.IsUglyURLSection(p.Section()) && !strings.HasSuffix(a, ".html") {
- a += ".html"
- }
+ } else {
+ // Make sure AMP and similar doesn't clash with regular aliases.
+ a = path.Join(f.Path, a)
+ }
- lang := p.Language().Lang
+ if s.conf.C.IsUglyURLSection(p.Section()) && !strings.HasSuffix(a, ".html") {
+ a += ".html"
+ }
- if s.h.Configs.IsMultihost && !strings.HasPrefix(a, "/"+lang) {
- // These need to be in its language root.
- a = path.Join(lang, a)
- }
+ lang := p.Language().Lang
- err = s.writeDestAlias(a, plink, f, p)
- if err != nil {
- return true
+ if s.h.Configs.IsMultihost && !strings.HasPrefix(a, "/"+lang) {
+ // These need to be in their language root.
+ a = path.Join(lang, a)
+ }
+
+ err := s.writeDestAlias(a, plink, f, p)
+ if err != nil {
+ return true, err
+ }
}
}
- }
- return false
- })
-
- return err
+ return false, nil
+ },
+ }
+ return w.Walk(context.TODO())
}
// renderMainLanguageRedirect creates a redirect to the main language home,
diff --git a/hugolib/site_sections_test.go b/hugolib/site_sections_test.go
index f5cb41d28fb..4d4ff965b90 100644
--- a/hugolib/site_sections_test.go
+++ b/hugolib/site_sections_test.go
@@ -21,6 +21,7 @@ import (
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/htesting"
"github.com/gohugoio/hugo/resources/kinds"
"github.com/gohugoio/hugo/resources/page"
)
@@ -31,8 +32,10 @@ func TestNestedSections(t *testing.T) {
cfg, fs = newTestCfg()
)
+ tt := htesting.NewPinnedRunner(c, "")
+
cfg.Set("permalinks", map[string]string{
- "perm a": ":sections/:title",
+ "perm-a": ":sections/:title",
})
pageTemplate := `---
@@ -127,7 +130,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
{"elsewhere", func(c *qt.C, p page.Page) {
c.Assert(len(p.Pages()), qt.Equals, 1)
for _, p := range p.Pages() {
- c.Assert(p.SectionsPath(), qt.Equals, "elsewhere")
+ c.Assert(p.SectionsPath(), qt.Equals, "/elsewhere")
}
}},
{"post", func(c *qt.C, p page.Page) {
@@ -179,8 +182,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
c.Assert(home.IsHome(), qt.Equals, true)
c.Assert(len(p.Sections()), qt.Equals, 0)
c.Assert(home.CurrentSection(), qt.Equals, home)
- active, err := home.InSection(home)
- c.Assert(err, qt.IsNil)
+ active := home.InSection(home)
c.Assert(active, qt.Equals, true)
c.Assert(p.FirstSection(), qt.Equals, p)
c.Assert(len(p.Ancestors()), qt.Equals, 1)
@@ -208,29 +210,22 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
}
c.Assert(child.CurrentSection(), qt.Equals, p)
- active, err := child.InSection(p)
- c.Assert(err, qt.IsNil)
+ active := child.InSection(p)
c.Assert(active, qt.Equals, true)
- active, err = p.InSection(child)
- c.Assert(err, qt.IsNil)
+ active = p.InSection(child)
c.Assert(active, qt.Equals, true)
- active, err = p.InSection(getPage(p, "/"))
- c.Assert(err, qt.IsNil)
+ active = p.InSection(getPage(p, "/"))
c.Assert(active, qt.Equals, false)
- isAncestor, err := p.IsAncestor(child)
- c.Assert(err, qt.IsNil)
+ isAncestor := p.IsAncestor(child)
c.Assert(isAncestor, qt.Equals, true)
- isAncestor, err = child.IsAncestor(p)
- c.Assert(err, qt.IsNil)
+ isAncestor = child.IsAncestor(p)
c.Assert(isAncestor, qt.Equals, false)
- isDescendant, err := p.IsDescendant(child)
- c.Assert(err, qt.IsNil)
+ isDescendant := p.IsDescendant(child)
c.Assert(isDescendant, qt.Equals, false)
- isDescendant, err = child.IsDescendant(p)
- c.Assert(err, qt.IsNil)
+ isDescendant = child.IsDescendant(p)
c.Assert(isDescendant, qt.Equals, true)
}
@@ -254,32 +249,26 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
c.Assert(len(p.Ancestors()), qt.Equals, 3)
l1 := getPage(p, "/l1")
- isDescendant, err := l1.IsDescendant(p)
- c.Assert(err, qt.IsNil)
+ isDescendant := l1.IsDescendant(p)
c.Assert(isDescendant, qt.Equals, false)
- isDescendant, err = l1.IsDescendant(nil)
- c.Assert(err, qt.IsNil)
+ isDescendant = l1.IsDescendant(nil)
c.Assert(isDescendant, qt.Equals, false)
- isDescendant, err = nilp.IsDescendant(p)
- c.Assert(err, qt.IsNil)
+ isDescendant = nilp.IsDescendant(p)
c.Assert(isDescendant, qt.Equals, false)
- isDescendant, err = p.IsDescendant(l1)
- c.Assert(err, qt.IsNil)
+ isDescendant = p.IsDescendant(l1)
c.Assert(isDescendant, qt.Equals, true)
- isAncestor, err := l1.IsAncestor(p)
- c.Assert(err, qt.IsNil)
+ isAncestor := l1.IsAncestor(p)
c.Assert(isAncestor, qt.Equals, true)
- isAncestor, err = p.IsAncestor(l1)
- c.Assert(err, qt.IsNil)
+ isAncestor = p.IsAncestor(l1)
c.Assert(isAncestor, qt.Equals, false)
c.Assert(p.FirstSection(), qt.Equals, l1)
- isAncestor, err = p.IsAncestor(nil)
- c.Assert(err, qt.IsNil)
+ isAncestor = p.IsAncestor(nil)
c.Assert(isAncestor, qt.Equals, false)
- isAncestor, err = nilp.IsAncestor(l1)
- c.Assert(err, qt.IsNil)
c.Assert(isAncestor, qt.Equals, false)
+
+ l3 := getPage(p, "/l1/l2/l3")
+ c.Assert(l3.FirstSection(), qt.Equals, l1)
}},
{"perm a,link", func(c *qt.C, p page.Page) {
c.Assert(p.Title(), qt.Equals, "T9_-1")
@@ -294,15 +283,14 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
}},
}
- home := s.getPage(kinds.KindHome)
+ home := s.getPageOldVersion(kinds.KindHome)
for _, test := range tests {
test := test
- t.Run(fmt.Sprintf("sections %s", test.sections), func(t *testing.T) {
- t.Parallel()
- c := qt.New(t)
+ tt.Run(fmt.Sprintf("sections %s", test.sections), func(c *qt.C) {
+ c.Parallel()
sections := strings.Split(test.sections, ",")
- p := s.getPage(kinds.KindSection, sections...)
+ p := s.getPageOldVersion(kinds.KindSection, sections...)
c.Assert(p, qt.Not(qt.IsNil), qt.Commentf(fmt.Sprint(sections)))
if p.Pages() != nil {
@@ -319,19 +307,14 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
c.Assert(len(home.Sections()), qt.Equals, 9)
c.Assert(s.Sections(), deepEqualsPages, home.Sections())
- rootPage := s.getPage(kinds.KindPage, "mypage.md")
+ rootPage := s.getPageOldVersion(kinds.KindPage, "mypage.md")
c.Assert(rootPage, qt.Not(qt.IsNil))
c.Assert(rootPage.Parent().IsHome(), qt.Equals, true)
// https://github.com/gohugoio/hugo/issues/6365
c.Assert(rootPage.Sections(), qt.HasLen, 0)
- // Add a odd test for this as this looks a little bit off, but I'm not in the mood
- // to think too hard a out this right now. It works, but people will have to spell
- // out the directory name as is.
- // If we later decide to do something about this, we will have to do some normalization in
- // getPage.
- // TODO(bep)
- sectionWithSpace := s.getPage(kinds.KindSection, "Spaces in Section")
+ sectionWithSpace := s.getPageOldVersion(kinds.KindSection, "Spaces in Section")
+ // s.h.pageTrees.debugPrint()
c.Assert(sectionWithSpace, qt.Not(qt.IsNil))
c.Assert(sectionWithSpace.RelPermalink(), qt.Equals, "/spaces-in-section/")
@@ -381,3 +364,37 @@ Next: {{ with .NextInSection }}{{ .RelPermalink }}{{ end }}|
b.AssertFileContent("public/blog/cool/cool2/index.html",
"Prev: |", "Next: /blog/cool/cool1/|")
}
+
+func TestSectionEntries(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- hugo.toml --
+baseURL = "https://example.com/"
+-- content/myfirstsection/p1.md --
+---
+title: "P1"
+---
+P1
+-- content/a/b/c/_index.md --
+---
+title: "C"
+---
+C
+-- content/a/b/c/mybundle/index.md --
+---
+title: "My Bundle"
+---
+-- layouts/_default/list.html --
+Kind: {{ .Kind }}|RelPermalink: {{ .RelPermalink }}|SectionsPath: {{ .SectionsPath }}|SectionsEntries: {{ .SectionsEntries }}|Len: {{ len .SectionsEntries }}|
+-- layouts/_default/single.html --
+Kind: {{ .Kind }}|RelPermalink: {{ .RelPermalink }}|SectionsPath: {{ .SectionsPath }}|SectionsEntries: {{ .SectionsEntries }}|Len: {{ len .SectionsEntries }}|
+`
+
+ b := Test(t, files)
+
+ b.AssertFileContent("public/myfirstsection/p1/index.html", "RelPermalink: /myfirstsection/p1/|SectionsPath: /myfirstsection|SectionsEntries: [myfirstsection]|Len: 1")
+ b.AssertFileContent("public/a/b/c/index.html", "RelPermalink: /a/b/c/|SectionsPath: /a/b/c|SectionsEntries: [a b c]|Len: 3")
+ b.AssertFileContent("public/a/b/c/mybundle/index.html", "Kind: page|RelPermalink: /a/b/c/mybundle/|SectionsPath: /a/b/c|SectionsEntries: [a b c]|Len: 3")
+ b.AssertFileContent("public/index.html", "Kind: home|RelPermalink: /|SectionsPath: /|SectionsEntries: []|Len: 0")
+}
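The assertions in TestSectionEntries above tie SectionsPath to SectionsEntries: the path is the entries joined under "/", and the home page reduces to "/" with zero entries. A small sketch of that mapping as the test expects it; sectionsPath is a hypothetical helper written only to mirror the assertions, not Hugo's implementation.

package main

import (
	"fmt"
	"path"
)

// sectionsPath reconstructs the SectionsPath values asserted above from
// SectionsEntries: the entries joined under "/", with no entries mapping to "/".
func sectionsPath(entries []string) string {
	return path.Join(append([]string{"/"}, entries...)...)
}

func main() {
	fmt.Println(sectionsPath([]string{"myfirstsection"})) // /myfirstsection
	fmt.Println(sectionsPath([]string{"a", "b", "c"}))    // /a/b/c
	fmt.Println(sectionsPath(nil))                        // /
}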
diff --git a/hugolib/site_stats_test.go b/hugolib/site_stats_test.go
index 4ed6411d212..167194ef580 100644
--- a/hugolib/site_stats_test.go
+++ b/hugolib/site_stats_test.go
@@ -94,5 +94,38 @@ aliases: [/Ali%d]
helpers.ProcessingStatsTable(&buff, stats...)
- c.Assert(buff.String(), qt.Contains, "Pages | 19 | 6")
+ c.Assert(buff.String(), qt.Contains, "Pages | 21 | 7")
+}
+
+func TestSiteLastmod(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- hugo.toml --
+baseURL = "https://example.com/"
+-- content/_index.md --
+---
+date: 2023-01-01
+---
+-- content/posts/_index.md --
+---
+date: 2023-02-01
+---
+-- content/posts/post-1.md --
+---
+date: 2023-03-01
+---
+-- content/posts/post-2.md --
+---
+date: 2023-04-01
+---
+-- layouts/index.html --
+site.Lastmod: {{ .Site.Lastmod.Format "2006-01-02" }}
+site.LastChange: {{ .Site.LastChange.Format "2006-01-02" }}
+home.Lastmod: {{ site.Home.Lastmod.Format "2006-01-02" }}
+
+`
+ b := Test(t, files)
+
+ b.AssertFileContent("public/index.html", "site.Lastmod: 2023-04-01\nsite.LastChange: 2023-04-01\nhome.Lastmod: 2023-01-01")
}
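TestSiteLastmod above asserts that the site-level Lastmod is the newest date across all pages (2023-04-01), while home.Lastmod keeps the home page's own front-matter date (2023-01-01). A minimal sketch of that site-level aggregation, assuming it simply takes the maximum page date; siteLastmod is a hypothetical helper, not Hugo's implementation.

package main

import (
	"fmt"
	"time"
)

// siteLastmod returns the newest Lastmod among the given page dates,
// matching the behavior the test asserts for .Site.Lastmod.
func siteLastmod(pageDates []time.Time) time.Time {
	var last time.Time
	for _, d := range pageDates {
		if d.After(last) {
			last = d
		}
	}
	return last
}

func main() {
	dates := []time.Time{
		time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC), // content/_index.md (home)
		time.Date(2023, 2, 1, 0, 0, 0, 0, time.UTC), // content/posts/_index.md
		time.Date(2023, 3, 1, 0, 0, 0, 0, time.UTC), // content/posts/post-1.md
		time.Date(2023, 4, 1, 0, 0, 0, 0, time.UTC), // content/posts/post-2.md
	}
	fmt.Println(siteLastmod(dates).Format("2006-01-02")) // 2023-04-01
}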
diff --git a/hugolib/site_test.go b/hugolib/site_test.go
index 57c6bbabef1..cf0d4a032a8 100644
--- a/hugolib/site_test.go
+++ b/hugolib/site_test.go
@@ -419,7 +419,6 @@ Main section page: {{ .RelPermalink }}
}
func TestMainSectionsMoveToSite(t *testing.T) {
-
t.Run("defined in params", func(t *testing.T) {
t.Parallel()
@@ -510,7 +509,6 @@ MainSections Params: [mysect]|
MainSections Site method: [mysect]|
`)
})
-
}
// Issue #1176
@@ -718,7 +716,7 @@ func TestOrderedPages(t *testing.T) {
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Configs: configs}, BuildCfg{SkipRender: true})
- if s.getPage(kinds.KindSection, "sect").Pages()[1].Title() != "Three" || s.getPage(kinds.KindSection, "sect").Pages()[2].Title() != "Four" {
+ if s.getPageOldVersion(kinds.KindSection, "sect").Pages()[1].Title() != "Three" || s.getPageOldVersion(kinds.KindSection, "sect").Pages()[2].Title() != "Four" {
t.Error("Pages in unexpected order.")
}
@@ -1011,7 +1009,7 @@ func TestRefLinking(t *testing.T) {
t.Parallel()
site := setupLinkingMockSite(t)
- currentPage := site.getPage(kinds.KindPage, "level2/level3/start.md")
+ currentPage := site.getPageOldVersion(kinds.KindPage, "level2/level3/start.md")
if currentPage == nil {
t.Fatalf("failed to find current page in site")
}
@@ -1071,7 +1069,7 @@ func TestRefLinking(t *testing.T) {
func checkLinkCase(site *Site, link string, currentPage page.Page, relative bool, outputFormat string, expected string, t *testing.T, i int) {
t.Helper()
if out, err := site.refLink(link, currentPage, relative, outputFormat); err != nil || out != expected {
- t.Fatalf("[%d] Expected %q from %q to resolve to %q, got %q - error: %s", i, link, currentPage.Pathc(), expected, out, err)
+ t.Fatalf("[%d] Expected %q from %q to resolve to %q, got %q - error: %s", i, link, currentPage.Path(), expected, out, err)
}
}
@@ -1199,7 +1197,7 @@ writeStats = true
writeStats = false
`)
- b.AssertDestinationExists("hugo_stats.json", false)
+ b.AssertFileExists("public/hugo_stats.json", false)
b = r(`
[build.buildStats]
@@ -1245,8 +1243,7 @@ disableclasses = true
[build.buildStats]
enable = false
`)
- b.AssertDestinationExists("hugo_stats.json", false)
-
+ b.AssertFileExists("public/hugo_stats.json", false)
}
func TestClassCollectorStress(t *testing.T) {
diff --git a/hugolib/site_url_test.go b/hugolib/site_url_test.go
index fd15eb5d32e..2cc5328546a 100644
--- a/hugolib/site_url_test.go
+++ b/hugolib/site_url_test.go
@@ -23,20 +23,6 @@ import (
"github.com/gohugoio/hugo/resources/kinds"
)
-const slugDoc1 = "---\ntitle: slug doc 1\nslug: slug-doc-1\naliases:\n - /sd1/foo/\n - /sd2\n - /sd3/\n - /sd4.html\n---\nslug doc 1 content\n"
-
-const slugDoc2 = `---
-title: slug doc 2
-slug: slug-doc-2
----
-slug doc 2 content
-`
-
-var urlFakeSource = [][2]string{
- {filepath.FromSlash("content/blue/doc1.md"), slugDoc1},
- {filepath.FromSlash("content/blue/doc2.md"), slugDoc2},
-}
-
func TestUglyURLsPerSection(t *testing.T) {
t.Parallel()
@@ -67,12 +53,12 @@ Do not go gentle into that good night.
c.Assert(len(s.RegularPages()), qt.Equals, 2)
- notUgly := s.getPage(kinds.KindPage, "sect1/p1.md")
+ notUgly := s.getPageOldVersion(kinds.KindPage, "sect1/p1.md")
c.Assert(notUgly, qt.Not(qt.IsNil))
c.Assert(notUgly.Section(), qt.Equals, "sect1")
c.Assert(notUgly.RelPermalink(), qt.Equals, "/sect1/p1/")
- ugly := s.getPage(kinds.KindPage, "sect2/p2.md")
+ ugly := s.getPageOldVersion(kinds.KindPage, "sect2/p2.md")
c.Assert(ugly, qt.Not(qt.IsNil))
c.Assert(ugly.Section(), qt.Equals, "sect2")
c.Assert(ugly.RelPermalink(), qt.Equals, "/sect2/p2.html")
@@ -124,7 +110,7 @@ Do not go gentle into that good night.
c.Assert(len(s.RegularPages()), qt.Equals, 10)
- sect1 := s.getPage(kinds.KindSection, "sect1")
+ sect1 := s.getPageOldVersion(kinds.KindSection, "sect1")
c.Assert(sect1, qt.Not(qt.IsNil))
c.Assert(sect1.RelPermalink(), qt.Equals, "/ss1/")
th.assertFileContent(filepath.Join("public", "ss1", "index.html"), "P1|URL: /ss1/|Next: /ss1/page/2/")
diff --git a/hugolib/sitemap_test.go b/hugolib/sitemap_test.go
index aae874d502f..be13ba1f4bb 100644
--- a/hugolib/sitemap_test.go
+++ b/hugolib/sitemap_test.go
@@ -17,67 +17,107 @@ import (
"reflect"
"testing"
- qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/config"
- "github.com/gohugoio/hugo/deps"
)
-const sitemapTemplate = `\n
|