From da9dacd1cfaf2c8cccef10cf085bd0ee3a9f7849 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Erik=20Pedersen?= Date: Wed, 2 Jan 2019 12:33:26 +0100 Subject: [PATCH] Add a Page interface The main motivation of this commit is to add a `page.Page` interface to replace the old, very file-oriented `hugolib.Page` struct to prepare for #5074, "pages from other data sources". But this also fixes a set of annoying limitations, especially related to custom output formats and shortcodes. And it's faster. See #5074 Fixes #5090 Fixes #5204 Fixes #4695 Fixes #5607 Fixes #5704 Fixes #5707 Fixes #5719 Fixes #3113 Fixes #5706 --- benchbep.sh | 2 + codegen/methods.go | 529 ++++ .../methods2_test.go | 17 +- codegen/methods_test.go | 100 + commands/commandeer.go | 4 + commands/commands.go | 18 +- commands/commands_test.go | 6 +- commands/convert.go | 20 +- commands/hugo.go | 167 +- commands/import_jekyll.go | 8 +- commands/list.go | 11 +- commands/new_content_test.go | 10 +- commands/server.go | 8 +- common/collections/append.go | 4 +- common/collections/slice_test.go | 4 +- common/hugio/readers.go | 1 + common/maps/scratch.go | 18 + common/types/types.go | 16 + config/configProvider.go | 12 + config/services/servicesConfig.go | 12 + {hugolib => config}/sitemap.go | 15 +- create/content.go | 6 +- deps/deps.go | 7 +- docs/content/en/variables/page.md | 15 +- go.mod | 1 - go.sum | 2 - helpers/content.go | 6 +- helpers/content_renderer_test.go | 4 +- helpers/content_test.go | 22 +- helpers/general.go | 5 +- helpers/general_test.go | 23 +- helpers/path.go | 7 + helpers/pygments.go | 2 +- htesting/test_structs.go | 51 +- hugofs/createcounting_fs.go | 99 + hugofs/fs.go | 6 + hugofs/hashing_fs.go | 4 - hugolib/alias.go | 27 +- hugolib/alias_test.go | 2 +- hugolib/collections.go | 73 +- hugolib/collections_test.go | 16 +- hugolib/config.go | 21 +- hugolib/datafiles_test.go | 4 +- hugolib/disableKinds_test.go | 38 +- hugolib/embedded_shortcodes_test.go | 8 +- hugolib/gitinfo.go | 12 +- hugolib/hugo_sites.go | 520 ++-- hugolib/hugo_sites_build.go | 178 +- hugolib/hugo_sites_build_errors_test.go | 123 +- hugolib/hugo_sites_build_test.go | 411 +-- hugolib/hugo_sites_multihost_test.go | 26 +- hugolib/hugo_smoke_test.go | 303 ++ hugolib/language_content_dir_test.go | 42 +- hugolib/media.go | 60 - hugolib/menu_test.go | 6 +- hugolib/minify_publisher_test.go | 18 +- hugolib/multilingual.go | 14 +- hugolib/orderedMap.go | 99 - hugolib/orderedMap_test.go | 69 - hugolib/page.go | 2466 ++++------------- hugolib/page__common.go | 112 + hugolib/page__data.go | 70 + hugolib/page__menus.go | 74 + hugolib/page__meta.go | 644 +++++ hugolib/page__new.go | 289 ++ hugolib/page__output.go | 107 + hugolib/page__paths.go | 148 + hugolib/page__per_output.go | 427 +++ hugolib/{page_ref.go => page__ref.go} | 61 +- hugolib/page__tree.go | 113 + hugolib/page_content.go | 245 +- hugolib/page_errors.go | 47 - hugolib/page_kinds.go | 40 + hugolib/page_output.go | 320 --- hugolib/page_pagination.go | 83 + hugolib/page_paths.go | 312 --- hugolib/page_paths_test.go | 194 -- hugolib/page_permalink_test.go | 42 +- hugolib/page_position.go | 76 + hugolib/page_taxonomy_test.go | 96 - hugolib/page_test.go | 862 +----- hugolib/page_time_integration_test.go | 183 -- hugolib/page_unwrap.go | 50 + ...separators_test.go => page_unwrap_test.go} | 31 +- hugolib/page_without_content.go | 67 - hugolib/pagebundler.go | 8 +- hugolib/pagebundler_handlers.go | 121 +- hugolib/pagebundler_test.go | 221 +- hugolib/pagecollections.go | 279 +-
hugolib/pagecollections_test.go | 84 +- hugolib/pages_language_merge_test.go | 40 +- hugolib/pages_related_test.go | 75 - hugolib/pagination_test.go | 579 ---- hugolib/paths/themes.go | 2 +- hugolib/permalinker.go | 3 +- hugolib/permalinks.go | 213 -- hugolib/permalinks_test.go | 85 - hugolib/resource_chain_test.go | 6 +- hugolib/rss_test.go | 2 +- hugolib/shortcode.go | 470 +--- hugolib/shortcode_page.go | 56 + hugolib/shortcode_test.go | 369 +-- hugolib/site.go | 1086 ++++---- hugolib/siteJSONEncode_test.go | 32 +- hugolib/site_output.go | 13 +- hugolib/site_output_test.go | 40 +- hugolib/site_render.go | 406 +-- hugolib/site_sections.go | 271 +- hugolib/site_sections_test.go | 148 +- hugolib/site_test.go | 121 +- hugolib/site_url_test.go | 12 +- hugolib/sitemap_test.go | 8 +- hugolib/taxonomy.go | 149 +- hugolib/taxonomy_test.go | 115 +- hugolib/testhelpers_test.go | 120 +- hugolib/translations.go | 54 +- lazy/init.go | 199 ++ lazy/init_test.go | 150 + lazy/once.go | 69 + magefile.go | 35 +- media/mediaType.go | 7 + media/mediaType_test.go | 2 +- {hugolib => navigation}/menu.go | 41 +- navigation/pagemenus.go | 240 ++ output/outputFormat.go | 33 +- output/outputFormat_test.go | 3 + parser/pageparser/itemtype_string.go | 16 + parser/pageparser/pageparser.go | 25 +- parser/pageparser/pageparser_test.go | 4 +- publisher/publisher.go | 2 +- related/inverted_index.go | 16 +- related/inverted_index_test.go | 4 +- resources/image.go | 14 +- resources/image_cache.go | 31 +- resources/page/page.go | 361 +++ .../page/page_author.go | 4 +- resources/page/page_data.go | 42 + resources/page/page_data_test.go | 57 + resources/page/page_generate/.gitignore | 1 + .../page_generate/generate_page_wrappers.go | 212 ++ .../site.go => resources/page/page_kinds.go | 21 +- resources/page/page_kinds_test.go | 31 + resources/page/page_marshaljson.autogen.go | 192 ++ resources/page/page_nop.go | 463 ++++ resources/page/page_outputformat.go | 85 + resources/page/page_paths.go | 331 +++ resources/page/page_paths_test.go | 251 ++ resources/page/page_wrappers.autogen.go | 105 + .../page/pagegroup.go | 135 +- .../page/pagegroup_test.go | 96 +- .../page}/pagemeta/page_frontmatter.go | 13 +- .../page}/pagemeta/page_frontmatter_test.go | 39 +- .../page}/pagemeta/pagemeta.go | 11 - resources/page/pages.go | 115 + .../page/pages_cache.go | 2 +- .../page/pages_cache_test.go | 12 +- .../page}/pages_language_merge.go | 6 +- .../page/pages_prev_next.go | 12 +- .../page/pages_prev_next_test.go | 19 +- {hugolib => resources/page}/pages_related.go | 52 +- resources/page/pages_related_test.go | 86 + .../page/pages_sort.go | 106 +- .../page/pages_sort_test.go | 116 +- {hugolib => resources/page}/pagination.go | 267 +- resources/page/pagination_test.go | 307 ++ resources/page/permalinks.go | 248 ++ resources/page/permalinks_test.go | 180 ++ resources/page/site.go | 53 + resources/page/testhelpers_test.go | 554 ++++ resources/page/weighted.go | 140 + resources/resource.go | 67 +- resources/resource/dates.go | 81 + resources/resource/params.go | 89 + resources/resource/resource_helpers.go | 70 + resources/resource/resourcetypes.go | 98 +- resources/resource_metadata.go | 1 - resources/resource_metadata_test.go | 4 +- resources/resource_test.go | 6 +- resources/testhelpers_test.go | 19 +- resources/transform.go | 4 +- source/fileInfo.go | 43 +- tpl/collections/collections.go | 6 +- tpl/collections/collections_test.go | 36 +- tpl/template.go | 29 +- tpl/tplimpl/ace.go | 8 +- tpl/tplimpl/embedded/generate/generate.go | 2 +- 
tpl/tplimpl/embedded/templates.autogen.go | 24 +- .../embedded/templates/_default/rss.xml | 8 +- .../embedded/templates/_default/sitemap.xml | 5 +- .../templates/_default/sitemapindex.xml | 1 + tpl/tplimpl/embedded/templates/disqus.html | 2 +- tpl/tplimpl/shortcodes.go | 122 + tpl/tplimpl/shortcodes_test.go | 51 + tpl/tplimpl/template.go | 121 +- 194 files changed, 13088 insertions(+), 9084 deletions(-) create mode 100755 benchbep.sh create mode 100644 codegen/methods.go rename hugolib/page_resource.go => codegen/methods2_test.go (70%) create mode 100644 codegen/methods_test.go rename {hugolib => config}/sitemap.go (73%) create mode 100644 hugofs/createcounting_fs.go create mode 100644 hugolib/hugo_smoke_test.go delete mode 100644 hugolib/media.go delete mode 100644 hugolib/orderedMap.go delete mode 100644 hugolib/orderedMap_test.go create mode 100644 hugolib/page__common.go create mode 100644 hugolib/page__data.go create mode 100644 hugolib/page__menus.go create mode 100644 hugolib/page__meta.go create mode 100644 hugolib/page__new.go create mode 100644 hugolib/page__output.go create mode 100644 hugolib/page__paths.go create mode 100644 hugolib/page__per_output.go rename hugolib/{page_ref.go => page__ref.go} (56%) create mode 100644 hugolib/page__tree.go delete mode 100644 hugolib/page_errors.go create mode 100644 hugolib/page_kinds.go delete mode 100644 hugolib/page_output.go create mode 100644 hugolib/page_pagination.go delete mode 100644 hugolib/page_paths.go delete mode 100644 hugolib/page_paths_test.go create mode 100644 hugolib/page_position.go delete mode 100644 hugolib/page_taxonomy_test.go delete mode 100644 hugolib/page_time_integration_test.go create mode 100644 hugolib/page_unwrap.go rename hugolib/{path_separators_test.go => page_unwrap_test.go} (57%) delete mode 100644 hugolib/page_without_content.go delete mode 100644 hugolib/pages_related_test.go delete mode 100644 hugolib/pagination_test.go delete mode 100644 hugolib/permalinks.go delete mode 100644 hugolib/permalinks_test.go create mode 100644 hugolib/shortcode_page.go create mode 100644 lazy/init.go create mode 100644 lazy/init_test.go create mode 100644 lazy/once.go rename {hugolib => navigation}/menu.go (89%) create mode 100644 navigation/pagemenus.go create mode 100644 parser/pageparser/itemtype_string.go create mode 100644 resources/page/page.go rename hugolib/author.go => resources/page/page_author.go (94%) create mode 100644 resources/page/page_data.go create mode 100644 resources/page/page_data_test.go create mode 100644 resources/page/page_generate/.gitignore create mode 100644 resources/page/page_generate/generate_page_wrappers.go rename common/hugo/site.go => resources/page/page_kinds.go (61%) create mode 100644 resources/page/page_kinds_test.go create mode 100644 resources/page/page_marshaljson.autogen.go create mode 100644 resources/page/page_nop.go create mode 100644 resources/page/page_outputformat.go create mode 100644 resources/page/page_paths.go create mode 100644 resources/page/page_paths_test.go create mode 100644 resources/page/page_wrappers.autogen.go rename hugolib/pageGroup.go => resources/page/pagegroup.go (71%) rename hugolib/pageGroup_test.go => resources/page/pagegroup_test.go (83%) rename {hugolib => resources/page}/pagemeta/page_frontmatter.go (98%) rename {hugolib => resources/page}/pagemeta/page_frontmatter_test.go (88%) rename {hugolib => resources/page}/pagemeta/pagemeta.go (83%) create mode 100644 resources/page/pages.go rename hugolib/pageCache.go => resources/page/pages_cache.go (99%) 
rename hugolib/pageCache_test.go => resources/page/pages_cache_test.go (88%) rename {hugolib => resources/page}/pages_language_merge.go (94%) rename hugolib/pagesPrevNext.go => resources/page/pages_prev_next.go (77%) rename hugolib/pagesPrevNext_test.go => resources/page/pages_prev_next_test.go (85%) rename {hugolib => resources/page}/pages_related.go (81%) create mode 100644 resources/page/pages_related_test.go rename hugolib/pageSort.go => resources/page/pages_sort.go (77%) rename hugolib/pageSort_test.go => resources/page/pages_sort_test.go (72%) rename {hugolib => resources/page}/pagination.go (56%) create mode 100644 resources/page/pagination_test.go create mode 100644 resources/page/permalinks.go create mode 100644 resources/page/permalinks_test.go create mode 100644 resources/page/site.go create mode 100644 resources/page/testhelpers_test.go create mode 100644 resources/page/weighted.go create mode 100644 resources/resource/dates.go create mode 100644 resources/resource/params.go create mode 100644 resources/resource/resource_helpers.go create mode 100644 tpl/tplimpl/shortcodes.go create mode 100644 tpl/tplimpl/shortcodes_test.go diff --git a/benchbep.sh b/benchbep.sh new file mode 100755 index 00000000000..e94cc4e6308 --- /dev/null +++ b/benchbep.sh @@ -0,0 +1,2 @@ +gobench -package=./hugolib -bench="BenchmarkSiteBuilding/TOML,num_langs=3,num_pages=5000,tags_per_page=5,shortcodes,render" -count=3 > 1.bench +benchcmp -best 0.bench 1.bench \ No newline at end of file diff --git a/codegen/methods.go b/codegen/methods.go new file mode 100644 index 00000000000..007384f9b62 --- /dev/null +++ b/codegen/methods.go @@ -0,0 +1,529 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// Some functions in this file (see comments) is based on the Go source code, +// copyright The Go Authors and governed by a BSD-style license. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package codegen contains helpers for code generation. +package codegen + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "os" + "path" + "path/filepath" + "reflect" + "regexp" + "sort" + "strings" + "sync" +) + +// Make room for insertions +const weightWidth = 1000 + +// NewInspector creates a new Inspector given a source root. +func NewInspector(root string) *Inspector { + return &Inspector{ProjectRootDir: root} +} + +// Inspector provides methods to help code generation. It uses a combination +// of reflection and source code AST to do the heavy lifting. +type Inspector struct { + ProjectRootDir string + + init sync.Once + + // Determines method order. Go's reflect sorts lexicographically, so + // we must parse the source to preserve this order. + methodWeight map[string]map[string]int +} + +// MethodsFromTypes create a method set from the include slice, excluding any +// method in exclude. 
+func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.Type) Methods { + c.parseSource() + + var methods Methods + + var excludes = make(map[string]bool) + + if len(exclude) > 0 { + for _, m := range c.MethodsFromTypes(exclude, nil) { + excludes[m.Name] = true + } + } + + // There may be overlapping interfaces in types. Do a simple check for now. + seen := make(map[string]bool) + + nameAndPackage := func(t reflect.Type) (string, string) { + var name, pkg string + + isPointer := t.Kind() == reflect.Ptr + + if isPointer { + t = t.Elem() + } + + pkgPrefix := "" + if pkgPath := t.PkgPath(); pkgPath != "" { + pkgPath = strings.TrimSuffix(pkgPath, "/") + _, shortPath := path.Split(pkgPath) + pkgPrefix = shortPath + "." + pkg = pkgPath + } + + name = t.Name() + if name == "" { + // interface{} + name = t.String() + } + + if isPointer { + pkgPrefix = "*" + pkgPrefix + } + + name = pkgPrefix + name + + return name, pkg + + } + + for _, t := range include { + + for i := 0; i < t.NumMethod(); i++ { + + m := t.Method(i) + if excludes[m.Name] || seen[m.Name] { + continue + } + + seen[m.Name] = true + + if m.PkgPath != "" { + // Not exported + continue + } + + numIn := m.Type.NumIn() + + ownerName, _ := nameAndPackage(t) + + method := Method{Owner: t, OwnerName: ownerName, Name: m.Name} + + for i := 0; i < numIn; i++ { + in := m.Type.In(i) + + name, pkg := nameAndPackage(in) + + if pkg != "" { + method.Imports = append(method.Imports, pkg) + } + + method.In = append(method.In, name) + } + + numOut := m.Type.NumOut() + + if numOut > 0 { + for i := 0; i < numOut; i++ { + out := m.Type.Out(i) + name, pkg := nameAndPackage(out) + + if pkg != "" { + method.Imports = append(method.Imports, pkg) + } + + method.Out = append(method.Out, name) + } + } + + methods = append(methods, method) + } + + } + + sort.SliceStable(methods, func(i, j int) bool { + mi, mj := methods[i], methods[j] + + wi := c.methodWeight[mi.OwnerName][mi.Name] + wj := c.methodWeight[mj.OwnerName][mj.Name] + + if wi == wj { + return mi.Name < mj.Name + } + + return wi < wj + + }) + + return methods + +} + +func (c *Inspector) parseSource() { + c.init.Do(func() { + + if !strings.Contains(c.ProjectRootDir, "hugo") { + panic("dir must be set to the Hugo root") + } + + c.methodWeight = make(map[string]map[string]int) + dirExcludes := regexp.MustCompile("docs|examples") + fileExcludes := regexp.MustCompile("autogen") + var filenames []string + + filepath.Walk(c.ProjectRootDir, func(path string, info os.FileInfo, err error) error { + if info.IsDir() { + if dirExcludes.MatchString(info.Name()) { + return filepath.SkipDir + } + } + + if !strings.HasSuffix(path, ".go") || fileExcludes.MatchString(path) { + return nil + } + + filenames = append(filenames, path) + + return nil + + }) + + for _, filename := range filenames { + + pkg := c.packageFromPath(filename) + + fset := token.NewFileSet() + node, err := parser.ParseFile(fset, filename, nil, parser.ParseComments) + if err != nil { + panic(err) + } + + ast.Inspect(node, func(n ast.Node) bool { + switch t := n.(type) { + case *ast.TypeSpec: + if t.Name.IsExported() { + switch it := t.Type.(type) { + case *ast.InterfaceType: + iface := pkg + "." 
+ t.Name.Name + methodNames := collectMethodsRecursive(pkg, it.Methods.List) + weights := make(map[string]int) + weight := weightWidth + for _, name := range methodNames { + weights[name] = weight + weight += weightWidth + } + c.methodWeight[iface] = weights + } + } + + } + return true + }) + + } + + // Complement + for _, v1 := range c.methodWeight { + for k2, w := range v1 { + if v, found := c.methodWeight[k2]; found { + for k3, v3 := range v { + v1[k3] = (v3 / weightWidth) + w + } + } + } + } + + }) +} + +func (c *Inspector) packageFromPath(p string) string { + p = filepath.ToSlash(p) + base := path.Base(p) + if !strings.Contains(base, ".") { + return base + } + return path.Base(strings.TrimSuffix(p, base)) +} + +// Method holds enough information about it to recreate it. +type Method struct { + // The interface we extracted this method from. + Owner reflect.Type + + // String version of the above, on the form PACKAGE.NAME, e.g. + // page.Page + OwnerName string + + // Method name. + Name string + + // Imports needed to satisfy the method signature. + Imports []string + + // Argument types, including any package prefix, e.g. string, int, interface{}, + // net.Url + In []string + + // Return types. + Out []string +} + +// Declaration creates a method declaration (without any body) for the given receiver. +func (m Method) Declaration(receiver string) string { + return fmt.Sprintf("func (%s %s) %s%s %s", receiverShort(receiver), receiver, m.Name, m.inStr(), m.outStr()) +} + +// Delegate creates a delegate call string. +func (m Method) Delegate(receiver, delegate string) string { + ret := "" + if len(m.Out) > 0 { + ret = "return " + } + return fmt.Sprintf("%s%s.%s.%s%s", ret, receiverShort(receiver), delegate, m.Name, m.inOutStr()) +} + +func (m Method) String() string { + return m.Name + m.inStr() + " " + m.outStr() + "\n" +} + +func (m Method) inOutStr() string { + if len(m.In) == 0 { + return "()" + } + + args := make([]string, len(m.In)) + for i := 0; i < len(args); i++ { + args[i] = fmt.Sprintf("arg%d", i) + } + return "(" + strings.Join(args, ", ") + ")" +} + +func (m Method) inStr() string { + if len(m.In) == 0 { + return "()" + } + + args := make([]string, len(m.In)) + for i := 0; i < len(args); i++ { + args[i] = fmt.Sprintf("arg%d %s", i, m.In[i]) + } + return "(" + strings.Join(args, ", ") + ")" +} + +func (m Method) outStr() string { + if len(m.Out) == 0 { + return "" + } + if len(m.Out) == 1 { + return m.Out[0] + } + + return "(" + strings.Join(m.Out, ", ") + ")" +} + +// Methods represents a list of methods for one or more interfaces. +// The order matches the defined order in their source file(s). +type Methods []Method + +// Imports returns a sorted list of package imports needed to satisfy the +// signatures of all methods. +func (m Methods) Imports() []string { + var pkgImports []string + for _, method := range m { + pkgImports = append(pkgImports, method.Imports...) + } + if len(pkgImports) > 0 { + pkgImports = uniqueNonEmptyStrings(pkgImports) + sort.Strings(pkgImports) + } + return pkgImports +} + +// ToMarshalJSON creates a MarshalJSON method for these methods. Any method name +// matchin any of the regexps in excludes will be ignored. 
+func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (string, []string) { + var sb strings.Builder + + r := receiverShort(receiver) + what := firstToUpper(trimAsterisk(receiver)) + pgkName := path.Base(pkgPath) + + fmt.Fprintf(&sb, "func Marshal%sToJSON(%s %s) ([]byte, error) {\n", what, r, receiver) + + var methods Methods + var excludeRes = make([]*regexp.Regexp, len(excludes)) + + for i, exclude := range excludes { + excludeRes[i] = regexp.MustCompile(exclude) + } + + for _, method := range m { + // Exclude methods with arguments and incompatible return values + if len(method.In) > 0 || len(method.Out) == 0 || len(method.Out) > 2 { + continue + } + + if len(method.Out) == 2 { + if method.Out[1] != "error" { + continue + } + } + + for _, re := range excludeRes { + if re.MatchString(method.Name) { + continue + } + } + + methods = append(methods, method) + } + + for _, method := range methods { + varn := varName(method.Name) + if len(method.Out) == 1 { + fmt.Fprintf(&sb, "\t%s := %s.%s()\n", varn, r, method.Name) + } else { + fmt.Fprintf(&sb, "\t%s, err := %s.%s()\n", varn, r, method.Name) + fmt.Fprint(&sb, "\tif err != nil {\n\t\treturn nil, err\n\t}\n") + } + } + + fmt.Fprint(&sb, "\n\ts := struct {\n") + + for _, method := range methods { + fmt.Fprintf(&sb, "\t\t%s %s\n", method.Name, typeName(method.Out[0], pgkName)) + } + + fmt.Fprint(&sb, "\n\t}{\n") + + for _, method := range methods { + varn := varName(method.Name) + fmt.Fprintf(&sb, "\t\t%s: %s,\n", method.Name, varn) + } + + fmt.Fprint(&sb, "\n\t}\n\n") + fmt.Fprint(&sb, "\treturn json.Marshal(&s)\n}") + + pkgImports := append(methods.Imports(), "encoding/json") + + if pkgPath != "" { + // Exclude self + for i, pkgImp := range pkgImports { + if pkgImp == pkgPath { + pkgImports = append(pkgImports[:i], pkgImports[i+1:]...) + } + } + } + + return sb.String(), pkgImports + +} + +func collectMethodsRecursive(pkg string, f []*ast.Field) []string { + var methodNames []string + for _, m := range f { + if m.Names != nil { + methodNames = append(methodNames, m.Names[0].Name) + continue + } + + if ident, ok := m.Type.(*ast.Ident); ok && ident.Obj != nil { + // Embedded interface + methodNames = append( + methodNames, + collectMethodsRecursive( + pkg, + ident.Obj.Decl.(*ast.TypeSpec).Type.(*ast.InterfaceType).Methods.List)...) + } else { + // Embedded, but in a different file/package. Return the + // package.Name and deal with that later. + name := packageName(m.Type) + if !strings.Contains(name, ".") { + // Assume current package + name = pkg + "." 
+ name + } + methodNames = append(methodNames, name) + } + } + + return methodNames + +} + +func firstToLower(name string) string { + return strings.ToLower(name[:1]) + name[1:] +} + +func firstToUpper(name string) string { + return strings.ToUpper(name[:1]) + name[1:] +} + +func packageName(e ast.Expr) string { + switch tp := e.(type) { + case *ast.Ident: + return tp.Name + case *ast.SelectorExpr: + return fmt.Sprintf("%s.%s", packageName(tp.X), packageName(tp.Sel)) + } + return "" +} + +func receiverShort(receiver string) string { + return strings.ToLower(trimAsterisk(receiver))[:1] +} + +func trimAsterisk(name string) string { + return strings.TrimPrefix(name, "*") +} + +func typeName(name, pkg string) string { + return strings.TrimPrefix(name, pkg+".") +} + +func uniqueNonEmptyStrings(s []string) []string { + var unique []string + set := map[string]interface{}{} + for _, val := range s { + if val == "" { + continue + } + if _, ok := set[val]; !ok { + unique = append(unique, val) + set[val] = val + } + } + return unique +} + +func varName(name string) string { + name = firstToLower(name) + + // Adjust some reserved keywords, see https://golang.org/ref/spec#Keywords + switch name { + case "type": + name = "typ" + case "package": + name = "pkg" + // Not reserved, but syntax highlighters has it as a keyword. + case "len": + name = "length" + } + + return name + +} diff --git a/hugolib/page_resource.go b/codegen/methods2_test.go similarity index 70% rename from hugolib/page_resource.go rename to codegen/methods2_test.go index 201076e8b0b..bd36b5e80f5 100644 --- a/hugolib/page_resource.go +++ b/codegen/methods2_test.go @@ -1,4 +1,4 @@ -// Copyright 2017-present The Hugo Authors. All rights reserved. +// Copyright 2019 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,13 +11,10 @@ // See the License for the specific language governing permissions and // limitations under the License. -package hugolib +package codegen -import ( - "github.com/gohugoio/hugo/resources/resource" -) - -var ( - _ resource.Resource = (*Page)(nil) - _ resource.Resource = (*PageOutput)(nil) -) +type IEmbed interface { + MethodEmbed3(s string) string + MethodEmbed1() string + MethodEmbed2() +} diff --git a/codegen/methods_test.go b/codegen/methods_test.go new file mode 100644 index 00000000000..fad6da078ab --- /dev/null +++ b/codegen/methods_test.go @@ -0,0 +1,100 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package codegen + +import ( + "fmt" + "net" + "os" + "reflect" + "testing" + + "github.com/gohugoio/hugo/common/herrors" + "github.com/stretchr/testify/require" +) + +func TestMethods(t *testing.T) { + + var ( + zeroIE = reflect.TypeOf((*IEmbed)(nil)).Elem() + zeroIEOnly = reflect.TypeOf((*IEOnly)(nil)).Elem() + zeroI = reflect.TypeOf((*I)(nil)).Elem() + ) + + dir, _ := os.Getwd() + c := NewInspector(dir) + + t.Run("MethodsFromTypes", func(t *testing.T) { + assert := require.New(t) + + methods := c.MethodsFromTypes([]reflect.Type{zeroI}, nil) + + methodsStr := fmt.Sprint(methods) + + assert.Contains(methodsStr, "Method1(arg0 herrors.ErrorContext)") + assert.Contains(methodsStr, "Method7() interface {}") + assert.Contains(methodsStr, "Method0() string\n Method4() string") + assert.Contains(methodsStr, "MethodEmbed3(arg0 string) string\n MethodEmbed1() string") + + assert.Contains(methods.Imports(), "github.com/gohugoio/hugo/common/herrors") + }) + + t.Run("EmbedOnly", func(t *testing.T) { + assert := require.New(t) + + methods := c.MethodsFromTypes([]reflect.Type{zeroIEOnly}, nil) + + methodsStr := fmt.Sprint(methods) + + assert.Contains(methodsStr, "MethodEmbed3(arg0 string) string") + + }) + + t.Run("ToMarshalJSON", func(t *testing.T) { + assert := require.New(t) + + m, pkg := c.MethodsFromTypes( + []reflect.Type{zeroI}, + []reflect.Type{zeroIE}).ToMarshalJSON("*page", "page") + + assert.Contains(m, "method6 := p.Method6()") + assert.Contains(m, "Method0: method0,") + assert.Contains(m, "return json.Marshal(&s)") + + assert.Contains(pkg, "github.com/gohugoio/hugo/common/herrors") + assert.Contains(pkg, "encoding/json") + + fmt.Println(pkg) + + }) + +} + +type I interface { + IEmbed + Method0() string + Method4() string + Method1(myerr herrors.ErrorContext) + Method3(myint int, mystring string) + Method5() (string, error) + Method6() *net.IP + Method7() interface{} + Method8() herrors.ErrorContext + method2() + method9() os.FileInfo +} + +type IEOnly interface { + IEmbed +} diff --git a/commands/commandeer.go b/commands/commandeer.go index 8abb6418d7c..14f5b9c7332 100644 --- a/commands/commandeer.go +++ b/commands/commandeer.go @@ -357,6 +357,10 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error { c.changeDetector = changeDetector } + if c.Cfg.GetBool("logPathWarnings") { + fs.Destination = hugofs.NewCreateCountingFs(fs.Destination) + } + err = c.initFs(fs) if err != nil { return diff --git a/commands/commands.go b/commands/commands.go index 38291fd958a..f41bb0a5818 100644 --- a/commands/commands.go +++ b/commands/commands.go @@ -23,7 +23,6 @@ import ( "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/helpers" "github.com/spf13/cobra" - "github.com/spf13/nitro" ) type commandsBuilder struct { @@ -197,6 +196,12 @@ type hugoBuilderCommon struct { gc bool + // Profile flags (for debugging of performance problems) + cpuprofile string + memprofile string + mutexprofile string + traceprofile string + // TODO(bep) var vs string logging bool verbose bool @@ -255,13 +260,22 @@ func (cc *hugoBuilderCommon) handleFlags(cmd *cobra.Command) { cmd.Flags().Bool("enableGitInfo", false, "add Git revision, date and author info to the pages") cmd.Flags().BoolVar(&cc.gc, "gc", false, "enable to run some cleanup tasks (remove unused cache files) after the build") - cmd.Flags().BoolVar(&nitro.AnalysisOn, "stepAnalysis", false, "display memory and timing of different steps of the program") cmd.Flags().Bool("templateMetrics", false, "display metrics about template executions") 
cmd.Flags().Bool("templateMetricsHints", false, "calculate some improvement hints when combined with --templateMetrics") cmd.Flags().BoolP("forceSyncStatic", "", false, "copy all files when static is changed.") cmd.Flags().BoolP("noTimes", "", false, "don't sync modification time of files") cmd.Flags().BoolP("noChmod", "", false, "don't sync permission mode of files") cmd.Flags().BoolP("i18n-warnings", "", false, "print missing translations") + cmd.Flags().BoolP("path-warnings", "", false, "print warnings on duplicate target paths etc.") + cmd.Flags().StringVarP(&cc.cpuprofile, "profile-cpu", "", "", "write cpu profile to `file`") + cmd.Flags().StringVarP(&cc.memprofile, "profile-mem", "", "", "write memory profile to `file`") + cmd.Flags().StringVarP(&cc.mutexprofile, "profile-mutex", "", "", "write Mutex profile to `file`") + cmd.Flags().StringVarP(&cc.traceprofile, "trace", "", "", "write trace to `file` (not useful in general)") + + // Hide these for now. + cmd.Flags().MarkHidden("profile-cpu") + cmd.Flags().MarkHidden("profile-mem") + cmd.Flags().MarkHidden("profile-mutex") cmd.Flags().StringSlice("disableKinds", []string{}, "disable different kind of pages (home, RSS etc.)") diff --git a/commands/commands_test.go b/commands/commands_test.go index 2e8b99dc413..6e13f4cc984 100644 --- a/commands/commands_test.go +++ b/commands/commands_test.go @@ -41,7 +41,7 @@ func TestExecute(t *testing.T) { assert.NoError(resp.Err) result := resp.Result assert.True(len(result.Sites) == 1) - assert.True(len(result.Sites[0].RegularPages) == 1) + assert.True(len(result.Sites[0].RegularPages()) == 1) } func TestCommandsPersistentFlags(t *testing.T) { @@ -75,6 +75,7 @@ func TestCommandsPersistentFlags(t *testing.T) { "--port=1366", "--renderToDisk", "--source=mysource", + "--path-warnings", }, func(commands []cmder) { var sc *serverCmd for _, command := range commands { @@ -112,6 +113,9 @@ func TestCommandsPersistentFlags(t *testing.T) { assert.True(cfg.GetBool("gc")) + // The flag is named path-warnings + assert.True(cfg.GetBool("logPathWarnings")) + // The flag is named i18n-warnings assert.True(cfg.GetBool("logI18nWarnings")) diff --git a/commands/convert.go b/commands/convert.go index c4f88a24537..88410d9bd59 100644 --- a/commands/convert.go +++ b/commands/convert.go @@ -20,6 +20,8 @@ import ( "strings" "time" + "github.com/gohugoio/hugo/resources/page" + "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/helpers" @@ -124,8 +126,8 @@ func (cc *convertCmd) convertContents(format metadecoders.Format) error { site := h.Sites[0] - site.Log.FEEDBACK.Println("processing", len(site.AllPages), "content files") - for _, p := range site.AllPages { + site.Log.FEEDBACK.Println("processing", len(site.AllPages()), "content files") + for _, p := range site.AllPages() { if err := cc.convertAndSavePage(p, site, format); err != nil { return err } @@ -133,24 +135,24 @@ func (cc *convertCmd) convertContents(format metadecoders.Format) error { return nil } -func (cc *convertCmd) convertAndSavePage(p *hugolib.Page, site *hugolib.Site, targetFormat metadecoders.Format) error { +func (cc *convertCmd) convertAndSavePage(p page.Page, site *hugolib.Site, targetFormat metadecoders.Format) error { // The resources are not in .Site.AllPages. 
- for _, r := range p.Resources.ByType("page") { - if err := cc.convertAndSavePage(r.(*hugolib.Page), site, targetFormat); err != nil { + for _, r := range p.Resources().ByType("page") { + if err := cc.convertAndSavePage(r.(page.Page), site, targetFormat); err != nil { return err } } - if p.Filename() == "" { + if p.File() == nil { // No content file. return nil } errMsg := fmt.Errorf("Error processing file %q", p.Path()) - site.Log.INFO.Println("Attempting to convert", p.LogicalName()) + site.Log.INFO.Println("Attempting to convert", p.File().Filename()) - f, _ := p.File.(src.ReadableFile) + f, _ := p.File().(src.ReadableFile) file, err := f.Open() if err != nil { site.Log.ERROR.Println(errMsg) @@ -186,7 +188,7 @@ func (cc *convertCmd) convertAndSavePage(p *hugolib.Page, site *hugolib.Site, ta newContent.Write(pf.content) - newFilename := p.Filename() + newFilename := p.File().Filename() if cc.outputDir != "" { contentDir := strings.TrimSuffix(newFilename, p.Path()) diff --git a/commands/hugo.go b/commands/hugo.go index 3690c0ad519..571a1eec466 100644 --- a/commands/hugo.go +++ b/commands/hugo.go @@ -18,11 +18,16 @@ package commands import ( "fmt" "io/ioutil" - "os/signal" + "runtime/pprof" + "runtime/trace" "sort" "sync/atomic" + "github.com/gohugoio/hugo/hugofs" + + "github.com/gohugoio/hugo/resources/page" + "github.com/gohugoio/hugo/common/hugo" "github.com/pkg/errors" @@ -214,6 +219,7 @@ func initializeFlags(cmd *cobra.Command, cfg config.Provider) { "themesDir", "verbose", "verboseLog", + "duplicateTargetPaths", } // Will set a value even if it is the default. @@ -235,6 +241,7 @@ func initializeFlags(cmd *cobra.Command, cfg config.Provider) { // Set some "config aliases" setValueFromFlag(cmd.Flags(), "destination", cfg, "publishDir", false) setValueFromFlag(cmd.Flags(), "i18n-warnings", cfg, "logI18nWarnings", false) + setValueFromFlag(cmd.Flags(), "path-warnings", cfg, "logPathWarnings", false) } @@ -290,6 +297,7 @@ func (c *commandeer) fullBuild() error { } copyStaticFunc := func() error { + cnt, err := c.copyStatic() if err != nil { if !os.IsNotExist(err) { @@ -326,7 +334,7 @@ func (c *commandeer) fullBuild() error { } for _, s := range c.hugo.Sites { - s.ProcessingStats.Static = langCount[s.Language.Lang] + s.ProcessingStats.Static = langCount[s.Language().Lang] } if c.h.gc { @@ -344,9 +352,125 @@ func (c *commandeer) fullBuild() error { } +func (c *commandeer) initCPUProfile() (func(), error) { + if c.h.cpuprofile == "" { + return nil, nil + } + + f, err := os.Create(c.h.cpuprofile) + if err != nil { + return nil, errors.Wrap(err, "failed to create CPU profile") + } + if err := pprof.StartCPUProfile(f); err != nil { + return nil, errors.Wrap(err, "failed to start CPU profile") + } + return func() { + pprof.StopCPUProfile() + f.Close() + }, nil +} + +func (c *commandeer) initMemProfile() { + if c.h.memprofile == "" { + return + } + + f, err := os.Create(c.h.memprofile) + if err != nil { + c.logger.ERROR.Println("could not create memory profile: ", err) + } + defer f.Close() + runtime.GC() // get up-to-date statistics + if err := pprof.WriteHeapProfile(f); err != nil { + c.logger.ERROR.Println("could not write memory profile: ", err) + } +} + +func (c *commandeer) initTraceProfile() (func(), error) { + if c.h.traceprofile == "" { + return nil, nil + } + + f, err := os.Create(c.h.traceprofile) + if err != nil { + return nil, errors.Wrap(err, "failed to create trace file") + } + + if err := trace.Start(f); err != nil { + return nil, errors.Wrap(err, "failed to start trace") + } + + 
return func() { + trace.Stop() + f.Close() + }, nil +} + +func (c *commandeer) initMutexProfile() (func(), error) { + if c.h.mutexprofile == "" { + return nil, nil + } + + f, err := os.Create(c.h.mutexprofile) + if err != nil { + return nil, err + } + + runtime.SetMutexProfileFraction(1) + + return func() { + pprof.Lookup("mutex").WriteTo(f, 0) + f.Close() + }, nil + +} + +func (c *commandeer) initProfiling() (func(), error) { + stopCPUProf, err := c.initCPUProfile() + if err != nil { + return nil, err + } + + defer c.initMemProfile() + + stopMutexProf, err := c.initMutexProfile() + if err != nil { + return nil, err + } + + stopTraceProf, err := c.initTraceProfile() + if err != nil { + return nil, err + } + + return func() { + if stopCPUProf != nil { + stopCPUProf() + } + if stopMutexProf != nil { + stopMutexProf() + } + + if stopTraceProf != nil { + stopTraceProf() + } + }, nil +} + func (c *commandeer) build() error { defer c.timeTrack(time.Now(), "Total") + stopProfiling, err := c.initProfiling() + if err != nil { + return err + } + + defer func() { + if stopProfiling != nil { + stopProfiling() + } + }() + if err := c.fullBuild(); err != nil { return err } @@ -356,6 +480,13 @@ func (c *commandeer) build() error { fmt.Println() c.hugo.PrintProcessingStats(os.Stdout) fmt.Println() + + if createCounter, ok := c.destinationFs.(hugofs.DuplicatesReporter); ok { + dupes := createCounter.ReportDuplicates() + if dupes != "" { + c.logger.WARN.Println("Duplicate target paths:", dupes) + } + } } if c.h.buildWatch { @@ -369,7 +500,7 @@ func (c *commandeer) build() error { checkErr(c.Logger, err) defer watcher.Close() - var sigs = make(chan os.Signal) + var sigs = make(chan os.Signal, 1) signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) <-sigs @@ -381,6 +512,17 @@ func (c *commandeer) build() error { func (c *commandeer) serverBuild() error { defer c.timeTrack(time.Now(), "Total") + stopProfiling, err := c.initProfiling() + if err != nil { + return err + } + + defer func() { + if stopProfiling != nil { + stopProfiling() + } + }() + if err := c.fullBuild(); err != nil { return err } @@ -474,11 +616,9 @@ func (c *commandeer) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint6 } c.logger.INFO.Println("syncing static files to", publishDir) - var err error - // because we are using a baseFs (to get the union right). // set sync src to root - err = syncer.Sync(publishDir, helpers.FilePathSeparator) + err := syncer.Sync(publishDir, helpers.FilePathSeparator) if err != nil { return 0, err } @@ -619,13 +759,6 @@ func (c *commandeer) getDirList() ([]string, error) { return a, nil } -func (c *commandeer) resetAndBuildSites() (err error) { - if !c.h.quiet { - c.logger.FEEDBACK.Println("Started building sites ...") - } - return c.hugo.Build(hugolib.BuildCfg{ResetState: true}) -} - func (c *commandeer) buildSites() (err error) { return c.hugo.Build(hugolib.BuildCfg{}) } @@ -973,7 +1106,7 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher, navigate := c.Cfg.GetBool("navigateToChanged") // We have fetched the same page above, but it may have // changed. 
- var p *hugolib.Page + var p page.Page if navigate { if onePageName != "" { @@ -982,7 +1115,7 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher, } if p != nil { - livereload.NavigateToPathForPort(p.RelPermalink(), p.Site.ServerPort()) + livereload.NavigateToPathForPort(p.RelPermalink(), p.Site().ServerPort()) } else { livereload.ForceRefresh() } @@ -1044,9 +1177,11 @@ func (c *commandeer) isThemeVsHugoVersionMismatch(fs afero.Fs) (dir string, mism } b, err := afero.ReadFile(fs, path) + if err != nil { + continue + } tomlMeta, err := metadecoders.Default.UnmarshalToMap(b, metadecoders.TOML) - if err != nil { continue } diff --git a/commands/import_jekyll.go b/commands/import_jekyll.go index d3301b48f2f..634bfed71ce 100644 --- a/commands/import_jekyll.go +++ b/commands/import_jekyll.go @@ -340,7 +340,7 @@ func copyDir(source string, dest string) error { if err != nil { return err } - entries, err := ioutil.ReadDir(source) + entries, _ := ioutil.ReadDir(source) for _, entry := range entries { sfp := filepath.Join(source, entry.Name()) dfp := filepath.Join(dest, entry.Name()) @@ -373,6 +373,10 @@ func (i *importCmd) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPos return err } entries, err := ioutil.ReadDir(jekyllRoot) + if err != nil { + return err + } + for _, entry := range entries { sfp := filepath.Join(jekyllRoot, entry.Name()) dfp := filepath.Join(dest, entry.Name()) @@ -464,7 +468,7 @@ func convertJekyllPost(s *hugolib.Site, path, relPath, targetDir string, draft b fs := hugofs.Os if err := helpers.WriteToDisk(targetFile, strings.NewReader(content), fs); err != nil { - return fmt.Errorf("Failed to save file %q:", filename) + return fmt.Errorf("failed to save file %q: %s", filename, err) } return nil diff --git a/commands/list.go b/commands/list.go index 9922e957df8..5f3366fd025 100644 --- a/commands/list.go +++ b/commands/list.go @@ -17,6 +17,7 @@ import ( "path/filepath" "github.com/gohugoio/hugo/hugolib" + "github.com/gohugoio/hugo/resources/resource" "github.com/spf13/cobra" jww "github.com/spf13/jwalterweatherman" ) @@ -67,7 +68,7 @@ List requires a subcommand, e.g. 
` + "`hugo list drafts`.", for _, p := range sites.Pages() { if p.IsDraft() { - jww.FEEDBACK.Println(filepath.Join(p.File.Dir(), p.File.LogicalName())) + jww.FEEDBACK.Println(filepath.Join(p.File().Dir(), p.File().LogicalName())) } } @@ -102,8 +103,8 @@ posted in the future.`, } for _, p := range sites.Pages() { - if p.IsFuture() { - jww.FEEDBACK.Println(filepath.Join(p.File.Dir(), p.File.LogicalName())) + if resource.IsFuture(p) { + jww.FEEDBACK.Println(filepath.Join(p.File().Dir(), p.File().LogicalName())) } } @@ -138,8 +139,8 @@ expired.`, } for _, p := range sites.Pages() { - if p.IsExpired() { - jww.FEEDBACK.Println(filepath.Join(p.File.Dir(), p.File.LogicalName())) + if resource.IsExpired(p) { + jww.FEEDBACK.Println(filepath.Join(p.File().Dir(), p.File().LogicalName())) } } diff --git a/commands/new_content_test.go b/commands/new_content_test.go index fb8bca7b4cc..ecdf2ac3da0 100644 --- a/commands/new_content_test.go +++ b/commands/new_content_test.go @@ -62,7 +62,7 @@ func TestDoNewSite_noerror_base_exists_but_empty(t *testing.T) { _, fs := newTestCfg() n := newNewSiteCmd() - require.NoError(t, fs.Source.MkdirAll(basepath, 777)) + require.NoError(t, fs.Source.MkdirAll(basepath, 0777)) require.NoError(t, n.doNewSite(fs, basepath, false)) } @@ -72,7 +72,7 @@ func TestDoNewSite_error_base_exists(t *testing.T) { _, fs := newTestCfg() n := newNewSiteCmd() - require.NoError(t, fs.Source.MkdirAll(basepath, 777)) + require.NoError(t, fs.Source.MkdirAll(basepath, 0777)) _, err := fs.Source.Create(filepath.Join(basepath, "foo")) require.NoError(t, err) // Since the directory already exists and isn't empty, expect an error @@ -85,7 +85,7 @@ func TestDoNewSite_force_empty_dir(t *testing.T) { _, fs := newTestCfg() n := newNewSiteCmd() - require.NoError(t, fs.Source.MkdirAll(basepath, 777)) + require.NoError(t, fs.Source.MkdirAll(basepath, 0777)) require.NoError(t, n.doNewSite(fs, basepath, true)) @@ -99,7 +99,7 @@ func TestDoNewSite_error_force_dir_inside_exists(t *testing.T) { contentPath := filepath.Join(basepath, "content") - require.NoError(t, fs.Source.MkdirAll(contentPath, 777)) + require.NoError(t, fs.Source.MkdirAll(contentPath, 0777)) require.Error(t, n.doNewSite(fs, basepath, true)) } @@ -109,7 +109,7 @@ func TestDoNewSite_error_force_config_inside_exists(t *testing.T) { n := newNewSiteCmd() configPath := filepath.Join(basepath, "config.toml") - require.NoError(t, fs.Source.MkdirAll(basepath, 777)) + require.NoError(t, fs.Source.MkdirAll(basepath, 0777)) _, err := fs.Source.Create(configPath) require.NoError(t, err) diff --git a/commands/server.go b/commands/server.go index c2bd76dae54..c5041d7728b 100644 --- a/commands/server.go +++ b/commands/server.go @@ -358,7 +358,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, string, string, erro if err := f.c.partialReRender(p); err != nil { f.c.handleBuildErr(err, fmt.Sprintf("Failed to render %q", p)) if f.c.showErrorInBrowser { - http.Redirect(w, r, p, 301) + http.Redirect(w, r, p, http.StatusMovedPermanently) return } } @@ -386,7 +386,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, string, string, erro return mu, u.String(), endpoint, nil } -var logErrorRe = regexp.MustCompile("(?s)ERROR \\d{4}/\\d{2}/\\d{2} \\d{2}:\\d{2}:\\d{2} ") +var logErrorRe = regexp.MustCompile(`(?s)ERROR \d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} `) func removeErrorPrefixFromLog(content string) string { return logErrorRe.ReplaceAllLiteralString(content, "") @@ -403,7 +403,7 @@ func (c *commandeer) serve(s *serverCmd) error { if isMultiHost 
{ for _, s := range c.hugo.Sites { baseURLs = append(baseURLs, s.BaseURL.String()) - roots = append(roots, s.Language.Lang) + roots = append(roots, s.Language().Lang) } } else { s := c.hugo.Sites[0] @@ -430,7 +430,7 @@ func (c *commandeer) serve(s *serverCmd) error { livereload.Initialize() } - var sigs = make(chan os.Signal) + var sigs = make(chan os.Signal, 1) signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) for i := range baseURLs { diff --git a/common/collections/append.go b/common/collections/append.go index b9a9419cb62..1dfeff4e3b9 100644 --- a/common/collections/append.go +++ b/common/collections/append.go @@ -92,9 +92,7 @@ func appendToInterfaceSlice(tov reflect.Value, from ...interface{}) ([]interface tos = append(tos, tov.Index(i).Interface()) } - for _, v := range from { - tos = append(tos, v) - } + tos = append(tos, from...) return tos, nil } diff --git a/common/collections/slice_test.go b/common/collections/slice_test.go index 1103e2feaa2..b18dd36a7a3 100644 --- a/common/collections/slice_test.go +++ b/common/collections/slice_test.go @@ -75,11 +75,11 @@ func (p *tstSlicerIn2) Slice(in interface{}) (interface{}, error) { } func (p *tstSlicerIn1) Name() string { - return p.Name() + return p.name } func (p *tstSlicerIn2) Name() string { - return p.Name() + return p.name } func (p *tstSlicer) Slice(in interface{}) (interface{}, error) { diff --git a/common/hugio/readers.go b/common/hugio/readers.go index ba55e2d08da..92c5ba8151c 100644 --- a/common/hugio/readers.go +++ b/common/hugio/readers.go @@ -32,6 +32,7 @@ type ReadSeekCloser interface { } // ReadSeekerNoOpCloser implements ReadSeekCloser by doing nothing in Close. +// TODO(bep) rename this and simila to ReadSeekerNopCloser, naming used in stdlib, which kind of makes sense. type ReadSeekerNoOpCloser struct { ReadSeeker } diff --git a/common/maps/scratch.go b/common/maps/scratch.go index 2972e202200..6862a8f8455 100644 --- a/common/maps/scratch.go +++ b/common/maps/scratch.go @@ -28,6 +28,24 @@ type Scratch struct { mu sync.RWMutex } +// Scratcher provides a scratching service. +type Scratcher interface { + Scratch() *Scratch +} + +type scratcher struct { + s *Scratch +} + +func (s scratcher) Scratch() *Scratch { + return s.s +} + +// NewScratcher creates a new Scratcher. +func NewScratcher() Scratcher { + return scratcher{s: NewScratch()} +} + // Add will, for single values, add (using the + operator) the addend to the existing addend (if found). // Supports numeric values and strings. // diff --git a/common/types/types.go b/common/types/types.go index 95e72d99b6c..414e50c11a2 100644 --- a/common/types/types.go +++ b/common/types/types.go @@ -16,6 +16,7 @@ package types import ( "fmt" + "reflect" "github.com/spf13/cast" ) @@ -56,3 +57,18 @@ func NewKeyValuesStrings(key string, values ...string) KeyValues { type Zeroer interface { IsZero() bool } + +// IsNil reports whether v is nil. +func IsNil(v interface{}) bool { + if v == nil { + return true + } + + value := reflect.ValueOf(v) + switch value.Kind() { + case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: + return value.IsNil() + } + + return false +} diff --git a/config/configProvider.go b/config/configProvider.go index bc0dd950d7a..89cfe4359e1 100644 --- a/config/configProvider.go +++ b/config/configProvider.go @@ -40,3 +40,15 @@ func GetStringSlicePreserveString(cfg Provider, key string) []string { } return cast.ToStringSlice(sd) } + +// SetBaseTestDefaults provides some common config defaults used in tests. 
+func SetBaseTestDefaults(cfg Provider) { + cfg.Set("resourceDir", "resources") + cfg.Set("contentDir", "content") + cfg.Set("dataDir", "data") + cfg.Set("i18nDir", "i18n") + cfg.Set("layoutDir", "layouts") + cfg.Set("assetDir", "assets") + cfg.Set("archetypeDir", "archetypes") + cfg.Set("publishDir", "public") +} diff --git a/config/services/servicesConfig.go b/config/services/servicesConfig.go index 7306f527483..871ffcac9d6 100644 --- a/config/services/servicesConfig.go +++ b/config/services/servicesConfig.go @@ -23,6 +23,7 @@ const ( disqusShortnameKey = "disqusshortname" googleAnalyticsKey = "googleanalytics" + rssLimitKey = "rssLimit" ) // Config is a privacy configuration for all the relevant services in Hugo. @@ -31,6 +32,7 @@ type Config struct { GoogleAnalytics GoogleAnalytics Instagram Instagram Twitter Twitter + RSS RSS } // Disqus holds the functional configuration settings related to the Disqus template. @@ -61,6 +63,12 @@ type Twitter struct { DisableInlineCSS bool } +// RSS holds the functional configuration settings related to the RSS feeds. +type RSS struct { + // Limit the number of pages. + Limit int +} + // DecodeConfig creates a services Config from a given Hugo configuration. func DecodeConfig(cfg config.Provider) (c Config, err error) { m := cfg.GetStringMap(servicesConfigKey) @@ -76,5 +84,9 @@ func DecodeConfig(cfg config.Provider) (c Config, err error) { c.Disqus.Shortname = cfg.GetString(disqusShortnameKey) } + if c.RSS.Limit == 0 { + c.RSS.Limit = cfg.GetInt(rssLimitKey) + } + return } diff --git a/hugolib/sitemap.go b/config/sitemap.go similarity index 73% rename from hugolib/sitemap.go rename to config/sitemap.go index 64d6f5b7a75..4031b7ec115 100644 --- a/hugolib/sitemap.go +++ b/config/sitemap.go @@ -1,4 +1,4 @@ -// Copyright 2015 The Hugo Authors. All rights reserved. +// Copyright 2019 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package hugolib +package config import ( "github.com/spf13/cast" @@ -25,21 +25,20 @@ type Sitemap struct { Filename string } -func parseSitemap(input map[string]interface{}) Sitemap { - sitemap := Sitemap{Priority: -1, Filename: "sitemap.xml"} +func DecodeSitemap(prototype Sitemap, input map[string]interface{}) Sitemap { for key, value := range input { switch key { case "changefreq": - sitemap.ChangeFreq = cast.ToString(value) + prototype.ChangeFreq = cast.ToString(value) case "priority": - sitemap.Priority = cast.ToFloat64(value) + prototype.Priority = cast.ToFloat64(value) case "filename": - sitemap.Filename = cast.ToString(value) + prototype.Filename = cast.ToString(value) default: jww.WARN.Printf("Unknown Sitemap field: %s\n", key) } } - return sitemap + return prototype } diff --git a/create/content.go b/create/content.go index 31b7b2e4d70..8ac075ac3ee 100644 --- a/create/content.go +++ b/create/content.go @@ -50,7 +50,7 @@ func NewContent( if isDir { - langFs := hugofs.NewLanguageFs(s.Language.Lang, sites.LanguageSet(), archetypeFs) + langFs := hugofs.NewLanguageFs(s.Language().Lang, sites.LanguageSet(), archetypeFs) cm, err := mapArcheTypeDir(ps, langFs, archetypeFilename) if err != nil { @@ -113,7 +113,7 @@ func NewContent( func targetSite(sites *hugolib.HugoSites, fi *hugofs.LanguageFileInfo) *hugolib.Site { for _, s := range sites.Sites { - if fi.Lang() == s.Language.Lang { + if fi.Lang() == s.Language().Lang { return s } } @@ -245,7 +245,7 @@ func resolveContentPath(sites *hugolib.HugoSites, fs afero.Fs, targetPath string // Try the filename: my-post.en.md for _, ss := range sites.Sites { - if strings.Contains(targetPath, "."+ss.Language.Lang+".") { + if strings.Contains(targetPath, "."+ss.Language().Lang+".") { s = ss break } diff --git a/deps/deps.go b/deps/deps.go index 628019961bc..47159d017c2 100644 --- a/deps/deps.go +++ b/deps/deps.go @@ -7,13 +7,14 @@ import ( "github.com/pkg/errors" "github.com/gohugoio/hugo/cache/filecache" - "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/langs" "github.com/gohugoio/hugo/media" + "github.com/gohugoio/hugo/resources/page" + "github.com/gohugoio/hugo/metrics" "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/resources" @@ -67,7 +68,7 @@ type Deps struct { Language *langs.Language // The site building. - Site hugo.Site + Site page.Site // All the output formats available for the current site. OutputFormatsConfig output.Formats @@ -325,7 +326,7 @@ type DepsCfg struct { Language *langs.Language // The Site in use - Site hugo.Site + Site page.Site // The configuration to use. Cfg config.Provider diff --git a/docs/content/en/variables/page.md b/docs/content/en/variables/page.md index 9dcbdcc435e..c4ddc820040 100644 --- a/docs/content/en/variables/page.md +++ b/docs/content/en/variables/page.md @@ -79,8 +79,7 @@ See [`.Scratch`](/functions/scratch/) for page-scoped, writable variables. : the page's *kind*. Possible return values are `page`, `home`, `section`, `taxonomy`, or `taxonomyTerm`. Note that there are also `RSS`, `sitemap`, `robotsTXT`, and `404` kinds, but these are only available during the rendering of each of these respective page's kind and therefore *not* available in any of the `Pages` collections. .Language -: a language object that points to the language's definition in the site -`config`. 
+: a language object that points to the language's definition in the site `config`. `.Language.Lang` gives you the language code. .Lastmod : the date the content was last modified. `.Lastmod` pulls from the `lastmod` field in a content's front matter. @@ -93,10 +92,7 @@ See also `.ExpiryDate`, `.Date`, `.PublishDate`, and [`.GitInfo`][gitinfo]. .LinkTitle : access when creating links to the content. If set, Hugo will use the `linktitle` from the front matter before `title`. -.Next (deprecated) -: In older Hugo versions this pointer went the wrong direction. Please use `.PrevPage` instead. - -.NextPage +.Next : Pointer to the next [regular page](/variables/site/#site-pages) (sorted by Hugo's [default sort](/templates/lists#default-weight-date-linktitle-filepath)). Example: `{{if .NextPage}}{{.NextPage.Permalink}}{{end}}`. .NextInSection @@ -119,9 +115,6 @@ See also `.ExpiryDate`, `.Date`, `.PublishDate`, and [`.GitInfo`][gitinfo]. : the Page content stripped of HTML as a `[]string` using Go's [`strings.Fields`](https://golang.org/pkg/strings/#Fields) to split `.Plain` into a slice. .Prev (deprecated) -: In older Hugo versions this pointer went the wrong direction. Please use `.NextPage` instead. - -.PrevPage : Pointer to the previous [regular page](/variables/site/#site-pages) (sorted by Hugo's [default sort](/templates/lists#default-weight-date-linktitle-filepath)). Example: `{{if .PrevPage}}{{.PrevPage.Permalink}}{{end}}`. .PrevInSection @@ -130,8 +123,8 @@ See also `.ExpiryDate`, `.Date`, `.PublishDate`, and [`.GitInfo`][gitinfo]. .PublishDate : the date on which the content was or will be published; `.Publishdate` pulls from the `publishdate` field in a content's front matter. See also `.ExpiryDate`, `.Date`, and `.Lastmod`. -.RSSLink -: link to the taxonomies' RSS link. +.RSSLink (deprecated) +: link to the page's RSS feed. This is deprecated. You should instead do something like this: `{{ with .OutputFormats.Get "RSS" }}{{ . RelPermalink }}{{ end }}`. .RawContent : raw markdown content without the front matter. 
Useful with [remarkjs.com]( diff --git a/go.mod b/go.mod index 63228a46d8f..44520ca6130 100644 --- a/go.mod +++ b/go.mod @@ -44,7 +44,6 @@ require ( github.com/spf13/cobra v0.0.3 github.com/spf13/fsync v0.0.0-20170320142552-12a01e648f05 github.com/spf13/jwalterweatherman v1.1.0 - github.com/spf13/nitro v0.0.0-20131003134307-24d7ef30a12d github.com/spf13/pflag v1.0.3 github.com/spf13/viper v1.3.2 github.com/stretchr/testify v1.3.0 diff --git a/go.sum b/go.sum index b0646e88cfb..b68f6306858 100644 --- a/go.sum +++ b/go.sum @@ -126,8 +126,6 @@ github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9 github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= -github.com/spf13/nitro v0.0.0-20131003134307-24d7ef30a12d h1:ihvj2nmx8eqWjlgNgdW6h0DyGJuq5GiwHadJkG0wXtQ= -github.com/spf13/nitro v0.0.0-20131003134307-24d7ef30a12d/go.mod h1:jU8A+8xL+6n1OX4XaZtCj4B3mIa64tULUsD6YegdpFo= github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/viper v1.3.2 h1:VUFqw5KcqRf7i70GOzW7N+Q7+gxVBkSSqiXB12+JQ4M= diff --git a/helpers/content.go b/helpers/content.go index 644942cb144..327ebab885b 100644 --- a/helpers/content.go +++ b/helpers/content.go @@ -57,7 +57,7 @@ type ContentSpec struct { Highlight func(code, lang, optsStr string) (string, error) defatultPygmentsOpts map[string]string - cfg config.Provider + Cfg config.Provider } // NewContentSpec returns a ContentSpec initialized @@ -73,7 +73,7 @@ func NewContentSpec(cfg config.Provider) (*ContentSpec, error) { BuildExpired: cfg.GetBool("buildExpired"), BuildDrafts: cfg.GetBool("buildDrafts"), - cfg: cfg, + Cfg: cfg, } // Highlighting setup @@ -382,7 +382,7 @@ func (c *ContentSpec) getMmarkHTMLRenderer(defaultFlags int, ctx *RenderingConte return &HugoMmarkHTMLRenderer{ cs: c, Renderer: mmark.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters), - Cfg: c.cfg, + Cfg: c.Cfg, } } diff --git a/helpers/content_renderer_test.go b/helpers/content_renderer_test.go index a01014b4eb3..db61cbaeffa 100644 --- a/helpers/content_renderer_test.go +++ b/helpers/content_renderer_test.go @@ -24,7 +24,7 @@ import ( // Renders a codeblock using Blackfriday func (c ContentSpec) render(input string) string { - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} render := c.getHTMLRenderer(0, ctx) buf := &bytes.Buffer{} @@ -34,7 +34,7 @@ func (c ContentSpec) render(input string) string { // Renders a codeblock using Mmark func (c ContentSpec) renderWithMmark(input string) string { - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} render := c.getMmarkHTMLRenderer(0, ctx) buf := &bytes.Buffer{} diff --git a/helpers/content_test.go b/helpers/content_test.go index 5297df2de2a..6971a8fc8b0 100644 --- a/helpers/content_test.go +++ b/helpers/content_test.go @@ -181,7 +181,7 @@ func TestTruncateWordsByRune(t *testing.T) { func TestGetHTMLRendererFlags(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} renderer := c.getHTMLRenderer(blackfriday.HTML_USE_XHTML, 
ctx) flags := renderer.GetFlags() if flags&blackfriday.HTML_USE_XHTML != blackfriday.HTML_USE_XHTML { @@ -210,7 +210,7 @@ func TestGetHTMLRendererAllFlags(t *testing.T) { {blackfriday.HTML_SMARTYPANTS_LATEX_DASHES}, } defaultFlags := blackfriday.HTML_USE_XHTML - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.Config.AngledQuotes = true ctx.Config.Fractions = true ctx.Config.HrefTargetBlank = true @@ -235,7 +235,7 @@ func TestGetHTMLRendererAllFlags(t *testing.T) { func TestGetHTMLRendererAnchors(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.DocumentID = "testid" ctx.Config.PlainIDAnchors = false @@ -259,7 +259,7 @@ func TestGetHTMLRendererAnchors(t *testing.T) { func TestGetMmarkHTMLRenderer(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.DocumentID = "testid" ctx.Config.PlainIDAnchors = false actualRenderer := c.getMmarkHTMLRenderer(0, ctx) @@ -283,7 +283,7 @@ func TestGetMmarkHTMLRenderer(t *testing.T) { func TestGetMarkdownExtensionsMasksAreRemovedFromExtensions(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.Config.Extensions = []string{"headerId"} ctx.Config.ExtensionsMask = []string{"noIntraEmphasis"} @@ -298,7 +298,7 @@ func TestGetMarkdownExtensionsByDefaultAllExtensionsAreEnabled(t *testing.T) { testFlag int } c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.Config.Extensions = []string{""} ctx.Config.ExtensionsMask = []string{""} allExtensions := []data{ @@ -330,7 +330,7 @@ func TestGetMarkdownExtensionsByDefaultAllExtensionsAreEnabled(t *testing.T) { func TestGetMarkdownExtensionsAddingFlagsThroughRenderingContext(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.Config.Extensions = []string{"definitionLists"} ctx.Config.ExtensionsMask = []string{""} @@ -342,7 +342,7 @@ func TestGetMarkdownExtensionsAddingFlagsThroughRenderingContext(t *testing.T) { func TestGetMarkdownRenderer(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.Content = []byte("testContent") actualRenderedMarkdown := c.markdownRender(ctx) expectedRenderedMarkdown := []byte("

<p>testContent</p>
\n") @@ -353,7 +353,7 @@ func TestGetMarkdownRenderer(t *testing.T) { func TestGetMarkdownRendererWithTOC(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{RenderTOC: true, Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{RenderTOC: true, Cfg: c.Cfg, Config: c.BlackFriday} ctx.Content = []byte("testContent") actualRenderedMarkdown := c.markdownRender(ctx) expectedRenderedMarkdown := []byte("\n\n

<p>testContent</p>
\n") @@ -368,7 +368,7 @@ func TestGetMmarkExtensions(t *testing.T) { testFlag int } c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.Config.Extensions = []string{"tables"} ctx.Config.ExtensionsMask = []string{""} allExtensions := []data{ @@ -397,7 +397,7 @@ func TestGetMmarkExtensions(t *testing.T) { func TestMmarkRender(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.Content = []byte("testContent") actualRenderedMarkdown := c.mmarkRender(ctx) expectedRenderedMarkdown := []byte("

<p>testContent</p>
\n") diff --git a/helpers/general.go b/helpers/general.go index 00caf1ecc91..57146f3aeed 100644 --- a/helpers/general.go +++ b/helpers/general.go @@ -92,7 +92,7 @@ func GuessType(in string) string { return "org" } - return "unknown" + return "" } // FirstUpper returns a string with the first character as upper case. @@ -329,8 +329,7 @@ func Deprecated(object, item, alternative string, err bool) { DistinctErrorLog.Printf("%s's %s is deprecated and will be removed in Hugo %s. %s", object, item, hugo.CurrentVersion.Next().ReleaseVersion(), alternative) } else { - // Make sure the users see this while avoiding build breakage. This will not lead to an os.Exit(-1) - DistinctFeedbackLog.Printf("WARNING: %s's %s is deprecated and will be removed in a future release. %s", object, item, alternative) + DistinctWarnLog.Printf("%s's %s is deprecated and will be removed in a future release. %s", object, item, alternative) } } diff --git a/helpers/general_test.go b/helpers/general_test.go index 1279df43948..b6c7a14d968 100644 --- a/helpers/general_test.go +++ b/helpers/general_test.go @@ -42,7 +42,7 @@ func TestGuessType(t *testing.T) { {"html", "html"}, {"htm", "html"}, {"org", "org"}, - {"excel", "unknown"}, + {"excel", ""}, } { result := GuessType(this.in) if result != this.expect { @@ -166,6 +166,27 @@ var containsAdditionalTestData = []struct { {"", []byte(""), false}, } +func TestSliceToLower(t *testing.T) { + t.Parallel() + tests := []struct { + value []string + expected []string + }{ + {[]string{"a", "b", "c"}, []string{"a", "b", "c"}}, + {[]string{"a", "B", "c"}, []string{"a", "b", "c"}}, + {[]string{"A", "B", "C"}, []string{"a", "b", "c"}}, + } + + for _, test := range tests { + res := SliceToLower(test.value) + for i, val := range res { + if val != test.expected[i] { + t.Errorf("Case mismatch. Expected %s, got %s", test.expected[i], res[i]) + } + } + } +} + func TestReaderContains(t *testing.T) { for i, this := range append(containsBenchTestData, containsAdditionalTestData...) 
{ result := ReaderContains(strings.NewReader(this.v1), this.v2) diff --git a/helpers/path.go b/helpers/path.go index bf7e3bf9903..f3b04ce66a8 100644 --- a/helpers/path.go +++ b/helpers/path.go @@ -86,6 +86,13 @@ func (p *PathSpec) MakePath(s string) string { return p.UnicodeSanitize(s) } +// MakePathsSanitized applies MakePathSanitized on every item in the slice +func (p *PathSpec) MakePathsSanitized(paths []string) { + for i, path := range paths { + paths[i] = p.MakePathSanitized(path) + } +} + // MakePathSanitized creates a Unicode-sanitized string, with the spaces replaced func (p *PathSpec) MakePathSanitized(s string) string { if p.DisablePathToLower { diff --git a/helpers/pygments.go b/helpers/pygments.go index 4a90e353ded..abbbdce4cac 100644 --- a/helpers/pygments.go +++ b/helpers/pygments.go @@ -56,7 +56,7 @@ type highlighters struct { } func newHiglighters(cs *ContentSpec) highlighters { - return highlighters{cs: cs, ignoreCache: cs.cfg.GetBool("ignoreCache"), cacheDir: cs.cfg.GetString("cacheDir")} + return highlighters{cs: cs, ignoreCache: cs.Cfg.GetBool("ignoreCache"), cacheDir: cs.Cfg.GetString("cacheDir")} } func (h highlighters) chromaHighlight(code, lang, optsStr string) (string, error) { diff --git a/htesting/test_structs.go b/htesting/test_structs.go index f5aa6ff2513..b0660e66169 100644 --- a/htesting/test_structs.go +++ b/htesting/test_structs.go @@ -14,8 +14,13 @@ package htesting import ( + "html/template" + "time" + "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/langs" + "github.com/gohugoio/hugo/navigation" + "github.com/gohugoio/hugo/resources/page" "github.com/spf13/viper" ) @@ -28,6 +33,22 @@ func (t testSite) Hugo() hugo.Info { return t.h } +func (t testSite) ServerPort() int { + return 1313 +} + +func (testSite) LastChange() (t time.Time) { + return +} + +func (t testSite) Title() string { + return "foo" +} + +func (t testSite) Sites() page.Sites { + return nil +} + func (t testSite) IsServer() bool { return false } @@ -36,8 +57,36 @@ func (t testSite) Language() *langs.Language { return t.l } +func (t testSite) Pages() page.Pages { + return nil +} + +func (t testSite) RegularPages() page.Pages { + return nil +} + +func (t testSite) Menus() navigation.Menus { + return nil +} + +func (t testSite) Taxonomies() interface{} { + return nil +} + +func (t testSite) BaseURL() template.URL { + return "" +} + +func (t testSite) Params() map[string]interface{} { + return nil +} + +func (t testSite) Data() map[string]interface{} { + return nil +} + // NewTestHugoSite creates a new minimal test site. -func NewTestHugoSite() hugo.Site { +func NewTestHugoSite() page.Site { return testSite{ h: hugo.NewInfo(hugo.EnvironmentProduction), l: langs.NewLanguage("en", newTestConfig()), diff --git a/hugofs/createcounting_fs.go b/hugofs/createcounting_fs.go new file mode 100644 index 00000000000..802806b7af1 --- /dev/null +++ b/hugofs/createcounting_fs.go @@ -0,0 +1,99 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package hugofs + +import ( + "fmt" + "os" + "sort" + "strings" + "sync" + + "github.com/spf13/afero" +) + +// Reseter is implemented by some of the stateful filesystems. +type Reseter interface { + Reset() +} + +// DuplicatesReporter reports about duplicate filenames. +type DuplicatesReporter interface { + ReportDuplicates() string +} + +func NewCreateCountingFs(fs afero.Fs) afero.Fs { + return &createCountingFs{Fs: fs, fileCount: make(map[string]int)} +} + +// ReportDuplicates reports filenames written more than once. +func (c *createCountingFs) ReportDuplicates() string { + c.mu.Lock() + defer c.mu.Unlock() + + var dupes []string + + for k, v := range c.fileCount { + if v > 1 { + dupes = append(dupes, fmt.Sprintf("%s (%d)", k, v)) + } + } + + if len(dupes) == 0 { + return "" + } + + sort.Strings(dupes) + + return strings.Join(dupes, ", ") +} + +// createCountingFs counts filenames of created files or files opened +// for writing. +type createCountingFs struct { + afero.Fs + + mu sync.Mutex + fileCount map[string]int +} + +func (c *createCountingFs) Reset() { + c.mu.Lock() + defer c.mu.Unlock() + + c.fileCount = make(map[string]int) +} + +func (fs *createCountingFs) onCreate(filename string) { + fs.mu.Lock() + defer fs.mu.Unlock() + + fs.fileCount[filename] = fs.fileCount[filename] + 1 +} + +func (fs *createCountingFs) Create(name string) (afero.File, error) { + f, err := fs.Fs.Create(name) + if err == nil { + fs.onCreate(name) + } + return f, err +} + +func (fs *createCountingFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) { + f, err := fs.Fs.OpenFile(name, flag, perm) + if err == nil && isWrite(flag) { + fs.onCreate(name) + } + return f, err +} diff --git a/hugofs/fs.go b/hugofs/fs.go index 52e27bd12f4..0accd7534ed 100644 --- a/hugofs/fs.go +++ b/hugofs/fs.go @@ -15,6 +15,8 @@ package hugofs import ( + "os" + "github.com/gohugoio/hugo/config" "github.com/spf13/afero" ) @@ -80,3 +82,7 @@ func getWorkingDirFs(base afero.Fs, cfg config.Provider) *afero.BasePathFs { return nil } + +func isWrite(flag int) bool { + return flag&os.O_RDWR != 0 || flag&os.O_WRONLY != 0 +} diff --git a/hugofs/hashing_fs.go b/hugofs/hashing_fs.go index 2de027ce20e..9cdd19d91b1 100644 --- a/hugofs/hashing_fs.go +++ b/hugofs/hashing_fs.go @@ -67,10 +67,6 @@ func (fs *md5HashingFs) wrapFile(f afero.File) afero.File { return &hashingFile{File: f, h: md5.New(), hashReceiver: fs.hashReceiver} } -func isWrite(flag int) bool { - return flag&os.O_RDWR != 0 || flag&os.O_WRONLY != 0 -} - func (fs *md5HashingFs) Name() string { return "md5HashingFs" } diff --git a/hugolib/alias.go b/hugolib/alias.go index c44f32dbba1..e3f8297c451 100644 --- a/hugolib/alias.go +++ b/hugolib/alias.go @@ -26,6 +26,7 @@ import ( "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/publisher" + "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/helpers" @@ -55,7 +56,12 @@ func newAliasHandler(t tpl.TemplateFinder, l *loggers.Logger, allowRoot bool) al return aliasHandler{t, l, allowRoot} } -func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (io.Reader, error) { +type aliasPage struct { + Permalink string + page.Page +} + +func (a aliasHandler) renderAlias(isXHTML bool, permalink string, p page.Page) (io.Reader, error) { t := "alias" if isXHTML { t = "alias-xhtml" @@ -75,12 +81,9 @@ func (a aliasHandler) renderAlias(isXHTML bool, 
permalink string, page *Page) (i } } - data := struct { - Permalink string - Page *Page - }{ + data := aliasPage{ permalink, - page, + p, } buffer := new(bytes.Buffer) @@ -91,11 +94,11 @@ func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (i return buffer, nil } -func (s *Site) writeDestAlias(path, permalink string, outputFormat output.Format, p *Page) (err error) { +func (s *Site) writeDestAlias(path, permalink string, outputFormat output.Format, p page.Page) (err error) { return s.publishDestAlias(false, path, permalink, outputFormat, p) } -func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFormat output.Format, p *Page) (err error) { +func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFormat output.Format, p page.Page) (err error) { handler := newAliasHandler(s.Tmpl, s.Log, allowRoot) isXHTML := strings.HasSuffix(path, ".xhtml") @@ -126,19 +129,19 @@ func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFo func (a aliasHandler) targetPathAlias(src string) (string, error) { originalAlias := src if len(src) <= 0 { - return "", fmt.Errorf("Alias \"\" is an empty string") + return "", fmt.Errorf("alias \"\" is an empty string") } alias := filepath.Clean(src) components := strings.Split(alias, helpers.FilePathSeparator) if !a.allowRoot && alias == helpers.FilePathSeparator { - return "", fmt.Errorf("Alias \"%s\" resolves to website root directory", originalAlias) + return "", fmt.Errorf("alias \"%s\" resolves to website root directory", originalAlias) } // Validate against directory traversal if components[0] == ".." { - return "", fmt.Errorf("Alias \"%s\" traverses outside the website root directory", originalAlias) + return "", fmt.Errorf("alias \"%s\" traverses outside the website root directory", originalAlias) } // Handle Windows file and directory naming restrictions @@ -171,7 +174,7 @@ func (a aliasHandler) targetPathAlias(src string) (string, error) { for _, m := range msgs { a.log.ERROR.Println(m) } - return "", fmt.Errorf("Cannot create \"%s\": Windows filename restriction", originalAlias) + return "", fmt.Errorf("cannot create \"%s\": Windows filename restriction", originalAlias) } for _, m := range msgs { a.log.INFO.Println(m) diff --git a/hugolib/alias_test.go b/hugolib/alias_test.go index da1b80b7007..109d01f14ed 100644 --- a/hugolib/alias_test.go +++ b/hugolib/alias_test.go @@ -50,7 +50,7 @@ func TestAlias(t *testing.T) { b.CreateSites().Build(BuildCfg{}) assert.Equal(1, len(b.H.Sites)) - require.Len(t, b.H.Sites[0].RegularPages, 1) + require.Len(t, b.H.Sites[0].RegularPages(), 1) // the real page b.AssertFileContent("public/page/index.html", "For some moments the old man") diff --git a/hugolib/collections.go b/hugolib/collections.go index cf75d373221..d6abab29dbb 100644 --- a/hugolib/collections.go +++ b/hugolib/collections.go @@ -14,19 +14,13 @@ package hugolib import ( - "fmt" - - "github.com/gohugoio/hugo/resources/resource" - "github.com/gohugoio/hugo/common/collections" + "github.com/gohugoio/hugo/resources/page" ) var ( - _ collections.Grouper = (*Page)(nil) - _ collections.Slicer = (*Page)(nil) - _ collections.Slicer = PageGroup{} - _ collections.Slicer = WeightedPage{} - _ resource.ResourcesConverter = Pages{} + _ collections.Grouper = (*pageState)(nil) + _ collections.Slicer = (*pageState)(nil) ) // collections.Slicer implementations below. We keep these bridge implementations @@ -35,50 +29,8 @@ var ( // Slice is not meant to be used externally. 
It's a bridge function // for the template functions. See collections.Slice. -func (p *Page) Slice(items interface{}) (interface{}, error) { - return toPages(items) -} - -// Slice is not meant to be used externally. It's a bridge function -// for the template functions. See collections.Slice. -func (p PageGroup) Slice(in interface{}) (interface{}, error) { - switch items := in.(type) { - case PageGroup: - return items, nil - case []interface{}: - groups := make(PagesGroup, len(items)) - for i, v := range items { - g, ok := v.(PageGroup) - if !ok { - return nil, fmt.Errorf("type %T is not a PageGroup", v) - } - groups[i] = g - } - return groups, nil - default: - return nil, fmt.Errorf("invalid slice type %T", items) - } -} - -// Slice is not meant to be used externally. It's a bridge function -// for the template functions. See collections.Slice. -func (p WeightedPage) Slice(in interface{}) (interface{}, error) { - switch items := in.(type) { - case WeightedPages: - return items, nil - case []interface{}: - weighted := make(WeightedPages, len(items)) - for i, v := range items { - g, ok := v.(WeightedPage) - if !ok { - return nil, fmt.Errorf("type %T is not a WeightedPage", v) - } - weighted[i] = g - } - return weighted, nil - default: - return nil, fmt.Errorf("invalid slice type %T", items) - } +func (p *pageState) Slice(items interface{}) (interface{}, error) { + return page.ToPages(items) } // collections.Grouper implementations below @@ -86,19 +38,10 @@ func (p WeightedPage) Slice(in interface{}) (interface{}, error) { // Group creates a PageGroup from a key and a Pages object // This method is not meant for external use. It got its non-typed arguments to satisfy // a very generic interface in the tpl package. -func (p *Page) Group(key interface{}, in interface{}) (interface{}, error) { - pages, err := toPages(in) +func (p *pageState) Group(key interface{}, in interface{}) (interface{}, error) { + pages, err := page.ToPages(in) if err != nil { return nil, err } - return PageGroup{Key: key, Pages: pages}, nil -} - -// ToResources wraps resource.ResourcesConverter -func (pages Pages) ToResources() resource.Resources { - r := make(resource.Resources, len(pages)) - for i, p := range pages { - r[i] = p - } - return r + return page.PageGroup{Key: key, Pages: pages}, nil } diff --git a/hugolib/collections_test.go b/hugolib/collections_test.go index 9cf328a05f6..0cd936aef3e 100644 --- a/hugolib/collections_test.go +++ b/hugolib/collections_test.go @@ -40,7 +40,7 @@ title: "Page" b.CreateSites().Build(BuildCfg{}) assert.Equal(1, len(b.H.Sites)) - require.Len(t, b.H.Sites[0].RegularPages, 2) + require.Len(t, b.H.Sites[0].RegularPages(), 2) b.AssertFileContent("public/index.html", "cool: 2") } @@ -79,12 +79,12 @@ tags_weight: %d b.CreateSites().Build(BuildCfg{}) assert.Equal(1, len(b.H.Sites)) - require.Len(t, b.H.Sites[0].RegularPages, 2) + require.Len(t, b.H.Sites[0].RegularPages(), 2) b.AssertFileContent("public/index.html", - "pages:2:hugolib.Pages:Page(/page1.md)/Page(/page2.md)", - "pageGroups:2:hugolib.PagesGroup:Page(/page1.md)/Page(/page2.md)", - `weightedPages:2::hugolib.WeightedPages:[WeightedPage(10,"Page") WeightedPage(20,"Page")]`) + "pages:2:page.Pages:Page(/page1.md)/Page(/page2.md)", + "pageGroups:2:page.PagesGroup:Page(/page1.md)/Page(/page2.md)", + `weightedPages:2::page.WeightedPages:[WeightedPage(10,"Page") WeightedPage(20,"Page")]`) } func TestAppendFunc(t *testing.T) { @@ -129,11 +129,11 @@ tags_weight: %d b.CreateSites().Build(BuildCfg{}) assert.Equal(1, len(b.H.Sites)) - 
require.Len(t, b.H.Sites[0].RegularPages, 2) + require.Len(t, b.H.Sites[0].RegularPages(), 2) b.AssertFileContent("public/index.html", - "pages:2:hugolib.Pages:Page(/page2.md)/Page(/page1.md)", - "appendPages:9:hugolib.Pages:home/page", + "pages:2:page.Pages:Page(/page2.md)/Page(/page1.md)", + "appendPages:9:page.Pages:home/page", "appendStrings:[]string:[a b c d e]", "appendStringsSlice:[]string:[a b c c d]", "union:[]string:[a b c d e]", diff --git a/hugolib/config.go b/hugolib/config.go index 6a1de32beec..8d2f2965cb6 100644 --- a/hugolib/config.go +++ b/hugolib/config.go @@ -24,7 +24,6 @@ import ( "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/hugo" - "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugolib/paths" "github.com/pkg/errors" _errors "github.com/pkg/errors" @@ -177,14 +176,6 @@ type configLoader struct { ConfigSourceDescriptor } -func (l configLoader) wrapFileInfoError(err error, fi os.FileInfo) error { - rfi, ok := fi.(hugofs.RealFilenameInfo) - if !ok { - return err - } - return l.wrapFileError(err, rfi.RealFilename()) -} - func (l configLoader) loadConfig(configName string, v *viper.Viper) (string, error) { baseDir := l.configFileDir() var baseFilename string @@ -240,11 +231,6 @@ func (l configLoader) wrapFileError(err error, filename string) error { return err } -func (l configLoader) newRealBaseFs(path string) afero.Fs { - return hugofs.NewBasePathRealFilenameFs(afero.NewBasePathFs(l.Fs, path).(*afero.BasePathFs)) - -} - func (l configLoader) loadConfigFromConfigDir(v *viper.Viper) ([]string, error) { sourceFs := l.Fs configDir := l.AbsConfigDir @@ -274,7 +260,7 @@ func (l configLoader) loadConfigFromConfigDir(v *viper.Viper) ([]string, error) for _, configDir := range configDirs { err := afero.Walk(sourceFs, configDir, func(path string, fi os.FileInfo, err error) error { - if fi == nil { + if fi == nil || err != nil { return nil } @@ -616,8 +602,8 @@ func loadDefaultSettingsFor(v *viper.Viper) error { v.SetDefault("removePathAccents", false) v.SetDefault("titleCaseStyle", "AP") v.SetDefault("taxonomies", map[string]string{"tag": "tags", "category": "categories"}) - v.SetDefault("permalinks", make(PermalinkOverrides, 0)) - v.SetDefault("sitemap", Sitemap{Priority: -1, Filename: "sitemap.xml"}) + v.SetDefault("permalinks", make(map[string]string)) + v.SetDefault("sitemap", config.Sitemap{Priority: -1, Filename: "sitemap.xml"}) v.SetDefault("pygmentsStyle", "monokai") v.SetDefault("pygmentsUseClasses", false) v.SetDefault("pygmentsCodeFences", false) @@ -625,7 +611,6 @@ func loadDefaultSettingsFor(v *viper.Viper) error { v.SetDefault("pygmentsOptions", "") v.SetDefault("disableLiveReload", false) v.SetDefault("pluralizeListTitles", true) - v.SetDefault("preserveTaxonomyNames", false) v.SetDefault("forceSyncStatic", false) v.SetDefault("footnoteAnchorPrefix", "") v.SetDefault("footnoteReturnLinkContents", "") diff --git a/hugolib/datafiles_test.go b/hugolib/datafiles_test.go index 6685de4cc61..eed1f3e417e 100644 --- a/hugolib/datafiles_test.go +++ b/hugolib/datafiles_test.go @@ -349,7 +349,7 @@ func doTestDataDirImpl(t *testing.T, dd dataDir, expected interface{}, configKey s := buildSingleSiteExpected(t, false, expectBuildError, depsCfg, BuildCfg{SkipRender: true}) - if !expectBuildError && !reflect.DeepEqual(expected, s.Data) { + if !expectBuildError && !reflect.DeepEqual(expected, s.h.Data()) { // This disabled code detects the situation described in the WARNING message below. 
// The situation seems to only occur for TOML data with integer values. // Perhaps the TOML parser returns ints in another type. @@ -366,7 +366,7 @@ func doTestDataDirImpl(t *testing.T, dd dataDir, expected interface{}, configKey } */ - return fmt.Sprintf("Expected data:\n%v got\n%v\n\nExpected type structure:\n%#[1]v got\n%#[2]v", expected, s.Data) + return fmt.Sprintf("Expected data:\n%v got\n%v\n\nExpected type structure:\n%#[1]v got\n%#[2]v", expected, s.h.Data()) } return diff --git a/hugolib/disableKinds_test.go b/hugolib/disableKinds_test.go index edada141912..bce88ed0d36 100644 --- a/hugolib/disableKinds_test.go +++ b/hugolib/disableKinds_test.go @@ -18,6 +18,8 @@ import ( "fmt" + "github.com/gohugoio/hugo/resources/page" + "github.com/gohugoio/hugo/deps" "github.com/spf13/afero" @@ -33,13 +35,13 @@ func TestDisableKindsNoneDisabled(t *testing.T) { func TestDisableKindsSomeDisabled(t *testing.T) { t.Parallel() - doTestDisableKinds(t, KindSection, kind404) + doTestDisableKinds(t, page.KindSection, kind404) } func TestDisableKindsOneDisabled(t *testing.T) { t.Parallel() for _, kind := range allKinds { - if kind == KindPage { + if kind == page.KindPage { // Turning off regular page generation have some side-effects // not handled by the assertions below (no sections), so // skip that for now. @@ -124,64 +126,64 @@ func assertDisabledKinds(th testHelper, s *Site, disabled ...string) { assertDisabledKind(th, func(isDisabled bool) bool { if isDisabled { - return len(s.RegularPages) == 0 + return len(s.RegularPages()) == 0 } - return len(s.RegularPages) > 0 - }, disabled, KindPage, "public/sect/p1/index.html", "Single|P1") + return len(s.RegularPages()) > 0 + }, disabled, page.KindPage, "public/sect/p1/index.html", "Single|P1") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindHome) + p := s.getPage(page.KindHome) if isDisabled { return p == nil } return p != nil - }, disabled, KindHome, "public/index.html", "Home") + }, disabled, page.KindHome, "public/index.html", "Home") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindSection, "sect") + p := s.getPage(page.KindSection, "sect") if isDisabled { return p == nil } return p != nil - }, disabled, KindSection, "public/sect/index.html", "Sects") + }, disabled, page.KindSection, "public/sect/index.html", "Sects") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindTaxonomy, "tags", "tag1") + p := s.getPage(page.KindTaxonomy, "tags", "tag1") if isDisabled { return p == nil } return p != nil - }, disabled, KindTaxonomy, "public/tags/tag1/index.html", "Tag1") + }, disabled, page.KindTaxonomy, "public/tags/tag1/index.html", "Tag1") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindTaxonomyTerm, "tags") + p := s.getPage(page.KindTaxonomyTerm, "tags") if isDisabled { return p == nil } return p != nil - }, disabled, KindTaxonomyTerm, "public/tags/index.html", "Tags") + }, disabled, page.KindTaxonomyTerm, "public/tags/index.html", "Tags") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindTaxonomyTerm, "categories") + p := s.getPage(page.KindTaxonomyTerm, "categories") if isDisabled { return p == nil } return p != nil - }, disabled, KindTaxonomyTerm, "public/categories/index.html", "Category Terms") + }, disabled, page.KindTaxonomyTerm, "public/categories/index.html", "Category Terms") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindTaxonomy, "categories", "hugo") + p := s.getPage(page.KindTaxonomy, "categories", 
"hugo") if isDisabled { return p == nil } return p != nil - }, disabled, KindTaxonomy, "public/categories/hugo/index.html", "Hugo") + }, disabled, page.KindTaxonomy, "public/categories/hugo/index.html", "Hugo") // The below have no page in any collection. assertDisabledKind(th, func(isDisabled bool) bool { return true }, disabled, kindRSS, "public/index.xml", "") assertDisabledKind(th, func(isDisabled bool) bool { return true }, disabled, kindSitemap, "public/sitemap.xml", "sitemap") @@ -195,7 +197,7 @@ func assertDisabledKind(th testHelper, kindAssert func(bool) bool, disabled []st if kind == kindRSS && !isDisabled { // If the home page is also disabled, there is not RSS to look for. - if stringSliceContains(KindHome, disabled...) { + if stringSliceContains(page.KindHome, disabled...) { isDisabled = true } } diff --git a/hugolib/embedded_shortcodes_test.go b/hugolib/embedded_shortcodes_test.go index 3a6220b532f..3ec6947414e 100644 --- a/hugolib/embedded_shortcodes_test.go +++ b/hugolib/embedded_shortcodes_test.go @@ -20,6 +20,8 @@ import ( "strings" "testing" + "github.com/spf13/cast" + "path/filepath" "github.com/gohugoio/hugo/deps" @@ -67,9 +69,11 @@ func doTestShortcodeCrossrefs(t *testing.T, relative bool) { s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{}) - require.Len(t, s.RegularPages, 1) + require.Len(t, s.RegularPages(), 1) - output := string(s.RegularPages[0].content()) + content, err := s.RegularPages()[0].Content() + require.NoError(t, err) + output := cast.ToString(content) if !strings.Contains(output, expected) { t.Errorf("Got\n%q\nExpected\n%q", output, expected) diff --git a/hugolib/gitinfo.go b/hugolib/gitinfo.go index d356fcf075e..bd50f10b89a 100644 --- a/hugolib/gitinfo.go +++ b/hugolib/gitinfo.go @@ -19,6 +19,7 @@ import ( "github.com/bep/gitmap" "github.com/gohugoio/hugo/config" + "github.com/gohugoio/hugo/resources/page" ) type gitInfo struct { @@ -26,15 +27,12 @@ type gitInfo struct { repo *gitmap.GitRepo } -func (g *gitInfo) forPage(p *Page) (*gitmap.GitInfo, bool) { - if g == nil { - return nil, false - } - - name := strings.TrimPrefix(filepath.ToSlash(p.Filename()), g.contentDir) +func (g *gitInfo) forPage(p page.Page) *gitmap.GitInfo { + name := strings.TrimPrefix(filepath.ToSlash(p.File().Filename()), g.contentDir) name = strings.TrimPrefix(name, "/") - return g.repo.Files[name], true + return g.repo.Files[name] + } func newGitInfo(cfg config.Provider) (*gitInfo, error) { diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go index 9ce1c438e75..74d8600eab5 100644 --- a/hugolib/hugo_sites.go +++ b/hugolib/hugo_sites.go @@ -14,14 +14,24 @@ package hugolib import ( - "errors" "io" "path/filepath" "sort" "strings" "sync" + "github.com/gohugoio/hugo/output" + "github.com/gohugoio/hugo/parser/metadecoders" + + "github.com/gohugoio/hugo/hugofs" + + "github.com/pkg/errors" + + "github.com/gohugoio/hugo/source" + + "github.com/bep/gitmap" "github.com/gohugoio/hugo/config" + "github.com/spf13/afero" "github.com/gohugoio/hugo/publisher" @@ -30,8 +40,10 @@ import ( "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/langs" + "github.com/gohugoio/hugo/lazy" "github.com/gohugoio/hugo/i18n" + "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/tpl/tplimpl" ) @@ -48,17 +60,98 @@ type HugoSites struct { // If this is running in the dev server. running bool + // Render output formats for all sites. 
+ renderFormats output.Formats + // Same as above, but with any duplicates removed. + // TODO(bep) page check if used + renderFormatsDistinct output.Formats + *deps.Deps + gitInfo *gitInfo + + // As loaded from the /data dirs + data map[string]interface{} + // Keeps track of bundle directories and symlinks to enable partial rebuilding. ContentChanges *contentChangeMap - // If enabled, keeps a revision map for all content. - gitInfo *gitInfo + init *hugoSitesInit + + *fatalErrorHandler +} + +type fatalErrorHandler struct { + mu sync.Mutex + + h *HugoSites + + logCount int + + done bool + donec chan bool // will be closed when done +} + +// FatalError error is used in some rare situations where it does not make sense to +// continue processing, to abort as soon as possible and log the error. +func (f *fatalErrorHandler) FatalError(err error) { + f.mu.Lock() + defer f.mu.Unlock() + if !f.done { + f.done = true + close(f.donec) + } + + if f.logCount < 5 { + f.logCount++ + f.h.Log.ERROR.Println(err) + } + +} + +func (f *fatalErrorHandler) Done() <-chan bool { + return f.donec +} + +type hugoSitesInit struct { + // Loads the data from all of the /data folders. + data *lazy.Init + + // Loads the Git info for all the pages if enabled. + gitInfo *lazy.Init + + // Maps page translations. + translations *lazy.Init } -func (h *HugoSites) siteInfos() SiteInfos { - infos := make(SiteInfos, len(h.Sites)) +func (h *hugoSitesInit) Reset() { + h.data.Reset() + h.gitInfo.Reset() + h.translations.Reset() +} + +func (h *HugoSites) Data() map[string]interface{} { + if _, err := h.init.data.Do(); err != nil { + h.SendError(errors.Wrap(err, "failed to load data")) + return nil + } + return h.data +} + +func (h *HugoSites) gitInfoForPage(p page.Page) (*gitmap.GitInfo, error) { + if _, err := h.init.gitInfo.Do(); err != nil { + return nil, err + } + + if h.gitInfo == nil { + return nil, nil + } + + return h.gitInfo.forPage(p), nil +} + +func (h *HugoSites) siteInfos() page.Sites { + infos := make(page.Sites, len(h.Sites)) for i, site := range h.Sites { infos[i] = &site.Info } @@ -106,7 +199,7 @@ func (h *HugoSites) IsMultihost() bool { func (h *HugoSites) LanguageSet() map[string]bool { set := make(map[string]bool) for _, s := range h.Sites { - set[s.Language.Lang] = true + set[s.language.Lang] = true } return set } @@ -129,14 +222,14 @@ func (h *HugoSites) PrintProcessingStats(w io.Writer) { func (h *HugoSites) langSite() map[string]*Site { m := make(map[string]*Site) for _, s := range h.Sites { - m[s.Language.Lang] = s + m[s.language.Lang] = s } return m } // GetContentPage finds a Page with content given the absolute filename. // Returns nil if none found. 
-func (h *HugoSites) GetContentPage(filename string) *Page { +func (h *HugoSites) GetContentPage(filename string) page.Page { for _, s := range h.Sites { pos := s.rawAllPages.findPagePosByFilename(filename) if pos == -1 { @@ -178,10 +271,40 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) { running: cfg.Running, multilingual: langConfig, multihost: cfg.Cfg.GetBool("multihost"), - Sites: sites} + Sites: sites, + init: &hugoSitesInit{ + data: lazy.New(), + gitInfo: lazy.New(), + translations: lazy.New(), + }, + } + + h.fatalErrorHandler = &fatalErrorHandler{ + h: h, + donec: make(chan bool), + } + + h.init.data.Add(func() (interface{}, error) { + err := h.loadData(h.PathSpec.BaseFs.Data.Fs) + return err, nil + }) + + h.init.translations.Add(func() (interface{}, error) { + if len(h.Sites) > 1 { + allTranslations := pagesToTranslationsMap(h.Sites) + assignTranslationsToPages(allTranslations, h.Sites) + } + + return nil, nil + }) + + h.init.gitInfo.Add(func() (interface{}, error) { + err := h.loadGitInfo() + return nil, err + }) for _, s := range sites { - s.owner = h + s.h = h } if err := applyDeps(cfg, sites...); err != nil { @@ -197,14 +320,10 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) { h.ContentChanges = contentChangeTracker } - if err := h.initGitInfo(); err != nil { - return nil, err - } - return h, nil } -func (h *HugoSites) initGitInfo() error { +func (h *HugoSites) loadGitInfo() error { if h.Cfg.GetBool("enableGitInfo") { gi, err := newGitInfo(h.Cfg) if err != nil { @@ -247,16 +366,16 @@ func applyDeps(cfg deps.DepsCfg, sites ...*Site) error { d.Site = &s.Info - siteConfig, err := loadSiteConfig(s.Language) + siteConfig, err := loadSiteConfig(s.language) if err != nil { return err } - s.siteConfig = siteConfig - s.siteRefLinker, err = newSiteRefLinker(s.Language, s) + s.siteConfigConfig = siteConfig + s.siteRefLinker, err = newSiteRefLinker(s.language, s) return err } - cfg.Language = s.Language + cfg.Language = s.language cfg.MediaTypes = s.mediaTypesConfig cfg.OutputFormats = s.outputFormatsConfig @@ -347,11 +466,23 @@ func createSitesFromConfig(cfg deps.DepsCfg) ([]*Site, error) { return sites, nil } -// Reset resets the sites and template caches, making it ready for a full rebuild. -func (h *HugoSites) reset() { - for i, s := range h.Sites { - h.Sites[i] = s.reset() +// Reset resets the sites and template caches etc., making it ready for a full rebuild. +func (h *HugoSites) reset(config *BuildCfg) { + if config.ResetState { + for i, s := range h.Sites { + h.Sites[i] = s.reset() + if r, ok := s.Fs.Destination.(hugofs.Reseter); ok { + r.Reset() + } + } + } + + h.fatalErrorHandler = &fatalErrorHandler{ + h: h, + donec: make(chan bool), } + + h.init.Reset() } // resetLogs resets the log counters etc. Used to do a new build on the same sites. @@ -387,7 +518,7 @@ func (h *HugoSites) createSitesFromConfig(cfg config.Provider) error { h.Sites = sites for _, s := range sites { - s.owner = h + s.h = h } if err := applyDeps(depsCfg, sites...); err != nil { @@ -435,7 +566,7 @@ type BuildCfg struct { // Note that a page does not have to have a content page / file. // For regular builds, this will allways return true. // TODO(bep) rename/work this. 
-func (cfg *BuildCfg) shouldRender(p *Page) bool { +func (cfg *BuildCfg) shouldRender(p *pageState) bool { if p.forceRender { p.forceRender = false return true @@ -445,15 +576,8 @@ func (cfg *BuildCfg) shouldRender(p *Page) bool { return true } - if cfg.RecentlyVisited[p.RelPermalink()] { - if cfg.PartialReRender { - _ = p.initMainOutputFormat() - } - return true - } - - if cfg.whatChanged != nil && p.File != nil { - return cfg.whatChanged.files[p.File.Filename()] + if cfg.whatChanged != nil && p.File() != nil { + return cfg.whatChanged.files[p.File().Filename()] } return false @@ -477,100 +601,85 @@ func (h *HugoSites) renderCrossSitesArtifacts() error { return nil } - // TODO(bep) DRY - sitemapDefault := parseSitemap(h.Cfg.GetStringMap("sitemap")) - s := h.Sites[0] smLayouts := []string{"sitemapindex.xml", "_default/sitemapindex.xml", "_internal/_default/sitemapindex.xml"} return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemapindex", - sitemapDefault.Filename, h.toSiteInfos(), smLayouts...) -} - -func (h *HugoSites) assignMissingTranslations() error { - - // This looks heavy, but it should be a small number of nodes by now. - allPages := h.findAllPagesByKindNotIn(KindPage) - for _, nodeType := range []string{KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm} { - nodes := h.findPagesByKindIn(nodeType, allPages) - - // Assign translations - for _, t1 := range nodes { - for _, t2 := range nodes { - if t1.isNewTranslation(t2) { - t1.translations = append(t1.translations, t2) - } - } - } - } - - // Now we can sort the translations. - for _, p := range allPages { - if len(p.translations) > 0 { - pageBy(languagePageSort).Sort(p.translations) - } - } - return nil - + s.siteCfg.sitemap.Filename, h.toSiteInfos(), smLayouts...) } // createMissingPages creates home page, taxonomies etc. that isnt't created as an // effect of having a content file. func (h *HugoSites) createMissingPages() error { - var newPages Pages + var newPages pageStatePages for _, s := range h.Sites { - if s.isEnabled(KindHome) { + if s.isEnabled(page.KindHome) { // home pages - home := s.findPagesByKind(KindHome) - if len(home) > 1 { + homes := s.findWorkPagesByKind(page.KindHome) + if len(homes) > 1 { panic("Too many homes") } - if len(home) == 0 { - n := s.newHomePage() - s.Pages = append(s.Pages, n) - newPages = append(newPages, n) + var home *pageState + if len(homes) == 0 { + home = s.newPage(page.KindHome) + s.workAllPages = append(s.workAllPages, home) + newPages = append(newPages, home) + } else { + home = homes[0] } + + s.home = home } // Will create content-less root sections. newSections := s.assembleSections() - s.Pages = append(s.Pages, newSections...) + s.workAllPages = append(s.workAllPages, newSections...) newPages = append(newPages, newSections...) + taxonomyTermEnabled := s.isEnabled(page.KindTaxonomyTerm) + taxonomyEnabled := s.isEnabled(page.KindTaxonomy) + // taxonomy list and terms pages - taxonomies := s.Language.GetStringMapString("taxonomies") + taxonomies := s.Language().GetStringMapString("taxonomies") if len(taxonomies) > 0 { - taxonomyPages := s.findPagesByKind(KindTaxonomy) - taxonomyTermsPages := s.findPagesByKind(KindTaxonomyTerm) + taxonomyPages := s.findWorkPagesByKind(page.KindTaxonomy) + taxonomyTermsPages := s.findWorkPagesByKind(page.KindTaxonomyTerm) + + // Make them navigable from WeightedPage etc. 
+ for _, p := range taxonomyPages { + p.getTaxonomyNodeInfo().TransferValues(p) + } + for _, p := range taxonomyTermsPages { + p.getTaxonomyNodeInfo().TransferValues(p) + } + for _, plural := range taxonomies { - if s.isEnabled(KindTaxonomyTerm) { + if taxonomyTermEnabled { foundTaxonomyTermsPage := false for _, p := range taxonomyTermsPages { - if p.sectionsPath() == plural { + if p.SectionsPath() == plural { foundTaxonomyTermsPage = true break } } if !foundTaxonomyTermsPage { - n := s.newTaxonomyTermsPage(plural) - s.Pages = append(s.Pages, n) + n := s.newPage(page.KindTaxonomyTerm, plural) + n.getTaxonomyNodeInfo().TransferValues(n) + s.workAllPages = append(s.workAllPages, n) newPages = append(newPages, n) } } - if s.isEnabled(KindTaxonomy) { - for key := range s.Taxonomies[plural] { + if taxonomyEnabled { + for termKey := range s.Taxonomies[plural] { + foundTaxonomyPage := false - origKey := key - if s.Info.preserveTaxonomyNames { - key = s.PathSpec.MakePathSanitized(key) - } for _, p := range taxonomyPages { - sectionsPath := p.sectionsPath() + sectionsPath := p.SectionsPath() if !strings.HasPrefix(sectionsPath, plural) { continue @@ -579,20 +688,21 @@ func (h *HugoSites) createMissingPages() error { singularKey := strings.TrimPrefix(sectionsPath, plural) singularKey = strings.TrimPrefix(singularKey, "/") - // Some people may have /authors/MaxMustermann etc. as paths. - // p.sections contains the raw values from the file system. - // See https://github.com/gohugoio/hugo/issues/4238 - singularKey = s.PathSpec.MakePathSanitized(singularKey) - - if singularKey == key { + if singularKey == termKey { foundTaxonomyPage = true break } } if !foundTaxonomyPage { - n := s.newTaxonomyPage(plural, origKey) - s.Pages = append(s.Pages, n) + info := s.taxonomyNodes.Get(plural, termKey) + if info == nil { + panic("no info found") + } + + n := s.newTaxonomyPage(info.term, info.plural, info.termKey) + info.TransferValues(n) + s.workAllPages = append(s.workAllPages, n) newPages = append(newPages, n) } } @@ -601,24 +711,6 @@ func (h *HugoSites) createMissingPages() error { } } - if len(newPages) > 0 { - // This resorting is unfortunate, but it also needs to be sorted - // when sections are created. - first := h.Sites[0] - - first.AllPages = append(first.AllPages, newPages...) - - first.AllPages.sort() - - for _, s := range h.Sites { - s.Pages.sort() - } - - for i := 1; i < len(h.Sites); i++ { - h.Sites[i].AllPages = first.AllPages - } - } - return nil } @@ -628,61 +720,58 @@ func (h *HugoSites) removePageByFilename(filename string) { } } -func (h *HugoSites) setupTranslations() { +func (h *HugoSites) createPageCollections() error { for _, s := range h.Sites { for _, p := range s.rawAllPages { - if p.Kind == kindUnknown { - p.Kind = p.kindFromSections() - } - - if !p.s.isEnabled(p.Kind) { + if !s.isEnabled(p.Kind()) { continue } - shouldBuild := p.shouldBuild() - s.updateBuildStats(p) + shouldBuild := s.shouldBuild(p) + s.buildStats.update(p) if shouldBuild { - if p.headless { + if p.m.headless { s.headlessPages = append(s.headlessPages, p) } else { - s.Pages = append(s.Pages, p) + s.workAllPages = append(s.workAllPages, p) } } } } - allPages := make(Pages, 0) + allPages := newLazyPagesFactory(func() page.Pages { + var pages page.Pages + for _, s := range h.Sites { + pages = append(pages, s.Pages()...) + } - for _, s := range h.Sites { - allPages = append(allPages, s.Pages...) 
- } + page.SortByDefault(pages) - allPages.sort() + return pages + }) - for _, s := range h.Sites { - s.AllPages = allPages - } + allRegularPages := newLazyPagesFactory(func() page.Pages { + return h.findPagesByKindIn(page.KindPage, allPages.get()) + }) - // Pull over the collections from the master site - for i := 1; i < len(h.Sites); i++ { - h.Sites[i].Data = h.Sites[0].Data + for _, s := range h.Sites { + s.PageCollections.allPages = allPages + s.PageCollections.allRegularPages = allRegularPages } - if len(h.Sites) > 1 { - allTranslations := pagesToTranslationsMap(allPages) - assignTranslationsToPages(allTranslations, allPages) - } + return nil } -func (s *Site) preparePagesForRender(start bool) error { - for _, p := range s.Pages { - if err := p.prepareForRender(start); err != nil { +func (s *Site) preparePagesForRender(idx int) error { + + for _, p := range s.workAllPages { + if err := p.initOutputFormat(idx); err != nil { return err } } for _, p := range s.headlessPages { - if err := p.prepareForRender(start); err != nil { + if err := p.initOutputFormat(idx); err != nil { return err } } @@ -691,62 +780,141 @@ func (s *Site) preparePagesForRender(start bool) error { } // Pages returns all pages for all sites. -func (h *HugoSites) Pages() Pages { - return h.Sites[0].AllPages +func (h *HugoSites) Pages() page.Pages { + return h.Sites[0].AllPages() } -func handleShortcodes(p *PageWithoutContent, rawContentCopy []byte) ([]byte, error) { - if p.shortcodeState != nil && p.shortcodeState.contentShortcodes.Len() > 0 { - p.s.Log.DEBUG.Printf("Replace %d shortcodes in %q", p.shortcodeState.contentShortcodes.Len(), p.BaseFileName()) - err := p.shortcodeState.executeShortcodesForDelta(p) +func (h *HugoSites) loadData(fs afero.Fs) (err error) { + spec := source.NewSourceSpec(h.PathSpec, fs) + fileSystem := spec.NewFilesystem("") + h.data = make(map[string]interface{}) + for _, r := range fileSystem.Files() { + if err := h.handleDataFile(r); err != nil { + return err + } + } - if err != nil { + return +} + +func (h *HugoSites) handleDataFile(r source.ReadableFile) error { + var current map[string]interface{} - return rawContentCopy, err + f, err := r.Open() + if err != nil { + return errors.Wrapf(err, "Failed to open data file %q:", r.LogicalName()) + } + defer f.Close() + + // Crawl in data tree to insert data + current = h.data + keyParts := strings.Split(r.Dir(), helpers.FilePathSeparator) + // The first path element is the virtual folder (typically theme name), which is + // not part of the key. + if len(keyParts) > 1 { + for _, key := range keyParts[1:] { + if key != "" { + if _, ok := current[key]; !ok { + current[key] = make(map[string]interface{}) + } + current = current[key].(map[string]interface{}) + } } + } - rawContentCopy, err = replaceShortcodeTokens(rawContentCopy, shortcodePlaceholderPrefix, p.shortcodeState.renderedShortcodes) + data, err := h.readData(r) + if err != nil { + return h.errWithFileContext(err, r) + } - if err != nil { - p.s.Log.FATAL.Printf("Failed to replace shortcode tokens in %s:\n%s", p.BaseFileName(), err.Error()) + if data == nil { + return nil + } + + // filepath.Walk walks the files in lexical order, '/' comes before '.' + // this warning could happen if + // 1. A theme uses the same key; the main data folder wins + // 2. A sub folder uses the same key: the sub folder wins + higherPrecedentData := current[r.BaseFileName()] + + switch data.(type) { + case nil: + // hear the crickets? 
+ + case map[string]interface{}: + + switch higherPrecedentData.(type) { + case nil: + current[r.BaseFileName()] = data + case map[string]interface{}: + // merge maps: insert entries from data for keys that + // don't already exist in higherPrecedentData + higherPrecedentMap := higherPrecedentData.(map[string]interface{}) + for key, value := range data.(map[string]interface{}) { + if _, exists := higherPrecedentMap[key]; exists { + h.Log.WARN.Printf("Data for key '%s' in path '%s' is overridden by higher precedence data already in the data tree", key, r.Path()) + } else { + higherPrecedentMap[key] = value + } + } + default: + // can't merge: higherPrecedentData is not a map + h.Log.WARN.Printf("The %T data from '%s' overridden by "+ + "higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData) + } + + case []interface{}: + if higherPrecedentData == nil { + current[r.BaseFileName()] = data + } else { + // we don't merge array data + h.Log.WARN.Printf("The %T data from '%s' overridden by "+ + "higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData) } + + default: + h.Log.ERROR.Printf("unexpected data type %T in file %s", data, r.LogicalName()) } - return rawContentCopy, nil + return nil } -func (s *Site) updateBuildStats(page *Page) { - if page.IsDraft() { - s.draftCount++ +func (h *HugoSites) errWithFileContext(err error, f source.File) error { + rfi, ok := f.FileInfo().(hugofs.RealFilenameInfo) + if !ok { + return err } - if page.IsFuture() { - s.futureCount++ - } + realFilename := rfi.RealFilename() - if page.IsExpired() { - s.expiredCount++ - } -} + err, _ = herrors.WithFileContextForFile( + err, + realFilename, + realFilename, + h.SourceSpec.Fs.Source, + herrors.SimpleLineMatcher) -func (h *HugoSites) findPagesByKindNotIn(kind string, inPages Pages) Pages { - return h.Sites[0].findPagesByKindNotIn(kind, inPages) + return err } -func (h *HugoSites) findPagesByKindIn(kind string, inPages Pages) Pages { - return h.Sites[0].findPagesByKindIn(kind, inPages) -} +func (h *HugoSites) readData(f source.ReadableFile) (interface{}, error) { + file, err := f.Open() + if err != nil { + return nil, errors.Wrap(err, "readData: failed to open data file") + } + defer file.Close() + content := helpers.ReaderToBytes(file) -func (h *HugoSites) findAllPagesByKind(kind string) Pages { - return h.findPagesByKindIn(kind, h.Sites[0].AllPages) + format := metadecoders.FormatFromString(f.Extension()) + return metadecoders.Default.Unmarshal(content, format) } -func (h *HugoSites) findAllPagesByKindNotIn(kind string) Pages { - return h.findPagesByKindNotIn(kind, h.Sites[0].AllPages) +func (h *HugoSites) findPagesByKindIn(kind string, inPages page.Pages) page.Pages { + return h.Sites[0].findPagesByKindIn(kind, inPages) } -func (h *HugoSites) findPagesByShortcode(shortcode string) Pages { - var pages Pages +func (h *HugoSites) findPagesByShortcode(shortcode string) page.Pages { + var pages page.Pages for _, s := range h.Sites { pages = append(pages, s.findPagesByShortcode(shortcode)...) } diff --git a/hugolib/hugo_sites_build.go b/hugolib/hugo_sites_build.go index ec5070fa814..96519444481 100644 --- a/hugolib/hugo_sites_build.go +++ b/hugolib/hugo_sites_build.go @@ -15,7 +15,12 @@ package hugolib import ( "bytes" + "context" "fmt" + "runtime/trace" + "sort" + + "github.com/gohugoio/hugo/output" "errors" @@ -26,6 +31,9 @@ import ( // Build builds all sites. If filesystem events are provided, // this is considered to be a potential partial rebuild. 
func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { + ctx, task := trace.NewTask(context.Background(), "Build") + defer task.End() + errCollector := h.StartErrorCollector() errs := make(chan error) @@ -71,22 +79,36 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { return err } } else { - if err := h.init(conf); err != nil { + if err := h.initSites(conf); err != nil { return err } } - if err := h.process(conf, events...); err != nil { + var err error + + f := func() { + err = h.process(conf, events...) + } + trace.WithRegion(ctx, "process", f) + if err != nil { return err } - if err := h.assemble(conf); err != nil { + f = func() { + err = h.assemble(conf) + } + trace.WithRegion(ctx, "assemble", f) + if err != nil { return err } + return nil } - prepareErr = prepare() + f := func() { + prepareErr = prepare() + } + trace.WithRegion(ctx, "prepare", f) if prepareErr != nil { h.SendError(prepareErr) } @@ -94,7 +116,12 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { } if prepareErr == nil { - if err := h.render(conf); err != nil { + var err error + f := func() { + err = h.render(conf) + } + trace.WithRegion(ctx, "render", f) + if err != nil { h.SendError(err) } } @@ -132,17 +159,8 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { // Build lifecycle methods below. // The order listed matches the order of execution. -func (h *HugoSites) init(config *BuildCfg) error { - - for _, s := range h.Sites { - if s.PageCollections == nil { - s.PageCollections = newPageCollections() - } - } - - if config.ResetState { - h.reset() - } +func (h *HugoSites) initSites(config *BuildCfg) error { + h.reset(config) if config.NewConfig != nil { if err := h.createSitesFromConfig(config.NewConfig); err != nil { @@ -155,28 +173,22 @@ func (h *HugoSites) init(config *BuildCfg) error { func (h *HugoSites) initRebuild(config *BuildCfg) error { if config.NewConfig != nil { - return errors.New("Rebuild does not support 'NewConfig'.") + return errors.New("rebuild does not support 'NewConfig'") } if config.ResetState { - return errors.New("Rebuild does not support 'ResetState'.") + return errors.New("rebuild does not support 'ResetState'") } if !h.running { - return errors.New("Rebuild called when not in watch mode") - } - - if config.whatChanged.source { - // This is for the non-renderable content pages (rarely used, I guess). - // We could maybe detect if this is really needed, but it should be - // pretty fast. - h.TemplateHandler().RebuildClone() + return errors.New("rebuild called when not in watch mode") } for _, s := range h.Sites { s.resetBuildState() } + h.reset(config) h.resetLogs() helpers.InitLoggers() @@ -203,14 +215,6 @@ func (h *HugoSites) process(config *BuildCfg, events ...fsnotify.Event) error { } func (h *HugoSites) assemble(config *BuildCfg) error { - if config.whatChanged.source { - for _, s := range h.Sites { - s.createTaxonomiesEntries() - } - } - - // TODO(bep) we could probably wait and do this in one go later - h.setupTranslations() if len(h.Sites) > 1 { // The first is initialized during process; initialize the rest @@ -221,47 +225,26 @@ func (h *HugoSites) assemble(config *BuildCfg) error { } } + if err := h.createPageCollections(); err != nil { + return err + } + if config.whatChanged.source { for _, s := range h.Sites { - if err := s.buildSiteMeta(); err != nil { + if err := s.assembleTaxonomies(); err != nil { return err } } } + // Create pagexs for the section pages etc. 
without content file. if err := h.createMissingPages(); err != nil { return err } for _, s := range h.Sites { - for _, pages := range []Pages{s.Pages, s.headlessPages} { - for _, p := range pages { - // May have been set in front matter - if len(p.outputFormats) == 0 { - p.outputFormats = s.outputFormats[p.Kind] - } - - if p.headless { - // headless = 1 output format only - p.outputFormats = p.outputFormats[:1] - } - for _, r := range p.Resources.ByType(pageResourceType) { - r.(*Page).outputFormats = p.outputFormats - } - - if err := p.initPaths(); err != nil { - return err - } - - } - } - s.assembleMenus() - s.refreshPageCaches() s.setupSitePages() - } - - if err := h.assignMissingTranslations(); err != nil { - return err + sort.Stable(s.workAllPages) } return nil @@ -269,42 +252,69 @@ func (h *HugoSites) assemble(config *BuildCfg) error { } func (h *HugoSites) render(config *BuildCfg) error { + siteRenderContext := &siteRenderContext{cfg: config, multihost: h.multihost} + if !config.PartialReRender { + h.renderFormats = output.Formats{} + h.renderFormatsDistinct = output.Formats{} + seen := make(map[string]bool) for _, s := range h.Sites { s.initRenderFormats() + h.renderFormats = append(h.renderFormats, s.renderFormats...) + } + + for _, f := range h.renderFormats { + if !seen[f.Name] { + h.renderFormatsDistinct = append(h.renderFormatsDistinct, f) + seen[f.Name] = true + } } } + i := 0 for _, s := range h.Sites { - for i, rf := range s.renderFormats { - for _, s2 := range h.Sites { - // We render site by site, but since the content is lazily rendered - // and a site can "borrow" content from other sites, every site - // needs this set. - s2.rc = &siteRenderingContext{Format: rf} - - isRenderingSite := s == s2 - - if !config.PartialReRender { - if err := s2.preparePagesForRender(isRenderingSite && i == 0); err != nil { - return err + for siteOutIdx, renderFormat := range s.renderFormats { + siteRenderContext.outIdx = siteOutIdx + siteRenderContext.sitesOutIdx = i + i++ + + select { + case <-h.Done(): + return nil + default: + // For the non-renderable pages, we use the content iself as + // template and we may have to re-parse and execute it for + // each output format. + h.TemplateHandler().RebuildClone() + + for _, s2 := range h.Sites { + // We render site by site, but since the content is lazily rendered + // and a site can "borrow" content from other sites, every site + // needs this set. 
+ s2.rc = &siteRenderingContext{Format: renderFormat} + + if !config.PartialReRender { + if err := s2.preparePagesForRender(siteRenderContext.sitesOutIdx); err != nil { + return err + } } } - } - - if !config.SkipRender { - if config.PartialReRender { - if err := s.renderPages(config); err != nil { - return err - } - } else { - if err := s.render(config, i); err != nil { - return err + if !config.SkipRender { + if config.PartialReRender { + if err := s.renderPages(siteRenderContext); err != nil { + return err + } + } else { + if err := s.render(siteRenderContext); err != nil { + return err + } } } } + } + } if !config.SkipRender { diff --git a/hugolib/hugo_sites_build_errors_test.go b/hugolib/hugo_sites_build_errors_test.go index dd80946e8ac..9ac7c5c6275 100644 --- a/hugolib/hugo_sites_build_errors_test.go +++ b/hugolib/hugo_sites_build_errors_test.go @@ -7,6 +7,9 @@ import ( "runtime" "strings" "testing" + "time" + + "github.com/fortytw2/leaktest" "github.com/gohugoio/hugo/common/herrors" "github.com/stretchr/testify/require" @@ -20,25 +23,24 @@ type testSiteBuildErrorAsserter struct { func (t testSiteBuildErrorAsserter) getFileError(err error) *herrors.ErrorWithFileContext { t.assert.NotNil(err, t.name) ferr := herrors.UnwrapErrorWithFileContext(err) - t.assert.NotNil(ferr, fmt.Sprintf("[%s] got %T: %+v\n%s", t.name, err, err, trace())) + t.assert.NotNil(ferr, fmt.Sprintf("[%s] got %T: %+v\n%s", t.name, err, err, stackTrace())) return ferr } func (t testSiteBuildErrorAsserter) assertLineNumber(lineNumber int, err error) { fe := t.getFileError(err) - t.assert.Equal(lineNumber, fe.Position().LineNumber, fmt.Sprintf("[%s] got => %s\n%s", t.name, fe, trace())) + t.assert.Equal(lineNumber, fe.Position().LineNumber, fmt.Sprintf("[%s] got => %s\n%s", t.name, fe, stackTrace())) } func (t testSiteBuildErrorAsserter) assertErrorMessage(e1, e2 string) { // The error message will contain filenames with OS slashes. Normalize before compare. 
e1, e2 = filepath.ToSlash(e1), filepath.ToSlash(e2) - t.assert.Contains(e2, e1, trace()) + t.assert.Contains(e2, e1, stackTrace()) } func TestSiteBuildErrors(t *testing.T) { t.Parallel() - assert := require.New(t) const ( yamlcontent = "yamlcontent" @@ -88,9 +90,9 @@ func TestSiteBuildErrors(t *testing.T) { }, assertCreateError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(5, fe.Position().LineNumber) - assert.Equal(1, fe.Position().ColumnNumber) - assert.Equal("go-html-template", fe.ChromaLexer) + a.assert.Equal(5, fe.Position().LineNumber) + a.assert.Equal(1, fe.Position().ColumnNumber) + a.assert.Equal("go-html-template", fe.ChromaLexer) a.assertErrorMessage("\"layouts/_default/single.html:5:1\": parse failed: template: _default/single.html:5: unexpected \"}\" in operand", fe.Error()) }, @@ -103,9 +105,9 @@ func TestSiteBuildErrors(t *testing.T) { }, assertBuildError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(5, fe.Position().LineNumber) - assert.Equal(14, fe.Position().ColumnNumber) - assert.Equal("go-html-template", fe.ChromaLexer) + a.assert.Equal(5, fe.Position().LineNumber) + a.assert.Equal(14, fe.Position().ColumnNumber) + a.assert.Equal("go-html-template", fe.ChromaLexer) a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error()) }, @@ -118,9 +120,9 @@ func TestSiteBuildErrors(t *testing.T) { }, assertBuildError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(5, fe.Position().LineNumber) - assert.Equal(14, fe.Position().ColumnNumber) - assert.Equal("go-html-template", fe.ChromaLexer) + a.assert.Equal(5, fe.Position().LineNumber) + a.assert.Equal(14, fe.Position().ColumnNumber) + a.assert.Equal("go-html-template", fe.ChromaLexer) a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error()) }, @@ -143,8 +145,8 @@ func TestSiteBuildErrors(t *testing.T) { }, assertBuildError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(7, fe.Position().LineNumber) - assert.Equal("md", fe.ChromaLexer) + a.assert.Equal(7, fe.Position().LineNumber) + a.assert.Equal("md", fe.ChromaLexer) // Make sure that it contains both the content file and template a.assertErrorMessage(`content/myyaml.md:7:10": failed to render shortcode "sc"`, fe.Error()) a.assertErrorMessage(`shortcodes/sc.html:4:22: executing "shortcodes/sc.html" at <.Page.Titles>: can't evaluate`, fe.Error()) @@ -158,10 +160,10 @@ func TestSiteBuildErrors(t *testing.T) { }, assertBuildError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(7, fe.Position().LineNumber) - assert.Equal(14, fe.Position().ColumnNumber) - assert.Equal("md", fe.ChromaLexer) - a.assertErrorMessage("\"content/myyaml.md:7:14\": failed to extract shortcode: template for shortcode \"nono\" not found", fe.Error()) + a.assert.Equal(7, fe.Position().LineNumber) + a.assert.Equal(14, fe.Position().ColumnNumber) + a.assert.Equal("md", fe.ChromaLexer) + a.assertErrorMessage(`"content/myyaml.md:7:14": failed to extract shortcode: failed to handle template for shortcode "nono": template for shortcode "nono" not found`, fe.Error()) }, }, { @@ -182,8 +184,8 @@ func TestSiteBuildErrors(t *testing.T) { }, assertBuildError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(6, fe.Position().LineNumber) - assert.Equal("toml", fe.ErrorContext.ChromaLexer) + 
a.assert.Equal(6, fe.Position().LineNumber) + a.assert.Equal("toml", fe.ErrorContext.ChromaLexer) }, }, @@ -196,8 +198,8 @@ func TestSiteBuildErrors(t *testing.T) { assertBuildError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(3, fe.Position().LineNumber) - assert.Equal("json", fe.ErrorContext.ChromaLexer) + a.assert.Equal(3, fe.Position().LineNumber) + a.assert.Equal("json", fe.ErrorContext.ChromaLexer) }, }, @@ -210,42 +212,43 @@ func TestSiteBuildErrors(t *testing.T) { }, assertBuildError: func(a testSiteBuildErrorAsserter, err error) { - assert.Error(err) + a.assert.Error(err) // This is fixed in latest Go source if regexp.MustCompile("devel|12").MatchString(runtime.Version()) { fe := a.getFileError(err) - assert.Equal(5, fe.Position().LineNumber) - assert.Equal(21, fe.Position().ColumnNumber) + a.assert.Equal(5, fe.Position().LineNumber) + a.assert.Equal(21, fe.Position().ColumnNumber) } else { - assert.Contains(err.Error(), `execute of template failed: panic in Execute`) + a.assert.Contains(err.Error(), `execute of template failed: panic in Execute`) } }, }, } for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + assert := require.New(t) + errorAsserter := testSiteBuildErrorAsserter{ + assert: assert, + name: test.name, + } - errorAsserter := testSiteBuildErrorAsserter{ - assert: assert, - name: test.name, - } + b := newTestSitesBuilder(t).WithSimpleConfigFile() - b := newTestSitesBuilder(t).WithSimpleConfigFile() + f := func(fileType, content string) string { + if fileType != test.fileType { + return content + } + return test.fileFixer(content) - f := func(fileType, content string) string { - if fileType != test.fileType { - return content } - return test.fileFixer(content) - } - - b.WithTemplatesAdded("layouts/shortcodes/sc.html", f(shortcode, `SHORTCODE L1 + b.WithTemplatesAdded("layouts/shortcodes/sc.html", f(shortcode, `SHORTCODE L1 SHORTCODE L2 SHORTCODE L3: SHORTCODE L4: {{ .Page.Title }} `)) - b.WithTemplatesAdded("layouts/_default/baseof.html", f(base, `BASEOF L1 + b.WithTemplatesAdded("layouts/_default/baseof.html", f(base, `BASEOF L1 BASEOF L2 BASEOF L3 BASEOF L4{{ if .Title }}{{ end }} @@ -253,7 +256,7 @@ BASEOF L4{{ if .Title }}{{ end }} BASEOF L6 `)) - b.WithTemplatesAdded("layouts/_default/single.html", f(single, `{{ define "main" }} + b.WithTemplatesAdded("layouts/_default/single.html", f(single, `{{ define "main" }} SINGLE L2: SINGLE L3: SINGLE L4: @@ -261,7 +264,7 @@ SINGLE L5: {{ .Title }} {{ .Content }} {{ end }} `)) - b.WithContent("myyaml.md", f(yamlcontent, `--- + b.WithContent("myyaml.md", f(yamlcontent, `--- title: "The YAML" --- @@ -275,7 +278,7 @@ The end. `)) - b.WithContent("mytoml.md", f(tomlcontent, `+++ + b.WithContent("mytoml.md", f(tomlcontent, `+++ title = "The TOML" p1 = "v" p2 = "v" @@ -288,7 +291,7 @@ Some content. `)) - b.WithContent("myjson.md", f(jsoncontent, `{ + b.WithContent("myjson.md", f(jsoncontent, `{ "title": "This is a title", "description": "This is a description." } @@ -298,26 +301,30 @@ Some content. 
`)) - createErr := b.CreateSitesE() - if test.assertCreateError != nil { - test.assertCreateError(errorAsserter, createErr) - } else { - assert.NoError(createErr) - } - - if createErr == nil { - buildErr := b.BuildE(BuildCfg{}) - if test.assertBuildError != nil { - test.assertBuildError(errorAsserter, buildErr) + createErr := b.CreateSitesE() + if test.assertCreateError != nil { + test.assertCreateError(errorAsserter, createErr) } else { - assert.NoError(buildErr) + assert.NoError(createErr) } - } + + if createErr == nil { + buildErr := b.BuildE(BuildCfg{}) + if test.assertBuildError != nil { + test.assertBuildError(errorAsserter, buildErr) + } else { + assert.NoError(buildErr) + } + } + }) } } // https://github.com/gohugoio/hugo/issues/5375 func TestSiteBuildTimeout(t *testing.T) { + if !isCI() { + defer leaktest.CheckTimeout(t, 10*time.Second)() + } b := newTestSitesBuilder(t) b.WithConfigFile("toml", ` @@ -342,6 +349,6 @@ title: "A page" } - b.CreateSites().Build(BuildCfg{}) + b.CreateSites().BuildFail(BuildCfg{}) } diff --git a/hugolib/hugo_sites_build_test.go b/hugolib/hugo_sites_build_test.go index 83b96b7f4e7..015306ee727 100644 --- a/hugolib/hugo_sites_build_test.go +++ b/hugolib/hugo_sites_build_test.go @@ -1,16 +1,16 @@ package hugolib import ( - "bytes" "fmt" "strings" "testing" - "html/template" "os" "path/filepath" "time" + "github.com/gohugoio/hugo/resources/page" + "github.com/fortytw2/leaktest" "github.com/fsnotify/fsnotify" "github.com/gohugoio/hugo/helpers" @@ -66,8 +66,8 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) { assert.Equal("/blog/en/foo", enSite.PathSpec.RelURL("foo", true)) - doc1en := enSite.RegularPages[0] - doc1fr := frSite.RegularPages[0] + doc1en := enSite.RegularPages()[0] + doc1fr := frSite.RegularPages()[0] enPerm := doc1en.Permalink() enRelPerm := doc1en.RelPermalink() @@ -100,7 +100,7 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) { // Check list pages b.AssertFileContent(pathMod("public/fr/sect/index.html"), "List", "Bonjour") b.AssertFileContent("public/en/sect/index.html", "List", "Hello") - b.AssertFileContent(pathMod("public/fr/plaques/frtag1/index.html"), "Taxonomy List", "Bonjour") + b.AssertFileContent(pathMod("public/fr/plaques/FRtag1/index.html"), "Taxonomy List", "Bonjour") b.AssertFileContent("public/en/tags/tag1/index.html", "Taxonomy List", "Hello") // Check sitemaps @@ -126,8 +126,8 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) { pathMod(`doc2\n\n

some content") + b.AssertFileContent("public/en/sect/doc2/index.html", "Single: doc2|Hello|en|", "\n\n

doc2

\n\n

some content") enSite := sites[0] frSite := sites[1] - assert.Len(enSite.RegularPages, 5) - assert.Len(frSite.RegularPages, 4) + assert.Len(enSite.RegularPages(), 5) + assert.Len(frSite.RegularPages(), 4) // Verify translations b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Hello") @@ -458,6 +435,10 @@ func TestMultiSitesRebuild(t *testing.T) { b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Shortcode: Bonjour") b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Shortcode: Hello") + homeEn := enSite.getPage(page.KindHome) + require.NotNil(t, homeEn) + assert.Len(homeEn.Translations(), 3) + contentFs := b.H.BaseFs.Content.Fs for i, this := range []struct { @@ -478,15 +459,15 @@ func TestMultiSitesRebuild(t *testing.T) { }, []fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc2.en.md"), Op: fsnotify.Remove}}, func(t *testing.T) { - assert.Len(enSite.RegularPages, 4, "1 en removed") + assert.Len(enSite.RegularPages(), 4, "1 en removed") // Check build stats - require.Equal(t, 1, enSite.draftCount, "Draft") - require.Equal(t, 1, enSite.futureCount, "Future") - require.Equal(t, 1, enSite.expiredCount, "Expired") - require.Equal(t, 0, frSite.draftCount, "Draft") - require.Equal(t, 1, frSite.futureCount, "Future") - require.Equal(t, 1, frSite.expiredCount, "Expired") + require.Equal(t, 1, enSite.buildStats.draftCount, "Draft") + require.Equal(t, 1, enSite.buildStats.futureCount, "Future") + require.Equal(t, 1, enSite.buildStats.expiredCount, "Expired") + require.Equal(t, 0, frSite.buildStats.draftCount, "Draft") + require.Equal(t, 1, frSite.buildStats.futureCount, "Future") + require.Equal(t, 1, frSite.buildStats.expiredCount, "Expired") }, }, { @@ -501,12 +482,12 @@ func TestMultiSitesRebuild(t *testing.T) { {Name: filepath.FromSlash("content/new1.fr.md"), Op: fsnotify.Create}, }, func(t *testing.T) { - assert.Len(enSite.RegularPages, 6) - assert.Len(enSite.AllPages, 34) - assert.Len(frSite.RegularPages, 5) - require.Equal(t, "new_fr_1", frSite.RegularPages[3].title) - require.Equal(t, "new_en_2", enSite.RegularPages[0].title) - require.Equal(t, "new_en_1", enSite.RegularPages[1].title) + assert.Len(enSite.RegularPages(), 6) + assert.Len(enSite.AllPages(), 34) + assert.Len(frSite.RegularPages(), 5) + require.Equal(t, "new_fr_1", frSite.RegularPages()[3].Title()) + require.Equal(t, "new_en_2", enSite.RegularPages()[0].Title()) + require.Equal(t, "new_en_1", enSite.RegularPages()[1].Title()) rendered := readDestination(t, fs, "public/en/new1/index.html") require.True(t, strings.Contains(rendered, "new_en_1"), rendered) @@ -521,7 +502,7 @@ func TestMultiSitesRebuild(t *testing.T) { }, []fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc1.en.md"), Op: fsnotify.Write}}, func(t *testing.T) { - assert.Len(enSite.RegularPages, 6) + assert.Len(enSite.RegularPages(), 6) doc1 := readDestination(t, fs, "public/en/sect/doc1-slug/index.html") require.True(t, strings.Contains(doc1, "CHANGED"), doc1) @@ -539,8 +520,8 @@ func TestMultiSitesRebuild(t *testing.T) { {Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Rename}, }, func(t *testing.T) { - assert.Len(enSite.RegularPages, 6, "Rename") - require.Equal(t, "new_en_1", enSite.RegularPages[1].title) + assert.Len(enSite.RegularPages(), 6, "Rename") + require.Equal(t, "new_en_1", enSite.RegularPages()[1].Title()) rendered := readDestination(t, fs, "public/en/new1renamed/index.html") require.True(t, strings.Contains(rendered, "new_en_1"), rendered) }}, @@ -554,9 +535,9 @@ func 
TestMultiSitesRebuild(t *testing.T) { }, []fsnotify.Event{{Name: filepath.FromSlash("layouts/_default/single.html"), Op: fsnotify.Write}}, func(t *testing.T) { - assert.Len(enSite.RegularPages, 6) - assert.Len(enSite.AllPages, 34) - assert.Len(frSite.RegularPages, 5) + assert.Len(enSite.RegularPages(), 6) + assert.Len(enSite.AllPages(), 34) + assert.Len(frSite.RegularPages(), 5) doc1 := readDestination(t, fs, "public/en/sect/doc1-slug/index.html") require.True(t, strings.Contains(doc1, "Template Changed"), doc1) }, @@ -571,18 +552,18 @@ func TestMultiSitesRebuild(t *testing.T) { }, []fsnotify.Event{{Name: filepath.FromSlash("i18n/fr.yaml"), Op: fsnotify.Write}}, func(t *testing.T) { - assert.Len(enSite.RegularPages, 6) - assert.Len(enSite.AllPages, 34) - assert.Len(frSite.RegularPages, 5) + assert.Len(enSite.RegularPages(), 6) + assert.Len(enSite.AllPages(), 34) + assert.Len(frSite.RegularPages(), 5) docEn := readDestination(t, fs, "public/en/sect/doc1-slug/index.html") require.True(t, strings.Contains(docEn, "Hello"), "No Hello") docFr := readDestination(t, fs, "public/fr/sect/doc1/index.html") require.True(t, strings.Contains(docFr, "Salut"), "No Salut") - homeEn := enSite.getPage(KindHome) + homeEn := enSite.getPage(page.KindHome) require.NotNil(t, homeEn) assert.Len(homeEn.Translations(), 3) - require.Equal(t, "fr", homeEn.Translations()[0].Lang()) + require.Equal(t, "fr", homeEn.Translations()[0].Language().Lang) }, }, @@ -595,9 +576,9 @@ func TestMultiSitesRebuild(t *testing.T) { {Name: filepath.FromSlash("layouts/shortcodes/shortcode.html"), Op: fsnotify.Write}, }, func(t *testing.T) { - assert.Len(enSite.RegularPages, 6) - assert.Len(enSite.AllPages, 34) - assert.Len(frSite.RegularPages, 5) + assert.Len(enSite.RegularPages(), 6) + assert.Len(enSite.AllPages(), 34) + assert.Len(frSite.RegularPages(), 5) b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Modified Shortcode: Salut") b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Modified Shortcode: Hello") }, @@ -617,23 +598,6 @@ func TestMultiSitesRebuild(t *testing.T) { this.assertFunc(t) } - // Check that the drafts etc. are not built/processed/rendered. - assertShouldNotBuild(t, b.H) - -} - -func assertShouldNotBuild(t *testing.T, sites *HugoSites) { - s := sites.Sites[0] - - for _, p := range s.rawAllPages { - // No HTML when not processed - require.Equal(t, p.shouldBuild(), bytes.Contains(p.workContent, []byte("`, + `TOC2: