Make Page an interface

The main motivation of this commit is to add a `page.Page` interface to replace the very file-oriented `hugolib.Page` struct.
This is all a preparation step for issue  #5074, "pages from other data sources".

But this also fixes a set of annoying limitations, especially related to custom output formats, and shortcodes.

Most notable changes:

* The inner content of shortcodes using the `{{%` as the outer-most delimiter will now be sent to the content renderer, e.g. Blackfriday.
  This means that any markdown will partake in the global ToC and footnote context etc.
* The Custom Output formats are now "fully virtualized". This removes many of the current limitations.
* The taxonomy list type now has a reference to the `Page` object.
  This improves the taxonomy template `.Title` situation and makes common template constructs much simpler.

See #5074
Fixes #5763
Fixes #5758
Fixes #5090
Fixes #5204
Fixes #4695
Fixes #5607
Fixes #5707
Fixes #5719
Fixes #3113
Fixes #5706
Fixes #5767
Fixes #5723
Fixes #5769
Fixes #5770
Fixes #5771
Fixes #5759
Fixes #5776
Fixes #5777
Fixes #5778
This commit is contained in:
Bjørn Erik Pedersen 2019-01-02 12:33:26 +01:00
parent 44f5c1c14c
commit 597e418cb0
No known key found for this signature in database
GPG Key ID: 330E6E2BD4859D8F
206 changed files with 14442 additions and 9679 deletions

2
benchbep.sh Executable file
View File

@ -0,0 +1,2 @@
# Run the hugolib site-building benchmark 3 times, writing results to 1.bench,
# then compare the best runs against the stored baseline in 0.bench.
gobench -package=./hugolib -bench="BenchmarkSiteBuilding/TOML,num_langs=3,num_pages=5000,tags_per_page=5,shortcodes,render" -count=3 > 1.bench
benchcmp -best 0.bench 1.bench

529
codegen/methods.go Normal file
View File

@ -0,0 +1,529 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
// Some functions in this file (see comments) is based on the Go source code,
// copyright The Go Authors and governed by a BSD-style license.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package codegen contains helpers for code generation.
package codegen
import (
"fmt"
"go/ast"
"go/parser"
"go/token"
"os"
"path"
"path/filepath"
"reflect"
"regexp"
"sort"
"strings"
"sync"
)
// Make room for insertions: weights are spaced by this factor so that
// methods from embedded interfaces can later be slotted in between
// (see the "Complement" step in parseSource).
const weightWidth = 1000
// NewInspector creates a new Inspector given a source root.
func NewInspector(root string) *Inspector {
	inspector := &Inspector{ProjectRootDir: root}
	return inspector
}
// Inspector provides methods to help code generation. It uses a combination
// of reflection and source code AST to do the heavy lifting.
type Inspector struct {
	ProjectRootDir string
	// init guards the one-time source parse performed by parseSource.
	init sync.Once
	// Determines method order. Go's reflect sorts lexicographically, so
	// we must parse the source to preserve this order.
	// Keyed by "pkg.InterfaceName", then by method name.
	methodWeight map[string]map[string]int
}
// MethodsFromTypes create a method set from the include slice, excluding any
// method in exclude. The returned methods are sorted in source-declaration
// order (as recorded by parseSource), falling back to name order.
func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.Type) Methods {
	c.parseSource()

	var methods Methods

	var excludes = make(map[string]bool)

	if len(exclude) > 0 {
		// Build the exclusion set from the method names of the excluded types
		// (recursive call with no further exclusions).
		for _, m := range c.MethodsFromTypes(exclude, nil) {
			excludes[m.Name] = true
		}
	}

	// There may be overlapping interfaces in types. Do a simple check for now.
	seen := make(map[string]bool)

	// nameAndPackage returns the display name of t — with a short package
	// qualifier and a "*" marker for pointers — plus its full import path
	// (empty for builtins and unnamed types).
	nameAndPackage := func(t reflect.Type) (string, string) {
		var name, pkg string

		isPointer := t.Kind() == reflect.Ptr

		if isPointer {
			t = t.Elem()
		}

		pkgPrefix := ""
		if pkgPath := t.PkgPath(); pkgPath != "" {
			pkgPath = strings.TrimSuffix(pkgPath, "/")
			_, shortPath := path.Split(pkgPath)
			pkgPrefix = shortPath + "."
			pkg = pkgPath
		}

		name = t.Name()
		if name == "" {
			// interface{}
			name = t.String()
		}

		if isPointer {
			pkgPrefix = "*" + pkgPrefix
		}

		name = pkgPrefix + name

		return name, pkg
	}

	for _, t := range include {
		for i := 0; i < t.NumMethod(); i++ {
			m := t.Method(i)
			if excludes[m.Name] || seen[m.Name] {
				continue
			}

			seen[m.Name] = true

			if m.PkgPath != "" {
				// Not exported
				continue
			}

			numIn := m.Type.NumIn()

			ownerName, _ := nameAndPackage(t)

			method := Method{Owner: t, OwnerName: ownerName, Name: m.Name}

			// Collect parameter types and the imports they require.
			for i := 0; i < numIn; i++ {
				in := m.Type.In(i)

				name, pkg := nameAndPackage(in)

				if pkg != "" {
					method.Imports = append(method.Imports, pkg)
				}

				method.In = append(method.In, name)
			}

			// Collect return types and the imports they require.
			numOut := m.Type.NumOut()

			if numOut > 0 {
				for i := 0; i < numOut; i++ {
					out := m.Type.Out(i)
					name, pkg := nameAndPackage(out)

					if pkg != "" {
						method.Imports = append(method.Imports, pkg)
					}

					method.Out = append(method.Out, name)
				}
			}

			methods = append(methods, method)
		}
	}

	// Restore the source-declaration order recorded by parseSource; reflect
	// returned the methods lexicographically.
	sort.SliceStable(methods, func(i, j int) bool {
		mi, mj := methods[i], methods[j]
		wi := c.methodWeight[mi.OwnerName][mi.Name]
		wj := c.methodWeight[mj.OwnerName][mj.Name]
		if wi == wj {
			return mi.Name < mj.Name
		}
		return wi < wj
	})

	return methods
}
// parseSource walks the project tree once and records, for every exported
// interface found, the source-declaration position of each of its methods.
// Go's reflect package sorts methods lexicographically, so this weight map
// is the only way to recover the declared order.
func (c *Inspector) parseSource() {
	c.init.Do(func() {
		if !strings.Contains(c.ProjectRootDir, "hugo") {
			// Guard against accidentally walking an unrelated (and
			// potentially huge) directory tree.
			panic("dir must be set to the Hugo root")
		}

		c.methodWeight = make(map[string]map[string]int)
		dirExcludes := regexp.MustCompile("docs|examples")
		fileExcludes := regexp.MustCompile("autogen")
		var filenames []string

		err := filepath.Walk(c.ProjectRootDir, func(path string, info os.FileInfo, err error) error {
			if err != nil {
				// Propagate walk errors; info is nil in this case, so the
				// IsDir call below would otherwise panic.
				return err
			}
			if info.IsDir() {
				if dirExcludes.MatchString(info.Name()) {
					return filepath.SkipDir
				}
			}

			if !strings.HasSuffix(path, ".go") || fileExcludes.MatchString(path) {
				return nil
			}

			filenames = append(filenames, path)

			return nil
		})
		if err != nil {
			// This runs at code-generation time only, so failing hard is fine.
			panic(err)
		}

		for _, filename := range filenames {
			pkg := c.packageFromPath(filename)

			fset := token.NewFileSet()
			node, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
			if err != nil {
				panic(err)
			}

			ast.Inspect(node, func(n ast.Node) bool {
				switch t := n.(type) {
				case *ast.TypeSpec:
					if t.Name.IsExported() {
						switch it := t.Type.(type) {
						case *ast.InterfaceType:
							iface := pkg + "." + t.Name.Name
							methodNames := collectMethodsRecursive(pkg, it.Methods.List)
							weights := make(map[string]int)
							weight := weightWidth
							for _, name := range methodNames {
								weights[name] = weight
								weight += weightWidth
							}
							c.methodWeight[iface] = weights
						}
					}
				}
				return true
			})
		}

		// Complement: an embedded interface shows up in a weight map as a
		// pseudo-method named "pkg.Iface". Expand those entries so the
		// embedded interface's methods inherit a position derived from the
		// embedder's weight, preserving overall source order.
		for _, v1 := range c.methodWeight {
			for k2, w := range v1 {
				if v, found := c.methodWeight[k2]; found {
					for k3, v3 := range v {
						v1[k3] = (v3 / weightWidth) + w
					}
				}
			}
		}
	})
}
// packageFromPath derives a package name from a file path: the name of the
// containing directory for a filename, or the last path element otherwise.
func (c *Inspector) packageFromPath(p string) string {
	p = filepath.ToSlash(p)
	base := path.Base(p)
	if strings.Contains(base, ".") {
		// Looks like a filename: use the parent directory's name.
		return path.Base(strings.TrimSuffix(p, base))
	}
	return base
}
// Method holds enough information about a single interface method to
// recreate its declaration in generated code.
type Method struct {
	// The interface we extracted this method from.
	Owner reflect.Type
	// String version of the above, on the form PACKAGE.NAME, e.g.
	// page.Page
	OwnerName string
	// Method name.
	Name string
	// Imports needed to satisfy the method signature.
	Imports []string
	// Argument types, including any package prefix, e.g. string, int, interface{},
	// net.Url
	In []string
	// Return types.
	Out []string
}
// Declaration creates a method declaration (without any body) for the given receiver.
func (m Method) Declaration(receiver string) string {
	var b strings.Builder
	fmt.Fprintf(&b, "func (%s %s) ", receiverShort(receiver), receiver)
	b.WriteString(m.Name)
	b.WriteString(m.inStr())
	b.WriteString(" ")
	b.WriteString(m.outStr())
	return b.String()
}
// Delegate creates a delegate call string.
func (m Method) Delegate(receiver, delegate string) string {
	var b strings.Builder
	if len(m.Out) > 0 {
		// The generated call returns whatever the delegate returns.
		b.WriteString("return ")
	}
	fmt.Fprintf(&b, "%s.%s.%s%s", receiverShort(receiver), delegate, m.Name, m.inOutStr())
	return b.String()
}
// String renders the method signature on a single newline-terminated line.
func (m Method) String() string {
	return fmt.Sprintf("%s%s %s\n", m.Name, m.inStr(), m.outStr())
}
// inOutStr renders the argument list as used at a call site,
// e.g. "(arg0, arg1)".
func (m Method) inOutStr() string {
	if len(m.In) == 0 {
		return "()"
	}

	names := make([]string, 0, len(m.In))
	for i := range m.In {
		names = append(names, fmt.Sprintf("arg%d", i))
	}

	return "(" + strings.Join(names, ", ") + ")"
}
// inStr renders the parameter list as used in a declaration,
// e.g. "(arg0 string, arg1 int)".
func (m Method) inStr() string {
	if len(m.In) == 0 {
		return "()"
	}

	params := make([]string, 0, len(m.In))
	for i, typ := range m.In {
		params = append(params, fmt.Sprintf("arg%d %s", i, typ))
	}

	return "(" + strings.Join(params, ", ") + ")"
}
// outStr renders the return values: "" for none, the bare type for one,
// or a parenthesized list, e.g. "(string, error)".
func (m Method) outStr() string {
	switch len(m.Out) {
	case 0:
		return ""
	case 1:
		return m.Out[0]
	default:
		return "(" + strings.Join(m.Out, ", ") + ")"
	}
}
// Methods represents a list of methods for one or more interfaces.
// The order matches the defined order in their source file(s), as
// reconstructed by Inspector.parseSource.
type Methods []Method
// Imports returns a sorted list of package imports needed to satisfy the
// signatures of all methods.
func (m Methods) Imports() []string {
	var all []string
	for _, method := range m {
		all = append(all, method.Imports...)
	}

	if len(all) == 0 {
		return all
	}

	// Drop duplicates and empties, then sort for stable output.
	deduped := uniqueNonEmptyStrings(all)
	sort.Strings(deduped)
	return deduped
}
// ToMarshalJSON creates a MarshalJSON function for these methods. Any method
// name matching any of the regexps in excludes will be ignored. It returns
// the generated source and the package imports it needs.
func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (string, []string) {
	var sb strings.Builder

	r := receiverShort(receiver)
	what := firstToUpper(trimAsterisk(receiver))
	pkgName := path.Base(pkgPath)

	fmt.Fprintf(&sb, "func Marshal%sToJSON(%s %s) ([]byte, error) {\n", what, r, receiver)

	var methods Methods
	excludeRes := make([]*regexp.Regexp, len(excludes))

	for i, exclude := range excludes {
		excludeRes[i] = regexp.MustCompile(exclude)
	}

	for _, method := range m {
		// Exclude methods with arguments and incompatible return values.
		if len(method.In) > 0 || len(method.Out) == 0 || len(method.Out) > 2 {
			continue
		}

		// With two return values, the second must be an error.
		if len(method.Out) == 2 && method.Out[1] != "error" {
			continue
		}

		// Skip methods matching any exclude pattern. Note: a bare "continue"
		// inside the regexp loop would only skip the regexp, not the method.
		excluded := false
		for _, re := range excludeRes {
			if re.MatchString(method.Name) {
				excluded = true
				break
			}
		}
		if excluded {
			continue
		}

		methods = append(methods, method)
	}

	// Emit one local variable per method, unwrapping (value, error) pairs.
	for _, method := range methods {
		varn := varName(method.Name)
		if len(method.Out) == 1 {
			fmt.Fprintf(&sb, "\t%s := %s.%s()\n", varn, r, method.Name)
		} else {
			fmt.Fprintf(&sb, "\t%s, err := %s.%s()\n", varn, r, method.Name)
			fmt.Fprint(&sb, "\tif err != nil {\n\t\treturn nil, err\n\t}\n")
		}
	}

	// Emit an anonymous struct mirroring the method results …
	fmt.Fprint(&sb, "\n\ts := struct {\n")

	for _, method := range methods {
		fmt.Fprintf(&sb, "\t\t%s %s\n", method.Name, typeName(method.Out[0], pkgName))
	}

	// … and populate it from the locals above.
	fmt.Fprint(&sb, "\n\t}{\n")

	for _, method := range methods {
		varn := varName(method.Name)
		fmt.Fprintf(&sb, "\t\t%s: %s,\n", method.Name, varn)
	}

	fmt.Fprint(&sb, "\n\t}\n\n")
	fmt.Fprint(&sb, "\treturn json.Marshal(&s)\n}")

	pkgImports := append(methods.Imports(), "encoding/json")

	if pkgPath != "" {
		// Exclude the receiver's own package from the import list.
		// Filter into a fresh view instead of deleting while ranging.
		filtered := pkgImports[:0]
		for _, pkgImp := range pkgImports {
			if pkgImp != pkgPath {
				filtered = append(filtered, pkgImp)
			}
		}
		pkgImports = filtered
	}

	return sb.String(), pkgImports
}
// collectMethodsRecursive collects the method names of the given interface
// field list in declaration order, descending into embedded interfaces.
// Embedded interfaces defined in another file or package cannot be resolved
// here; for those the qualified name ("pkg.Name") is recorded instead and
// expanded later — see the "Complement" step in parseSource.
func collectMethodsRecursive(pkg string, f []*ast.Field) []string {
	var methodNames []string
	for _, m := range f {
		if m.Names != nil {
			// A regular method declaration.
			methodNames = append(methodNames, m.Names[0].Name)
			continue
		}

		if ident, ok := m.Type.(*ast.Ident); ok && ident.Obj != nil {
			// Embedded interface
			methodNames = append(
				methodNames,
				collectMethodsRecursive(
					pkg,
					ident.Obj.Decl.(*ast.TypeSpec).Type.(*ast.InterfaceType).Methods.List)...)
		} else {
			// Embedded, but in a different file/package. Return the
			// package.Name and deal with that later.
			name := packageName(m.Type)
			if !strings.Contains(name, ".") {
				// Assume current package
				name = pkg + "." + name
			}
			methodNames = append(methodNames, name)
		}
	}

	return methodNames
}
// firstToLower lowercases the first character of name. An empty string is
// returned unchanged (the original panicked on name[:1]).
func firstToLower(name string) string {
	if name == "" {
		return name
	}
	return strings.ToLower(name[:1]) + name[1:]
}
// firstToUpper uppercases the first character of name. An empty string is
// returned unchanged (the original panicked on name[:1]).
func firstToUpper(name string) string {
	if name == "" {
		return name
	}
	return strings.ToUpper(name[:1]) + name[1:]
}
func packageName(e ast.Expr) string {
switch tp := e.(type) {
case *ast.Ident:
return tp.Name
case *ast.SelectorExpr:
return fmt.Sprintf("%s.%s", packageName(tp.X), packageName(tp.Sel))
}
return ""
}
// receiverShort derives the conventional one-letter receiver variable name
// from a receiver type, e.g. "*Page" => "p".
func receiverShort(receiver string) string {
	name := strings.TrimPrefix(receiver, "*")
	return strings.ToLower(name)[:1]
}
// trimAsterisk returns name with any leading pointer asterisk removed.
func trimAsterisk(name string) string {
	if strings.HasPrefix(name, "*") {
		return name[1:]
	}
	return name
}
// typeName strips the given package qualifier from a type name,
// e.g. ("page.Page", "page") => "Page".
func typeName(name, pkg string) string {
	prefix := pkg + "."
	if strings.HasPrefix(name, prefix) {
		return name[len(prefix):]
	}
	return name
}
// uniqueNonEmptyStrings returns s with empty strings and duplicates removed,
// preserving first-seen order.
func uniqueNonEmptyStrings(s []string) []string {
	var result []string
	seen := make(map[string]struct{})

	for _, str := range s {
		if str == "" {
			continue
		}
		if _, found := seen[str]; found {
			continue
		}
		seen[str] = struct{}{}
		result = append(result, str)
	}

	return result
}
// varName converts a method name into a local variable name by lowercasing
// its first letter, remapping the few results that would collide with Go
// keywords (or highlighter keywords).
func varName(name string) string {
	lowered := strings.ToLower(name[:1]) + name[1:]

	// Adjust some reserved keywords, see https://golang.org/ref/spec#Keywords
	switch lowered {
	case "type":
		return "typ"
	case "package":
		return "pkg"
	case "len":
		// Not reserved, but syntax highlighters has it as a keyword.
		return "length"
	}

	return lowered
}

View File

@ -1,4 +1,4 @@
// Copyright 2017-present The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -11,13 +11,10 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
package hugolib package codegen
import ( type IEmbed interface {
"github.com/gohugoio/hugo/resources/resource" MethodEmbed3(s string) string
) MethodEmbed1() string
MethodEmbed2()
var ( }
_ resource.Resource = (*Page)(nil)
_ resource.Resource = (*PageOutput)(nil)
)

100
codegen/methods_test.go Normal file
View File

@ -0,0 +1,100 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package codegen
import (
"fmt"
"net"
"os"
"reflect"
"testing"
"github.com/gohugoio/hugo/common/herrors"
"github.com/stretchr/testify/require"
)
// TestMethods exercises Inspector against the fixture interfaces declared
// below (I, IEmbed, IEOnly). It assumes the working directory is inside the
// Hugo tree, since NewInspector/parseSource require a "hugo" root.
func TestMethods(t *testing.T) {

	var (
		zeroIE     = reflect.TypeOf((*IEmbed)(nil)).Elem()
		zeroIEOnly = reflect.TypeOf((*IEOnly)(nil)).Elem()
		zeroI      = reflect.TypeOf((*I)(nil)).Elem()
	)

	dir, _ := os.Getwd()
	c := NewInspector(dir)

	t.Run("MethodsFromTypes", func(t *testing.T) {
		assert := require.New(t)

		methods := c.MethodsFromTypes([]reflect.Type{zeroI}, nil)

		methodsStr := fmt.Sprint(methods)

		assert.Contains(methodsStr, "Method1(arg0 herrors.ErrorContext)")
		assert.Contains(methodsStr, "Method7() interface {}")
		// Source-declaration order must be preserved, including for methods
		// pulled in via the embedded IEmbed interface.
		assert.Contains(methodsStr, "Method0() string\n Method4() string")
		assert.Contains(methodsStr, "MethodEmbed3(arg0 string) string\n MethodEmbed1() string")

		assert.Contains(methods.Imports(), "github.com/gohugoio/hugo/common/herrors")
	})

	t.Run("EmbedOnly", func(t *testing.T) {
		assert := require.New(t)

		methods := c.MethodsFromTypes([]reflect.Type{zeroIEOnly}, nil)

		methodsStr := fmt.Sprint(methods)

		assert.Contains(methodsStr, "MethodEmbed3(arg0 string) string")
	})

	t.Run("ToMarshalJSON", func(t *testing.T) {
		assert := require.New(t)

		// Methods of the excluded IEmbed must not appear in the generated
		// MarshalJSON source.
		m, pkg := c.MethodsFromTypes(
			[]reflect.Type{zeroI},
			[]reflect.Type{zeroIE}).ToMarshalJSON("*page", "page")

		assert.Contains(m, "method6 := p.Method6()")
		assert.Contains(m, "Method0: method0,")
		assert.Contains(m, "return json.Marshal(&s)")

		assert.Contains(pkg, "github.com/gohugoio/hugo/common/herrors")
		assert.Contains(pkg, "encoding/json")

		fmt.Println(pkg)
	})

}
// I is a test fixture: an interface with an embedded interface, exported and
// unexported methods, and a variety of parameter/return shapes.
type I interface {
	IEmbed
	Method0() string
	Method4() string
	Method1(myerr herrors.ErrorContext)
	Method3(myint int, mystring string)
	Method5() (string, error)
	Method6() *net.IP
	Method7() interface{}
	Method8() herrors.ErrorContext
	// Unexported methods: MethodsFromTypes must skip these.
	method2()
	method9() os.FileInfo
}
// IEOnly embeds IEmbed and declares nothing else; used to verify that
// methods of embedded interfaces are collected.
type IEOnly interface {
	IEmbed
}

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -357,6 +357,13 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
c.changeDetector = changeDetector c.changeDetector = changeDetector
} }
if c.Cfg.GetBool("logPathWarnings") {
fs.Destination = hugofs.NewCreateCountingFs(fs.Destination)
}
// To debug hard-to-find path issues.
//fs.Destination = hugofs.NewStacktracerFs(fs.Destination, `fr/fr`)
err = c.initFs(fs) err = c.initFs(fs)
if err != nil { if err != nil {
return return

View File

@ -1,4 +1,4 @@
// Copyright 2017 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -23,7 +23,6 @@ import (
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/nitro"
) )
type commandsBuilder struct { type commandsBuilder struct {
@ -197,6 +196,12 @@ type hugoBuilderCommon struct {
gc bool gc bool
// Profile flags (for debugging of performance problems)
cpuprofile string
memprofile string
mutexprofile string
traceprofile string
// TODO(bep) var vs string // TODO(bep) var vs string
logging bool logging bool
verbose bool verbose bool
@ -255,13 +260,22 @@ func (cc *hugoBuilderCommon) handleFlags(cmd *cobra.Command) {
cmd.Flags().Bool("enableGitInfo", false, "add Git revision, date and author info to the pages") cmd.Flags().Bool("enableGitInfo", false, "add Git revision, date and author info to the pages")
cmd.Flags().BoolVar(&cc.gc, "gc", false, "enable to run some cleanup tasks (remove unused cache files) after the build") cmd.Flags().BoolVar(&cc.gc, "gc", false, "enable to run some cleanup tasks (remove unused cache files) after the build")
cmd.Flags().BoolVar(&nitro.AnalysisOn, "stepAnalysis", false, "display memory and timing of different steps of the program")
cmd.Flags().Bool("templateMetrics", false, "display metrics about template executions") cmd.Flags().Bool("templateMetrics", false, "display metrics about template executions")
cmd.Flags().Bool("templateMetricsHints", false, "calculate some improvement hints when combined with --templateMetrics") cmd.Flags().Bool("templateMetricsHints", false, "calculate some improvement hints when combined with --templateMetrics")
cmd.Flags().BoolP("forceSyncStatic", "", false, "copy all files when static is changed.") cmd.Flags().BoolP("forceSyncStatic", "", false, "copy all files when static is changed.")
cmd.Flags().BoolP("noTimes", "", false, "don't sync modification time of files") cmd.Flags().BoolP("noTimes", "", false, "don't sync modification time of files")
cmd.Flags().BoolP("noChmod", "", false, "don't sync permission mode of files") cmd.Flags().BoolP("noChmod", "", false, "don't sync permission mode of files")
cmd.Flags().BoolP("i18n-warnings", "", false, "print missing translations") cmd.Flags().BoolP("i18n-warnings", "", false, "print missing translations")
cmd.Flags().BoolP("path-warnings", "", false, "print warnings on duplicate target paths etc.")
cmd.Flags().StringVarP(&cc.cpuprofile, "profile-cpu", "", "", "write cpu profile to `file`")
cmd.Flags().StringVarP(&cc.memprofile, "profile-mem", "", "", "write memory profile to `file`")
cmd.Flags().StringVarP(&cc.mutexprofile, "profile-mutex", "", "", "write Mutex profile to `file`")
cmd.Flags().StringVarP(&cc.traceprofile, "trace", "", "", "write trace to `file` (not useful in general)")
// Hide these for now.
cmd.Flags().MarkHidden("profile-cpu")
cmd.Flags().MarkHidden("profile-mem")
cmd.Flags().MarkHidden("profile-mutex")
cmd.Flags().StringSlice("disableKinds", []string{}, "disable different kind of pages (home, RSS etc.)") cmd.Flags().StringSlice("disableKinds", []string{}, "disable different kind of pages (home, RSS etc.)")

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -20,6 +20,8 @@ import (
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/gohugoio/hugo/common/types"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/viper" "github.com/spf13/viper"
@ -41,7 +43,7 @@ func TestExecute(t *testing.T) {
assert.NoError(resp.Err) assert.NoError(resp.Err)
result := resp.Result result := resp.Result
assert.True(len(result.Sites) == 1) assert.True(len(result.Sites) == 1)
assert.True(len(result.Sites[0].RegularPages) == 1) assert.True(len(result.Sites[0].RegularPages()) == 1)
} }
func TestCommandsPersistentFlags(t *testing.T) { func TestCommandsPersistentFlags(t *testing.T) {
@ -75,6 +77,7 @@ func TestCommandsPersistentFlags(t *testing.T) {
"--port=1366", "--port=1366",
"--renderToDisk", "--renderToDisk",
"--source=mysource", "--source=mysource",
"--path-warnings",
}, func(commands []cmder) { }, func(commands []cmder) {
var sc *serverCmd var sc *serverCmd
for _, command := range commands { for _, command := range commands {
@ -112,6 +115,9 @@ func TestCommandsPersistentFlags(t *testing.T) {
assert.True(cfg.GetBool("gc")) assert.True(cfg.GetBool("gc"))
// The flag is named path-warnings
assert.True(cfg.GetBool("logPathWarnings"))
// The flag is named i18n-warnings // The flag is named i18n-warnings
assert.True(cfg.GetBool("logI18nWarnings")) assert.True(cfg.GetBool("logI18nWarnings"))
@ -183,8 +189,8 @@ func TestCommandsExecute(t *testing.T) {
} }
for _, test := range tests { for _, test := range tests {
b := newCommandsBuilder().addAll().build()
hugoCmd := newCommandsBuilder().addAll().build().getCommand() hugoCmd := b.getCommand()
test.flags = append(test.flags, "--quiet") test.flags = append(test.flags, "--quiet")
hugoCmd.SetArgs(append(test.commands, test.flags...)) hugoCmd.SetArgs(append(test.commands, test.flags...))
@ -200,6 +206,13 @@ func TestCommandsExecute(t *testing.T) {
assert.NoError(err, fmt.Sprintf("%v", test.commands)) assert.NoError(err, fmt.Sprintf("%v", test.commands))
} }
// Assert that we have not left any development debug artifacts in
// the code.
if b.c != nil {
_, ok := b.c.destinationFs.(types.DevMarker)
assert.False(ok)
}
} }
} }

View File

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -20,6 +20,8 @@ import (
"strings" "strings"
"time" "time"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
@ -124,8 +126,8 @@ func (cc *convertCmd) convertContents(format metadecoders.Format) error {
site := h.Sites[0] site := h.Sites[0]
site.Log.FEEDBACK.Println("processing", len(site.AllPages), "content files") site.Log.FEEDBACK.Println("processing", len(site.AllPages()), "content files")
for _, p := range site.AllPages { for _, p := range site.AllPages() {
if err := cc.convertAndSavePage(p, site, format); err != nil { if err := cc.convertAndSavePage(p, site, format); err != nil {
return err return err
} }
@ -133,24 +135,24 @@ func (cc *convertCmd) convertContents(format metadecoders.Format) error {
return nil return nil
} }
func (cc *convertCmd) convertAndSavePage(p *hugolib.Page, site *hugolib.Site, targetFormat metadecoders.Format) error { func (cc *convertCmd) convertAndSavePage(p page.Page, site *hugolib.Site, targetFormat metadecoders.Format) error {
// The resources are not in .Site.AllPages. // The resources are not in .Site.AllPages.
for _, r := range p.Resources.ByType("page") { for _, r := range p.Resources().ByType("page") {
if err := cc.convertAndSavePage(r.(*hugolib.Page), site, targetFormat); err != nil { if err := cc.convertAndSavePage(r.(page.Page), site, targetFormat); err != nil {
return err return err
} }
} }
if p.Filename() == "" { if p.File() == nil {
// No content file. // No content file.
return nil return nil
} }
errMsg := fmt.Errorf("Error processing file %q", p.Path()) errMsg := fmt.Errorf("Error processing file %q", p.Path())
site.Log.INFO.Println("Attempting to convert", p.LogicalName()) site.Log.INFO.Println("Attempting to convert", p.File().Filename())
f, _ := p.File.(src.ReadableFile) f, _ := p.File().(src.ReadableFile)
file, err := f.Open() file, err := f.Open()
if err != nil { if err != nil {
site.Log.ERROR.Println(errMsg) site.Log.ERROR.Println(errMsg)
@ -186,7 +188,7 @@ func (cc *convertCmd) convertAndSavePage(p *hugolib.Page, site *hugolib.Site, ta
newContent.Write(pf.content) newContent.Write(pf.content)
newFilename := p.Filename() newFilename := p.File().Filename()
if cc.outputDir != "" { if cc.outputDir != "" {
contentDir := strings.TrimSuffix(newFilename, p.Path()) contentDir := strings.TrimSuffix(newFilename, p.Path())

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -18,11 +18,16 @@ package commands
import ( import (
"fmt" "fmt"
"io/ioutil" "io/ioutil"
"os/signal" "os/signal"
"runtime/pprof"
"runtime/trace"
"sort" "sort"
"sync/atomic" "sync/atomic"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/hugo"
"github.com/pkg/errors" "github.com/pkg/errors"
@ -214,6 +219,7 @@ func initializeFlags(cmd *cobra.Command, cfg config.Provider) {
"themesDir", "themesDir",
"verbose", "verbose",
"verboseLog", "verboseLog",
"duplicateTargetPaths",
} }
// Will set a value even if it is the default. // Will set a value even if it is the default.
@ -235,6 +241,7 @@ func initializeFlags(cmd *cobra.Command, cfg config.Provider) {
// Set some "config aliases" // Set some "config aliases"
setValueFromFlag(cmd.Flags(), "destination", cfg, "publishDir", false) setValueFromFlag(cmd.Flags(), "destination", cfg, "publishDir", false)
setValueFromFlag(cmd.Flags(), "i18n-warnings", cfg, "logI18nWarnings", false) setValueFromFlag(cmd.Flags(), "i18n-warnings", cfg, "logI18nWarnings", false)
setValueFromFlag(cmd.Flags(), "path-warnings", cfg, "logPathWarnings", false)
} }
@ -290,6 +297,7 @@ func (c *commandeer) fullBuild() error {
} }
copyStaticFunc := func() error { copyStaticFunc := func() error {
cnt, err := c.copyStatic() cnt, err := c.copyStatic()
if err != nil { if err != nil {
if !os.IsNotExist(err) { if !os.IsNotExist(err) {
@ -326,7 +334,7 @@ func (c *commandeer) fullBuild() error {
} }
for _, s := range c.hugo.Sites { for _, s := range c.hugo.Sites {
s.ProcessingStats.Static = langCount[s.Language.Lang] s.ProcessingStats.Static = langCount[s.Language().Lang]
} }
if c.h.gc { if c.h.gc {
@ -344,9 +352,125 @@ func (c *commandeer) fullBuild() error {
} }
func (c *commandeer) initCPUProfile() (func(), error) {
if c.h.cpuprofile == "" {
return nil, nil
}
f, err := os.Create(c.h.cpuprofile)
if err != nil {
return nil, errors.Wrap(err, "failed to create CPU profile")
}
if err := pprof.StartCPUProfile(f); err != nil {
return nil, errors.Wrap(err, "failed to start CPU profile")
}
return func() {
pprof.StopCPUProfile()
f.Close()
}, nil
}
func (c *commandeer) initMemProfile() {
if c.h.memprofile == "" {
return
}
f, err := os.Create(c.h.memprofile)
if err != nil {
c.logger.ERROR.Println("could not create memory profile: ", err)
}
defer f.Close()
runtime.GC() // get up-to-date statistics
if err := pprof.WriteHeapProfile(f); err != nil {
c.logger.ERROR.Println("could not write memory profile: ", err)
}
}
func (c *commandeer) initTraceProfile() (func(), error) {
if c.h.traceprofile == "" {
return nil, nil
}
f, err := os.Create(c.h.traceprofile)
if err != nil {
return nil, errors.Wrap(err, "failed to create trace file")
}
if err := trace.Start(f); err != nil {
return nil, errors.Wrap(err, "failed to start trace")
}
return func() {
trace.Stop()
f.Close()
}, nil
}
func (c *commandeer) initMutexProfile() (func(), error) {
if c.h.mutexprofile == "" {
return nil, nil
}
f, err := os.Create(c.h.mutexprofile)
if err != nil {
return nil, err
}
runtime.SetMutexProfileFraction(1)
return func() {
pprof.Lookup("mutex").WriteTo(f, 0)
f.Close()
}, nil
}
func (c *commandeer) initProfiling() (func(), error) {
stopCPUProf, err := c.initCPUProfile()
if err != nil {
return nil, err
}
defer c.initMemProfile()
stopMutexProf, err := c.initMutexProfile()
if err != nil {
return nil, err
}
stopTraceProf, err := c.initTraceProfile()
if err != nil {
return nil, err
}
return func() {
if stopCPUProf != nil {
stopCPUProf()
}
if stopMutexProf != nil {
stopMutexProf()
}
if stopTraceProf != nil {
stopTraceProf()
}
}, nil
}
func (c *commandeer) build() error { func (c *commandeer) build() error {
defer c.timeTrack(time.Now(), "Total") defer c.timeTrack(time.Now(), "Total")
stopProfiling, err := c.initProfiling()
if err != nil {
return err
}
defer func() {
if stopProfiling != nil {
stopProfiling()
}
}()
if err := c.fullBuild(); err != nil { if err := c.fullBuild(); err != nil {
return err return err
} }
@ -356,6 +480,13 @@ func (c *commandeer) build() error {
fmt.Println() fmt.Println()
c.hugo.PrintProcessingStats(os.Stdout) c.hugo.PrintProcessingStats(os.Stdout)
fmt.Println() fmt.Println()
if createCounter, ok := c.destinationFs.(hugofs.DuplicatesReporter); ok {
dupes := createCounter.ReportDuplicates()
if dupes != "" {
c.logger.WARN.Println("Duplicate target paths:", dupes)
}
}
} }
if c.h.buildWatch { if c.h.buildWatch {
@ -369,7 +500,7 @@ func (c *commandeer) build() error {
checkErr(c.Logger, err) checkErr(c.Logger, err)
defer watcher.Close() defer watcher.Close()
var sigs = make(chan os.Signal) var sigs = make(chan os.Signal, 1)
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
<-sigs <-sigs
@ -381,6 +512,17 @@ func (c *commandeer) build() error {
func (c *commandeer) serverBuild() error { func (c *commandeer) serverBuild() error {
defer c.timeTrack(time.Now(), "Total") defer c.timeTrack(time.Now(), "Total")
stopProfiling, err := c.initProfiling()
if err != nil {
return err
}
defer func() {
if stopProfiling != nil {
stopProfiling()
}
}()
if err := c.fullBuild(); err != nil { if err := c.fullBuild(); err != nil {
return err return err
} }
@ -474,11 +616,9 @@ func (c *commandeer) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint6
} }
c.logger.INFO.Println("syncing static files to", publishDir) c.logger.INFO.Println("syncing static files to", publishDir)
var err error
// because we are using a baseFs (to get the union right). // because we are using a baseFs (to get the union right).
// set sync src to root // set sync src to root
err = syncer.Sync(publishDir, helpers.FilePathSeparator) err := syncer.Sync(publishDir, helpers.FilePathSeparator)
if err != nil { if err != nil {
return 0, err return 0, err
} }
@ -619,13 +759,6 @@ func (c *commandeer) getDirList() ([]string, error) {
return a, nil return a, nil
} }
func (c *commandeer) resetAndBuildSites() (err error) {
if !c.h.quiet {
c.logger.FEEDBACK.Println("Started building sites ...")
}
return c.hugo.Build(hugolib.BuildCfg{ResetState: true})
}
func (c *commandeer) buildSites() (err error) { func (c *commandeer) buildSites() (err error) {
return c.hugo.Build(hugolib.BuildCfg{}) return c.hugo.Build(hugolib.BuildCfg{})
} }
@ -973,7 +1106,7 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
navigate := c.Cfg.GetBool("navigateToChanged") navigate := c.Cfg.GetBool("navigateToChanged")
// We have fetched the same page above, but it may have // We have fetched the same page above, but it may have
// changed. // changed.
var p *hugolib.Page var p page.Page
if navigate { if navigate {
if onePageName != "" { if onePageName != "" {
@ -982,7 +1115,7 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
} }
if p != nil { if p != nil {
livereload.NavigateToPathForPort(p.RelPermalink(), p.Site.ServerPort()) livereload.NavigateToPathForPort(p.RelPermalink(), p.Site().ServerPort())
} else { } else {
livereload.ForceRefresh() livereload.ForceRefresh()
} }
@ -1044,9 +1177,11 @@ func (c *commandeer) isThemeVsHugoVersionMismatch(fs afero.Fs) (dir string, mism
} }
b, err := afero.ReadFile(fs, path) b, err := afero.ReadFile(fs, path)
if err != nil {
continue
}
tomlMeta, err := metadecoders.Default.UnmarshalToMap(b, metadecoders.TOML) tomlMeta, err := metadecoders.Default.UnmarshalToMap(b, metadecoders.TOML)
if err != nil { if err != nil {
continue continue
} }

View File

@ -1,4 +1,4 @@
// Copyright 2016 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -340,7 +340,7 @@ func copyDir(source string, dest string) error {
if err != nil { if err != nil {
return err return err
} }
entries, err := ioutil.ReadDir(source) entries, _ := ioutil.ReadDir(source)
for _, entry := range entries { for _, entry := range entries {
sfp := filepath.Join(source, entry.Name()) sfp := filepath.Join(source, entry.Name())
dfp := filepath.Join(dest, entry.Name()) dfp := filepath.Join(dest, entry.Name())
@ -373,6 +373,10 @@ func (i *importCmd) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPos
return err return err
} }
entries, err := ioutil.ReadDir(jekyllRoot) entries, err := ioutil.ReadDir(jekyllRoot)
if err != nil {
return err
}
for _, entry := range entries { for _, entry := range entries {
sfp := filepath.Join(jekyllRoot, entry.Name()) sfp := filepath.Join(jekyllRoot, entry.Name())
dfp := filepath.Join(dest, entry.Name()) dfp := filepath.Join(dest, entry.Name())
@ -464,7 +468,7 @@ func convertJekyllPost(s *hugolib.Site, path, relPath, targetDir string, draft b
fs := hugofs.Os fs := hugofs.Os
if err := helpers.WriteToDisk(targetFile, strings.NewReader(content), fs); err != nil { if err := helpers.WriteToDisk(targetFile, strings.NewReader(content), fs); err != nil {
return fmt.Errorf("Failed to save file %q:", filename) return fmt.Errorf("failed to save file %q: %s", filename, err)
} }
return nil return nil

View File

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -20,6 +20,7 @@ import (
"time" "time"
"github.com/gohugoio/hugo/hugolib" "github.com/gohugoio/hugo/hugolib"
"github.com/gohugoio/hugo/resources/resource"
"github.com/spf13/cobra" "github.com/spf13/cobra"
jww "github.com/spf13/jwalterweatherman" jww "github.com/spf13/jwalterweatherman"
) )
@ -70,7 +71,7 @@ List requires a subcommand, e.g. ` + "`hugo list drafts`.",
for _, p := range sites.Pages() { for _, p := range sites.Pages() {
if p.IsDraft() { if p.IsDraft() {
jww.FEEDBACK.Println(filepath.Join(p.File.Dir(), p.File.LogicalName())) jww.FEEDBACK.Println(filepath.Join(p.File().Dir(), p.File().LogicalName()))
} }
} }
@ -108,8 +109,8 @@ posted in the future.`,
defer writer.Flush() defer writer.Flush()
for _, p := range sites.Pages() { for _, p := range sites.Pages() {
if p.IsFuture() { if resource.IsFuture(p) {
err := writer.Write([]string{filepath.Join(p.File.Dir(), p.File.LogicalName()), p.PublishDate.Format(time.RFC3339)}) err := writer.Write([]string{filepath.Join(p.File().Dir(), p.File().LogicalName()), p.PublishDate().Format(time.RFC3339)})
if err != nil { if err != nil {
return newSystemError("Error writing future posts to stdout", err) return newSystemError("Error writing future posts to stdout", err)
} }
@ -149,11 +150,12 @@ expired.`,
defer writer.Flush() defer writer.Flush()
for _, p := range sites.Pages() { for _, p := range sites.Pages() {
if p.IsExpired() { if resource.IsExpired(p) {
err := writer.Write([]string{filepath.Join(p.File.Dir(), p.File.LogicalName()), p.ExpiryDate.Format(time.RFC3339)}) err := writer.Write([]string{filepath.Join(p.File().Dir(), p.File().LogicalName()), p.ExpiryDate().Format(time.RFC3339)})
if err != nil { if err != nil {
return newSystemError("Error writing expired posts to stdout", err) return newSystemError("Error writing expired posts to stdout", err)
} }
} }
} }

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -62,7 +62,7 @@ func TestDoNewSite_noerror_base_exists_but_empty(t *testing.T) {
_, fs := newTestCfg() _, fs := newTestCfg()
n := newNewSiteCmd() n := newNewSiteCmd()
require.NoError(t, fs.Source.MkdirAll(basepath, 777)) require.NoError(t, fs.Source.MkdirAll(basepath, 0777))
require.NoError(t, n.doNewSite(fs, basepath, false)) require.NoError(t, n.doNewSite(fs, basepath, false))
} }
@ -72,7 +72,7 @@ func TestDoNewSite_error_base_exists(t *testing.T) {
_, fs := newTestCfg() _, fs := newTestCfg()
n := newNewSiteCmd() n := newNewSiteCmd()
require.NoError(t, fs.Source.MkdirAll(basepath, 777)) require.NoError(t, fs.Source.MkdirAll(basepath, 0777))
_, err := fs.Source.Create(filepath.Join(basepath, "foo")) _, err := fs.Source.Create(filepath.Join(basepath, "foo"))
require.NoError(t, err) require.NoError(t, err)
// Since the directory already exists and isn't empty, expect an error // Since the directory already exists and isn't empty, expect an error
@ -85,7 +85,7 @@ func TestDoNewSite_force_empty_dir(t *testing.T) {
_, fs := newTestCfg() _, fs := newTestCfg()
n := newNewSiteCmd() n := newNewSiteCmd()
require.NoError(t, fs.Source.MkdirAll(basepath, 777)) require.NoError(t, fs.Source.MkdirAll(basepath, 0777))
require.NoError(t, n.doNewSite(fs, basepath, true)) require.NoError(t, n.doNewSite(fs, basepath, true))
@ -99,7 +99,7 @@ func TestDoNewSite_error_force_dir_inside_exists(t *testing.T) {
contentPath := filepath.Join(basepath, "content") contentPath := filepath.Join(basepath, "content")
require.NoError(t, fs.Source.MkdirAll(contentPath, 777)) require.NoError(t, fs.Source.MkdirAll(contentPath, 0777))
require.Error(t, n.doNewSite(fs, basepath, true)) require.Error(t, n.doNewSite(fs, basepath, true))
} }
@ -109,7 +109,7 @@ func TestDoNewSite_error_force_config_inside_exists(t *testing.T) {
n := newNewSiteCmd() n := newNewSiteCmd()
configPath := filepath.Join(basepath, "config.toml") configPath := filepath.Join(basepath, "config.toml")
require.NoError(t, fs.Source.MkdirAll(basepath, 777)) require.NoError(t, fs.Source.MkdirAll(basepath, 0777))
_, err := fs.Source.Create(configPath) _, err := fs.Source.Create(configPath)
require.NoError(t, err) require.NoError(t, err)

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -358,7 +358,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, string, string, erro
if err := f.c.partialReRender(p); err != nil { if err := f.c.partialReRender(p); err != nil {
f.c.handleBuildErr(err, fmt.Sprintf("Failed to render %q", p)) f.c.handleBuildErr(err, fmt.Sprintf("Failed to render %q", p))
if f.c.showErrorInBrowser { if f.c.showErrorInBrowser {
http.Redirect(w, r, p, 301) http.Redirect(w, r, p, http.StatusMovedPermanently)
return return
} }
} }
@ -386,7 +386,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, string, string, erro
return mu, u.String(), endpoint, nil return mu, u.String(), endpoint, nil
} }
var logErrorRe = regexp.MustCompile("(?s)ERROR \\d{4}/\\d{2}/\\d{2} \\d{2}:\\d{2}:\\d{2} ") var logErrorRe = regexp.MustCompile(`(?s)ERROR \d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} `)
func removeErrorPrefixFromLog(content string) string { func removeErrorPrefixFromLog(content string) string {
return logErrorRe.ReplaceAllLiteralString(content, "") return logErrorRe.ReplaceAllLiteralString(content, "")
@ -403,7 +403,7 @@ func (c *commandeer) serve(s *serverCmd) error {
if isMultiHost { if isMultiHost {
for _, s := range c.hugo.Sites { for _, s := range c.hugo.Sites {
baseURLs = append(baseURLs, s.BaseURL.String()) baseURLs = append(baseURLs, s.BaseURL.String())
roots = append(roots, s.Language.Lang) roots = append(roots, s.Language().Lang)
} }
} else { } else {
s := c.hugo.Sites[0] s := c.hugo.Sites[0]
@ -430,7 +430,7 @@ func (c *commandeer) serve(s *serverCmd) error {
livereload.Initialize() livereload.Initialize()
} }
var sigs = make(chan os.Signal) var sigs = make(chan os.Signal, 1)
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
for i := range baseURLs { for i := range baseURLs {

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -92,9 +92,7 @@ func appendToInterfaceSlice(tov reflect.Value, from ...interface{}) ([]interface
tos = append(tos, tov.Index(i).Interface()) tos = append(tos, tov.Index(i).Interface())
} }
for _, v := range from { tos = append(tos, from...)
tos = append(tos, v)
}
return tos, nil return tos, nil
} }

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -75,11 +75,11 @@ func (p *tstSlicerIn2) Slice(in interface{}) (interface{}, error) {
} }
func (p *tstSlicerIn1) Name() string { func (p *tstSlicerIn1) Name() string {
return p.Name() return p.name
} }
func (p *tstSlicerIn2) Name() string { func (p *tstSlicerIn2) Name() string {
return p.Name() return p.name
} }
func (p *tstSlicer) Slice(in interface{}) (interface{}, error) { func (p *tstSlicer) Slice(in interface{}) (interface{}, error) {

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -32,6 +32,7 @@ type ReadSeekCloser interface {
} }
// ReadSeekerNoOpCloser implements ReadSeekCloser by doing nothing in Close. // ReadSeekerNoOpCloser implements ReadSeekCloser by doing nothing in Close.
// TODO(bep) rename this and simila to ReadSeekerNopCloser, naming used in stdlib, which kind of makes sense.
type ReadSeekerNoOpCloser struct { type ReadSeekerNoOpCloser struct {
ReadSeeker ReadSeeker
} }

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -28,6 +28,24 @@ type Scratch struct {
mu sync.RWMutex mu sync.RWMutex
} }
// Scratcher provides a scratching service.
type Scratcher interface {
Scratch() *Scratch
}
type scratcher struct {
s *Scratch
}
func (s scratcher) Scratch() *Scratch {
return s.s
}
// NewScratcher creates a new Scratcher.
func NewScratcher() Scratcher {
return scratcher{s: NewScratch()}
}
// Add will, for single values, add (using the + operator) the addend to the existing addend (if found). // Add will, for single values, add (using the + operator) the addend to the existing addend (if found).
// Supports numeric values and strings. // Supports numeric values and strings.
// //

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -16,6 +16,7 @@ package types
import ( import (
"fmt" "fmt"
"reflect"
"github.com/spf13/cast" "github.com/spf13/cast"
) )
@ -56,3 +57,24 @@ func NewKeyValuesStrings(key string, values ...string) KeyValues {
type Zeroer interface { type Zeroer interface {
IsZero() bool IsZero() bool
} }
// IsNil reports whether v is nil.
func IsNil(v interface{}) bool {
if v == nil {
return true
}
value := reflect.ValueOf(v)
switch value.Kind() {
case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
return value.IsNil()
}
return false
}
// DevMarker is a marker interface for types that should only be used during
// development.
type DevMarker interface {
DevOnly()
}

View File

@ -1,4 +1,4 @@
// Copyright 2017-present The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -40,3 +40,15 @@ func GetStringSlicePreserveString(cfg Provider, key string) []string {
} }
return cast.ToStringSlice(sd) return cast.ToStringSlice(sd)
} }
// SetBaseTestDefaults provides some common config defaults used in tests.
func SetBaseTestDefaults(cfg Provider) {
cfg.Set("resourceDir", "resources")
cfg.Set("contentDir", "content")
cfg.Set("dataDir", "data")
cfg.Set("i18nDir", "i18n")
cfg.Set("layoutDir", "layouts")
cfg.Set("assetDir", "assets")
cfg.Set("archetypeDir", "archetypes")
cfg.Set("publishDir", "public")
}

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -23,6 +23,7 @@ const (
disqusShortnameKey = "disqusshortname" disqusShortnameKey = "disqusshortname"
googleAnalyticsKey = "googleanalytics" googleAnalyticsKey = "googleanalytics"
rssLimitKey = "rssLimit"
) )
// Config is a privacy configuration for all the relevant services in Hugo. // Config is a privacy configuration for all the relevant services in Hugo.
@ -31,6 +32,7 @@ type Config struct {
GoogleAnalytics GoogleAnalytics GoogleAnalytics GoogleAnalytics
Instagram Instagram Instagram Instagram
Twitter Twitter Twitter Twitter
RSS RSS
} }
// Disqus holds the functional configuration settings related to the Disqus template. // Disqus holds the functional configuration settings related to the Disqus template.
@ -61,6 +63,12 @@ type Twitter struct {
DisableInlineCSS bool DisableInlineCSS bool
} }
// RSS holds the functional configuration settings related to the RSS feeds.
type RSS struct {
// Limit the number of pages.
Limit int
}
// DecodeConfig creates a services Config from a given Hugo configuration. // DecodeConfig creates a services Config from a given Hugo configuration.
func DecodeConfig(cfg config.Provider) (c Config, err error) { func DecodeConfig(cfg config.Provider) (c Config, err error) {
m := cfg.GetStringMap(servicesConfigKey) m := cfg.GetStringMap(servicesConfigKey)
@ -76,5 +84,9 @@ func DecodeConfig(cfg config.Provider) (c Config, err error) {
c.Disqus.Shortname = cfg.GetString(disqusShortnameKey) c.Disqus.Shortname = cfg.GetString(disqusShortnameKey)
} }
if c.RSS.Limit == 0 {
c.RSS.Limit = cfg.GetInt(rssLimitKey)
}
return return
} }

View File

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
package hugolib package config
import ( import (
"github.com/spf13/cast" "github.com/spf13/cast"
@ -25,21 +25,20 @@ type Sitemap struct {
Filename string Filename string
} }
func parseSitemap(input map[string]interface{}) Sitemap { func DecodeSitemap(prototype Sitemap, input map[string]interface{}) Sitemap {
sitemap := Sitemap{Priority: -1, Filename: "sitemap.xml"}
for key, value := range input { for key, value := range input {
switch key { switch key {
case "changefreq": case "changefreq":
sitemap.ChangeFreq = cast.ToString(value) prototype.ChangeFreq = cast.ToString(value)
case "priority": case "priority":
sitemap.Priority = cast.ToFloat64(value) prototype.Priority = cast.ToFloat64(value)
case "filename": case "filename":
sitemap.Filename = cast.ToString(value) prototype.Filename = cast.ToString(value)
default: default:
jww.WARN.Printf("Unknown Sitemap field: %s\n", key) jww.WARN.Printf("Unknown Sitemap field: %s\n", key)
} }
} }
return sitemap return prototype
} }

View File

@ -1,4 +1,4 @@
// Copyright 2016 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -50,7 +50,7 @@ func NewContent(
if isDir { if isDir {
langFs := hugofs.NewLanguageFs(s.Language.Lang, sites.LanguageSet(), archetypeFs) langFs := hugofs.NewLanguageFs(s.Language().Lang, sites.LanguageSet(), archetypeFs)
cm, err := mapArcheTypeDir(ps, langFs, archetypeFilename) cm, err := mapArcheTypeDir(ps, langFs, archetypeFilename)
if err != nil { if err != nil {
@ -113,7 +113,7 @@ func NewContent(
func targetSite(sites *hugolib.HugoSites, fi *hugofs.LanguageFileInfo) *hugolib.Site { func targetSite(sites *hugolib.HugoSites, fi *hugofs.LanguageFileInfo) *hugolib.Site {
for _, s := range sites.Sites { for _, s := range sites.Sites {
if fi.Lang() == s.Language.Lang { if fi.Lang() == s.Language().Lang {
return s return s
} }
} }
@ -245,7 +245,7 @@ func resolveContentPath(sites *hugolib.HugoSites, fs afero.Fs, targetPath string
// Try the filename: my-post.en.md // Try the filename: my-post.en.md
for _, ss := range sites.Sites { for _, ss := range sites.Sites {
if strings.Contains(targetPath, "."+ss.Language.Lang+".") { if strings.Contains(targetPath, "."+ss.Language().Lang+".") {
s = ss s = ss
break break
} }

7
deps/deps.go vendored
View File

@ -7,13 +7,14 @@ import (
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/gohugoio/hugo/cache/filecache" "github.com/gohugoio/hugo/cache/filecache"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/langs" "github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/media" "github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/metrics" "github.com/gohugoio/hugo/metrics"
"github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/resources"
@ -67,7 +68,7 @@ type Deps struct {
Language *langs.Language Language *langs.Language
// The site building. // The site building.
Site hugo.Site Site page.Site
// All the output formats available for the current site. // All the output formats available for the current site.
OutputFormatsConfig output.Formats OutputFormatsConfig output.Formats
@ -325,7 +326,7 @@ type DepsCfg struct {
Language *langs.Language Language *langs.Language
// The Site in use // The Site in use
Site hugo.Site Site page.Site
// The configuration to use. // The configuration to use.
Cfg config.Provider Cfg config.Provider

View File

@ -79,8 +79,7 @@ See [`.Scratch`](/functions/scratch/) for page-scoped, writable variables.
: the page's *kind*. Possible return values are `page`, `home`, `section`, `taxonomy`, or `taxonomyTerm`. Note that there are also `RSS`, `sitemap`, `robotsTXT`, and `404` kinds, but these are only available during the rendering of each of these respective page's kind and therefore *not* available in any of the `Pages` collections. : the page's *kind*. Possible return values are `page`, `home`, `section`, `taxonomy`, or `taxonomyTerm`. Note that there are also `RSS`, `sitemap`, `robotsTXT`, and `404` kinds, but these are only available during the rendering of each of these respective page's kind and therefore *not* available in any of the `Pages` collections.
.Language .Language
: a language object that points to the language's definition in the site : a language object that points to the language's definition in the site `config`. `.Language.Lang` gives you the language code.
`config`.
.Lastmod .Lastmod
: the date the content was last modified. `.Lastmod` pulls from the `lastmod` field in a content's front matter. : the date the content was last modified. `.Lastmod` pulls from the `lastmod` field in a content's front matter.
@ -93,10 +92,7 @@ See also `.ExpiryDate`, `.Date`, `.PublishDate`, and [`.GitInfo`][gitinfo].
.LinkTitle .LinkTitle
: access when creating links to the content. If set, Hugo will use the `linktitle` from the front matter before `title`. : access when creating links to the content. If set, Hugo will use the `linktitle` from the front matter before `title`.
.Next (deprecated) .Next
: In older Hugo versions this pointer went the wrong direction. Please use `.PrevPage` instead.
.NextPage
: Pointer to the next [regular page](/variables/site/#site-pages) (sorted by Hugo's [default sort](/templates/lists#default-weight-date-linktitle-filepath)). Example: `{{if .NextPage}}{{.NextPage.Permalink}}{{end}}`. : Pointer to the next [regular page](/variables/site/#site-pages) (sorted by Hugo's [default sort](/templates/lists#default-weight-date-linktitle-filepath)). Example: `{{if .NextPage}}{{.NextPage.Permalink}}{{end}}`.
.NextInSection .NextInSection
@ -119,9 +115,6 @@ See also `.ExpiryDate`, `.Date`, `.PublishDate`, and [`.GitInfo`][gitinfo].
: the Page content stripped of HTML as a `[]string` using Go's [`strings.Fields`](https://golang.org/pkg/strings/#Fields) to split `.Plain` into a slice. : the Page content stripped of HTML as a `[]string` using Go's [`strings.Fields`](https://golang.org/pkg/strings/#Fields) to split `.Plain` into a slice.
.Prev (deprecated) .Prev (deprecated)
: In older Hugo versions this pointer went the wrong direction. Please use `.NextPage` instead.
.PrevPage
: Pointer to the previous [regular page](/variables/site/#site-pages) (sorted by Hugo's [default sort](/templates/lists#default-weight-date-linktitle-filepath)). Example: `{{if .PrevPage}}{{.PrevPage.Permalink}}{{end}}`. : Pointer to the previous [regular page](/variables/site/#site-pages) (sorted by Hugo's [default sort](/templates/lists#default-weight-date-linktitle-filepath)). Example: `{{if .PrevPage}}{{.PrevPage.Permalink}}{{end}}`.
.PrevInSection .PrevInSection
@ -130,8 +123,8 @@ See also `.ExpiryDate`, `.Date`, `.PublishDate`, and [`.GitInfo`][gitinfo].
.PublishDate .PublishDate
: the date on which the content was or will be published; `.Publishdate` pulls from the `publishdate` field in a content's front matter. See also `.ExpiryDate`, `.Date`, and `.Lastmod`. : the date on which the content was or will be published; `.Publishdate` pulls from the `publishdate` field in a content's front matter. See also `.ExpiryDate`, `.Date`, and `.Lastmod`.
.RSSLink .RSSLink (deprecated)
: link to the taxonomies' RSS link. : link to the page's RSS feed. This is deprecated. You should instead do something like this: `{{ with .OutputFormats.Get "RSS" }}{{ . RelPermalink }}{{ end }}`.
.RawContent .RawContent
: raw markdown content without the front matter. Useful with [remarkjs.com]( : raw markdown content without the front matter. Useful with [remarkjs.com](

1
go.mod
View File

@ -44,7 +44,6 @@ require (
github.com/spf13/cobra v0.0.3 github.com/spf13/cobra v0.0.3
github.com/spf13/fsync v0.0.0-20170320142552-12a01e648f05 github.com/spf13/fsync v0.0.0-20170320142552-12a01e648f05
github.com/spf13/jwalterweatherman v1.1.0 github.com/spf13/jwalterweatherman v1.1.0
github.com/spf13/nitro v0.0.0-20131003134307-24d7ef30a12d
github.com/spf13/pflag v1.0.3 github.com/spf13/pflag v1.0.3
github.com/spf13/viper v1.3.2 github.com/spf13/viper v1.3.2
github.com/stretchr/testify v1.3.0 github.com/stretchr/testify v1.3.0

2
go.sum
View File

@ -126,8 +126,6 @@ github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
github.com/spf13/nitro v0.0.0-20131003134307-24d7ef30a12d h1:ihvj2nmx8eqWjlgNgdW6h0DyGJuq5GiwHadJkG0wXtQ=
github.com/spf13/nitro v0.0.0-20131003134307-24d7ef30a12d/go.mod h1:jU8A+8xL+6n1OX4XaZtCj4B3mIa64tULUsD6YegdpFo=
github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg= github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/viper v1.3.2 h1:VUFqw5KcqRf7i70GOzW7N+Q7+gxVBkSSqiXB12+JQ4M= github.com/spf13/viper v1.3.2 h1:VUFqw5KcqRf7i70GOzW7N+Q7+gxVBkSSqiXB12+JQ4M=

View File

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -57,7 +57,7 @@ type ContentSpec struct {
Highlight func(code, lang, optsStr string) (string, error) Highlight func(code, lang, optsStr string) (string, error)
defatultPygmentsOpts map[string]string defatultPygmentsOpts map[string]string
cfg config.Provider Cfg config.Provider
} }
// NewContentSpec returns a ContentSpec initialized // NewContentSpec returns a ContentSpec initialized
@ -73,7 +73,7 @@ func NewContentSpec(cfg config.Provider) (*ContentSpec, error) {
BuildExpired: cfg.GetBool("buildExpired"), BuildExpired: cfg.GetBool("buildExpired"),
BuildDrafts: cfg.GetBool("buildDrafts"), BuildDrafts: cfg.GetBool("buildDrafts"),
cfg: cfg, Cfg: cfg,
} }
// Highlighting setup // Highlighting setup
@ -382,7 +382,7 @@ func (c *ContentSpec) getMmarkHTMLRenderer(defaultFlags int, ctx *RenderingConte
return &HugoMmarkHTMLRenderer{ return &HugoMmarkHTMLRenderer{
cs: c, cs: c,
Renderer: mmark.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters), Renderer: mmark.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters),
Cfg: c.cfg, Cfg: c.Cfg,
} }
} }

View File

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -24,7 +24,7 @@ import (
// Renders a codeblock using Blackfriday // Renders a codeblock using Blackfriday
func (c ContentSpec) render(input string) string { func (c ContentSpec) render(input string) string {
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
render := c.getHTMLRenderer(0, ctx) render := c.getHTMLRenderer(0, ctx)
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
@ -34,7 +34,7 @@ func (c ContentSpec) render(input string) string {
// Renders a codeblock using Mmark // Renders a codeblock using Mmark
func (c ContentSpec) renderWithMmark(input string) string { func (c ContentSpec) renderWithMmark(input string) string {
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
render := c.getMmarkHTMLRenderer(0, ctx) render := c.getMmarkHTMLRenderer(0, ctx)
buf := &bytes.Buffer{} buf := &bytes.Buffer{}

View File

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -181,7 +181,7 @@ func TestTruncateWordsByRune(t *testing.T) {
func TestGetHTMLRendererFlags(t *testing.T) { func TestGetHTMLRendererFlags(t *testing.T) {
c := newTestContentSpec() c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
renderer := c.getHTMLRenderer(blackfriday.HTML_USE_XHTML, ctx) renderer := c.getHTMLRenderer(blackfriday.HTML_USE_XHTML, ctx)
flags := renderer.GetFlags() flags := renderer.GetFlags()
if flags&blackfriday.HTML_USE_XHTML != blackfriday.HTML_USE_XHTML { if flags&blackfriday.HTML_USE_XHTML != blackfriday.HTML_USE_XHTML {
@ -210,7 +210,7 @@ func TestGetHTMLRendererAllFlags(t *testing.T) {
{blackfriday.HTML_SMARTYPANTS_LATEX_DASHES}, {blackfriday.HTML_SMARTYPANTS_LATEX_DASHES},
} }
defaultFlags := blackfriday.HTML_USE_XHTML defaultFlags := blackfriday.HTML_USE_XHTML
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Config.AngledQuotes = true ctx.Config.AngledQuotes = true
ctx.Config.Fractions = true ctx.Config.Fractions = true
ctx.Config.HrefTargetBlank = true ctx.Config.HrefTargetBlank = true
@ -235,7 +235,7 @@ func TestGetHTMLRendererAllFlags(t *testing.T) {
func TestGetHTMLRendererAnchors(t *testing.T) { func TestGetHTMLRendererAnchors(t *testing.T) {
c := newTestContentSpec() c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.DocumentID = "testid" ctx.DocumentID = "testid"
ctx.Config.PlainIDAnchors = false ctx.Config.PlainIDAnchors = false
@ -259,7 +259,7 @@ func TestGetHTMLRendererAnchors(t *testing.T) {
func TestGetMmarkHTMLRenderer(t *testing.T) { func TestGetMmarkHTMLRenderer(t *testing.T) {
c := newTestContentSpec() c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.DocumentID = "testid" ctx.DocumentID = "testid"
ctx.Config.PlainIDAnchors = false ctx.Config.PlainIDAnchors = false
actualRenderer := c.getMmarkHTMLRenderer(0, ctx) actualRenderer := c.getMmarkHTMLRenderer(0, ctx)
@ -283,7 +283,7 @@ func TestGetMmarkHTMLRenderer(t *testing.T) {
func TestGetMarkdownExtensionsMasksAreRemovedFromExtensions(t *testing.T) { func TestGetMarkdownExtensionsMasksAreRemovedFromExtensions(t *testing.T) {
c := newTestContentSpec() c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Config.Extensions = []string{"headerId"} ctx.Config.Extensions = []string{"headerId"}
ctx.Config.ExtensionsMask = []string{"noIntraEmphasis"} ctx.Config.ExtensionsMask = []string{"noIntraEmphasis"}
@ -298,7 +298,7 @@ func TestGetMarkdownExtensionsByDefaultAllExtensionsAreEnabled(t *testing.T) {
testFlag int testFlag int
} }
c := newTestContentSpec() c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Config.Extensions = []string{""} ctx.Config.Extensions = []string{""}
ctx.Config.ExtensionsMask = []string{""} ctx.Config.ExtensionsMask = []string{""}
allExtensions := []data{ allExtensions := []data{
@ -330,7 +330,7 @@ func TestGetMarkdownExtensionsByDefaultAllExtensionsAreEnabled(t *testing.T) {
func TestGetMarkdownExtensionsAddingFlagsThroughRenderingContext(t *testing.T) { func TestGetMarkdownExtensionsAddingFlagsThroughRenderingContext(t *testing.T) {
c := newTestContentSpec() c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Config.Extensions = []string{"definitionLists"} ctx.Config.Extensions = []string{"definitionLists"}
ctx.Config.ExtensionsMask = []string{""} ctx.Config.ExtensionsMask = []string{""}
@ -342,7 +342,7 @@ func TestGetMarkdownExtensionsAddingFlagsThroughRenderingContext(t *testing.T) {
func TestGetMarkdownRenderer(t *testing.T) { func TestGetMarkdownRenderer(t *testing.T) {
c := newTestContentSpec() c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Content = []byte("testContent") ctx.Content = []byte("testContent")
actualRenderedMarkdown := c.markdownRender(ctx) actualRenderedMarkdown := c.markdownRender(ctx)
expectedRenderedMarkdown := []byte("<p>testContent</p>\n") expectedRenderedMarkdown := []byte("<p>testContent</p>\n")
@ -353,7 +353,7 @@ func TestGetMarkdownRenderer(t *testing.T) {
func TestGetMarkdownRendererWithTOC(t *testing.T) { func TestGetMarkdownRendererWithTOC(t *testing.T) {
c := newTestContentSpec() c := newTestContentSpec()
ctx := &RenderingContext{RenderTOC: true, Cfg: c.cfg, Config: c.BlackFriday} ctx := &RenderingContext{RenderTOC: true, Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Content = []byte("testContent") ctx.Content = []byte("testContent")
actualRenderedMarkdown := c.markdownRender(ctx) actualRenderedMarkdown := c.markdownRender(ctx)
expectedRenderedMarkdown := []byte("<nav>\n</nav>\n\n<p>testContent</p>\n") expectedRenderedMarkdown := []byte("<nav>\n</nav>\n\n<p>testContent</p>\n")
@ -368,7 +368,7 @@ func TestGetMmarkExtensions(t *testing.T) {
testFlag int testFlag int
} }
c := newTestContentSpec() c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Config.Extensions = []string{"tables"} ctx.Config.Extensions = []string{"tables"}
ctx.Config.ExtensionsMask = []string{""} ctx.Config.ExtensionsMask = []string{""}
allExtensions := []data{ allExtensions := []data{
@ -397,7 +397,7 @@ func TestGetMmarkExtensions(t *testing.T) {
func TestMmarkRender(t *testing.T) { func TestMmarkRender(t *testing.T) {
c := newTestContentSpec() c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Content = []byte("testContent") ctx.Content = []byte("testContent")
actualRenderedMarkdown := c.mmarkRender(ctx) actualRenderedMarkdown := c.mmarkRender(ctx)
expectedRenderedMarkdown := []byte("<p>testContent</p>\n") expectedRenderedMarkdown := []byte("<p>testContent</p>\n")

View File

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -92,7 +92,7 @@ func GuessType(in string) string {
return "org" return "org"
} }
return "unknown" return ""
} }
// FirstUpper returns a string with the first character as upper case. // FirstUpper returns a string with the first character as upper case.
@ -325,12 +325,15 @@ func InitLoggers() {
// The idea is two remove an item in two Hugo releases to give users and theme authors // The idea is two remove an item in two Hugo releases to give users and theme authors
// plenty of time to fix their templates. // plenty of time to fix their templates.
func Deprecated(object, item, alternative string, err bool) { func Deprecated(object, item, alternative string, err bool) {
if !strings.HasSuffix(alternative, ".") {
alternative += "."
}
if err { if err {
DistinctErrorLog.Printf("%s's %s is deprecated and will be removed in Hugo %s. %s", object, item, hugo.CurrentVersion.Next().ReleaseVersion(), alternative) DistinctErrorLog.Printf("%s's %s is deprecated and will be removed in Hugo %s. %s", object, item, hugo.CurrentVersion.Next().ReleaseVersion(), alternative)
} else { } else {
// Make sure the users see this while avoiding build breakage. This will not lead to an os.Exit(-1) DistinctWarnLog.Printf("%s's %s is deprecated and will be removed in a future release. %s", object, item, alternative)
DistinctFeedbackLog.Printf("WARNING: %s's %s is deprecated and will be removed in a future release. %s", object, item, alternative)
} }
} }

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -42,7 +42,7 @@ func TestGuessType(t *testing.T) {
{"html", "html"}, {"html", "html"},
{"htm", "html"}, {"htm", "html"},
{"org", "org"}, {"org", "org"},
{"excel", "unknown"}, {"excel", ""},
} { } {
result := GuessType(this.in) result := GuessType(this.in)
if result != this.expect { if result != this.expect {
@ -166,6 +166,27 @@ var containsAdditionalTestData = []struct {
{"", []byte(""), false}, {"", []byte(""), false},
} }
func TestSliceToLower(t *testing.T) {
t.Parallel()
tests := []struct {
value []string
expected []string
}{
{[]string{"a", "b", "c"}, []string{"a", "b", "c"}},
{[]string{"a", "B", "c"}, []string{"a", "b", "c"}},
{[]string{"A", "B", "C"}, []string{"a", "b", "c"}},
}
for _, test := range tests {
res := SliceToLower(test.value)
for i, val := range res {
if val != test.expected[i] {
t.Errorf("Case mismatch. Expected %s, got %s", test.expected[i], res[i])
}
}
}
}
func TestReaderContains(t *testing.T) { func TestReaderContains(t *testing.T) {
for i, this := range append(containsBenchTestData, containsAdditionalTestData...) { for i, this := range append(containsBenchTestData, containsAdditionalTestData...) {
result := ReaderContains(strings.NewReader(this.v1), this.v2) result := ReaderContains(strings.NewReader(this.v1), this.v2)

View File

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -86,6 +86,13 @@ func (p *PathSpec) MakePath(s string) string {
return p.UnicodeSanitize(s) return p.UnicodeSanitize(s)
} }
// MakePathsSanitized applies MakePathSanitized on every item in the slice
func (p *PathSpec) MakePathsSanitized(paths []string) {
for i, path := range paths {
paths[i] = p.MakePathSanitized(path)
}
}
// MakePathSanitized creates a Unicode-sanitized string, with the spaces replaced // MakePathSanitized creates a Unicode-sanitized string, with the spaces replaced
func (p *PathSpec) MakePathSanitized(s string) string { func (p *PathSpec) MakePathSanitized(s string) string {
if p.DisablePathToLower { if p.DisablePathToLower {

View File

@ -1,4 +1,4 @@
// Copyright 2016 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -56,7 +56,7 @@ type highlighters struct {
} }
func newHiglighters(cs *ContentSpec) highlighters { func newHiglighters(cs *ContentSpec) highlighters {
return highlighters{cs: cs, ignoreCache: cs.cfg.GetBool("ignoreCache"), cacheDir: cs.cfg.GetString("cacheDir")} return highlighters{cs: cs, ignoreCache: cs.Cfg.GetBool("ignoreCache"), cacheDir: cs.Cfg.GetString("cacheDir")}
} }
func (h highlighters) chromaHighlight(code, lang, optsStr string) (string, error) { func (h highlighters) chromaHighlight(code, lang, optsStr string) (string, error) {

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -14,8 +14,13 @@
package htesting package htesting
import ( import (
"html/template"
"time"
"github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/langs" "github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/navigation"
"github.com/gohugoio/hugo/resources/page"
"github.com/spf13/viper" "github.com/spf13/viper"
) )
@ -28,6 +33,22 @@ func (t testSite) Hugo() hugo.Info {
return t.h return t.h
} }
func (t testSite) ServerPort() int {
return 1313
}
func (testSite) LastChange() (t time.Time) {
return
}
func (t testSite) Title() string {
return "foo"
}
func (t testSite) Sites() page.Sites {
return nil
}
func (t testSite) IsServer() bool { func (t testSite) IsServer() bool {
return false return false
} }
@ -36,8 +57,36 @@ func (t testSite) Language() *langs.Language {
return t.l return t.l
} }
func (t testSite) Pages() page.Pages {
return nil
}
func (t testSite) RegularPages() page.Pages {
return nil
}
func (t testSite) Menus() navigation.Menus {
return nil
}
func (t testSite) Taxonomies() interface{} {
return nil
}
func (t testSite) BaseURL() template.URL {
return ""
}
func (t testSite) Params() map[string]interface{} {
return nil
}
func (t testSite) Data() map[string]interface{} {
return nil
}
// NewTestHugoSite creates a new minimal test site. // NewTestHugoSite creates a new minimal test site.
func NewTestHugoSite() hugo.Site { func NewTestHugoSite() page.Site {
return testSite{ return testSite{
h: hugo.NewInfo(hugo.EnvironmentProduction), h: hugo.NewInfo(hugo.EnvironmentProduction),
l: langs.NewLanguage("en", newTestConfig()), l: langs.NewLanguage("en", newTestConfig()),

View File

@ -0,0 +1,99 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"fmt"
"os"
"sort"
"strings"
"sync"
"github.com/spf13/afero"
)
// Reseter is implemented by some of the stateful filesystems.
type Reseter interface {
Reset()
}
// DuplicatesReporter reports about duplicate filenames.
type DuplicatesReporter interface {
ReportDuplicates() string
}
func NewCreateCountingFs(fs afero.Fs) afero.Fs {
return &createCountingFs{Fs: fs, fileCount: make(map[string]int)}
}
// ReportDuplicates reports filenames written more than once.
func (c *createCountingFs) ReportDuplicates() string {
c.mu.Lock()
defer c.mu.Unlock()
var dupes []string
for k, v := range c.fileCount {
if v > 1 {
dupes = append(dupes, fmt.Sprintf("%s (%d)", k, v))
}
}
if len(dupes) == 0 {
return ""
}
sort.Strings(dupes)
return strings.Join(dupes, ", ")
}
// createCountingFs counts filenames of created files or files opened
// for writing.
type createCountingFs struct {
afero.Fs
mu sync.Mutex
fileCount map[string]int
}
func (c *createCountingFs) Reset() {
c.mu.Lock()
defer c.mu.Unlock()
c.fileCount = make(map[string]int)
}
func (fs *createCountingFs) onCreate(filename string) {
fs.mu.Lock()
defer fs.mu.Unlock()
fs.fileCount[filename] = fs.fileCount[filename] + 1
}
func (fs *createCountingFs) Create(name string) (afero.File, error) {
f, err := fs.Fs.Create(name)
if err == nil {
fs.onCreate(name)
}
return f, err
}
func (fs *createCountingFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
f, err := fs.Fs.OpenFile(name, flag, perm)
if err == nil && isWrite(flag) {
fs.onCreate(name)
}
return f, err
}

View File

@ -1,4 +1,4 @@
// Copyright 2016 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -15,6 +15,8 @@
package hugofs package hugofs
import ( import (
"os"
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config"
"github.com/spf13/afero" "github.com/spf13/afero"
) )
@ -80,3 +82,7 @@ func getWorkingDirFs(base afero.Fs, cfg config.Provider) *afero.BasePathFs {
return nil return nil
} }
func isWrite(flag int) bool {
return flag&os.O_RDWR != 0 || flag&os.O_WRONLY != 0
}

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -67,10 +67,6 @@ func (fs *md5HashingFs) wrapFile(f afero.File) afero.File {
return &hashingFile{File: f, h: md5.New(), hashReceiver: fs.hashReceiver} return &hashingFile{File: f, h: md5.New(), hashReceiver: fs.hashReceiver}
} }
func isWrite(flag int) bool {
return flag&os.O_RDWR != 0 || flag&os.O_WRONLY != 0
}
func (fs *md5HashingFs) Name() string { func (fs *md5HashingFs) Name() string {
return "md5HashingFs" return "md5HashingFs"
} }

70
hugofs/stacktracer_fs.go Normal file
View File

@ -0,0 +1,70 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"fmt"
"os"
"regexp"
"runtime"
"github.com/gohugoio/hugo/common/types"
"github.com/spf13/afero"
)
// Make sure we don't accidently use this in the real Hugo.
var _ types.DevMarker = (*stacktracerFs)(nil)
// NewStacktracerFs wraps the given fs printing stack traces for file creates
// matching the given regexp pattern.
func NewStacktracerFs(fs afero.Fs, pattern string) afero.Fs {
return &stacktracerFs{Fs: fs, re: regexp.MustCompile(pattern)}
}
// stacktracerFs can be used in hard-to-debug development situations where
// you get some input you don't understand where comes from.
type stacktracerFs struct {
afero.Fs
// Will print stacktrace for every file creates matching this pattern.
re *regexp.Regexp
}
func (fs *stacktracerFs) DevOnly() {
}
func (fs *stacktracerFs) onCreate(filename string) {
if fs.re.MatchString(filename) {
trace := make([]byte, 1500)
runtime.Stack(trace, true)
fmt.Printf("\n===========\n%q:\n%s\n", filename, trace)
}
}
func (fs *stacktracerFs) Create(name string) (afero.File, error) {
f, err := fs.Fs.Create(name)
if err == nil {
fs.onCreate(name)
}
return f, err
}
func (fs *stacktracerFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
f, err := fs.Fs.OpenFile(name, flag, perm)
if err == nil && isWrite(flag) {
fs.onCreate(name)
}
return f, err
}

View File

@ -1,4 +1,4 @@
// Copyright 2017 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -26,6 +26,7 @@ import (
"github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/publisher" "github.com/gohugoio/hugo/publisher"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
@ -55,7 +56,12 @@ func newAliasHandler(t tpl.TemplateFinder, l *loggers.Logger, allowRoot bool) al
return aliasHandler{t, l, allowRoot} return aliasHandler{t, l, allowRoot}
} }
func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (io.Reader, error) { type aliasPage struct {
Permalink string
page.Page
}
func (a aliasHandler) renderAlias(isXHTML bool, permalink string, p page.Page) (io.Reader, error) {
t := "alias" t := "alias"
if isXHTML { if isXHTML {
t = "alias-xhtml" t = "alias-xhtml"
@ -75,12 +81,9 @@ func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (i
} }
} }
data := struct { data := aliasPage{
Permalink string
Page *Page
}{
permalink, permalink,
page, p,
} }
buffer := new(bytes.Buffer) buffer := new(bytes.Buffer)
@ -91,11 +94,11 @@ func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (i
return buffer, nil return buffer, nil
} }
func (s *Site) writeDestAlias(path, permalink string, outputFormat output.Format, p *Page) (err error) { func (s *Site) writeDestAlias(path, permalink string, outputFormat output.Format, p page.Page) (err error) {
return s.publishDestAlias(false, path, permalink, outputFormat, p) return s.publishDestAlias(false, path, permalink, outputFormat, p)
} }
func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFormat output.Format, p *Page) (err error) { func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFormat output.Format, p page.Page) (err error) {
handler := newAliasHandler(s.Tmpl, s.Log, allowRoot) handler := newAliasHandler(s.Tmpl, s.Log, allowRoot)
isXHTML := strings.HasSuffix(path, ".xhtml") isXHTML := strings.HasSuffix(path, ".xhtml")
@ -126,19 +129,19 @@ func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFo
func (a aliasHandler) targetPathAlias(src string) (string, error) { func (a aliasHandler) targetPathAlias(src string) (string, error) {
originalAlias := src originalAlias := src
if len(src) <= 0 { if len(src) <= 0 {
return "", fmt.Errorf("Alias \"\" is an empty string") return "", fmt.Errorf("alias \"\" is an empty string")
} }
alias := filepath.Clean(src) alias := filepath.Clean(src)
components := strings.Split(alias, helpers.FilePathSeparator) components := strings.Split(alias, helpers.FilePathSeparator)
if !a.allowRoot && alias == helpers.FilePathSeparator { if !a.allowRoot && alias == helpers.FilePathSeparator {
return "", fmt.Errorf("Alias \"%s\" resolves to website root directory", originalAlias) return "", fmt.Errorf("alias \"%s\" resolves to website root directory", originalAlias)
} }
// Validate against directory traversal // Validate against directory traversal
if components[0] == ".." { if components[0] == ".." {
return "", fmt.Errorf("Alias \"%s\" traverses outside the website root directory", originalAlias) return "", fmt.Errorf("alias \"%s\" traverses outside the website root directory", originalAlias)
} }
// Handle Windows file and directory naming restrictions // Handle Windows file and directory naming restrictions
@ -171,7 +174,7 @@ func (a aliasHandler) targetPathAlias(src string) (string, error) {
for _, m := range msgs { for _, m := range msgs {
a.log.ERROR.Println(m) a.log.ERROR.Println(m)
} }
return "", fmt.Errorf("Cannot create \"%s\": Windows filename restriction", originalAlias) return "", fmt.Errorf("cannot create \"%s\": Windows filename restriction", originalAlias)
} }
for _, m := range msgs { for _, m := range msgs {
a.log.INFO.Println(m) a.log.INFO.Println(m)

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -50,7 +50,7 @@ func TestAlias(t *testing.T) {
b.CreateSites().Build(BuildCfg{}) b.CreateSites().Build(BuildCfg{})
assert.Equal(1, len(b.H.Sites)) assert.Equal(1, len(b.H.Sites))
require.Len(t, b.H.Sites[0].RegularPages, 1) require.Len(t, b.H.Sites[0].RegularPages(), 1)
// the real page // the real page
b.AssertFileContent("public/page/index.html", "For some moments the old man") b.AssertFileContent("public/page/index.html", "For some moments the old man")

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -14,19 +14,13 @@
package hugolib package hugolib
import ( import (
"fmt"
"github.com/gohugoio/hugo/resources/resource"
"github.com/gohugoio/hugo/common/collections" "github.com/gohugoio/hugo/common/collections"
"github.com/gohugoio/hugo/resources/page"
) )
var ( var (
_ collections.Grouper = (*Page)(nil) _ collections.Grouper = (*pageState)(nil)
_ collections.Slicer = (*Page)(nil) _ collections.Slicer = (*pageState)(nil)
_ collections.Slicer = PageGroup{}
_ collections.Slicer = WeightedPage{}
_ resource.ResourcesConverter = Pages{}
) )
// collections.Slicer implementations below. We keep these bridge implementations // collections.Slicer implementations below. We keep these bridge implementations
@ -35,50 +29,8 @@ var (
// Slice is not meant to be used externally. It's a bridge function // Slice is not meant to be used externally. It's a bridge function
// for the template functions. See collections.Slice. // for the template functions. See collections.Slice.
func (p *Page) Slice(items interface{}) (interface{}, error) { func (p *pageState) Slice(items interface{}) (interface{}, error) {
return toPages(items) return page.ToPages(items)
}
// Slice is not meant to be used externally. It's a bridge function
// for the template functions. See collections.Slice.
func (p PageGroup) Slice(in interface{}) (interface{}, error) {
switch items := in.(type) {
case PageGroup:
return items, nil
case []interface{}:
groups := make(PagesGroup, len(items))
for i, v := range items {
g, ok := v.(PageGroup)
if !ok {
return nil, fmt.Errorf("type %T is not a PageGroup", v)
}
groups[i] = g
}
return groups, nil
default:
return nil, fmt.Errorf("invalid slice type %T", items)
}
}
// Slice is not meant to be used externally. It's a bridge function
// for the template functions. See collections.Slice.
func (p WeightedPage) Slice(in interface{}) (interface{}, error) {
switch items := in.(type) {
case WeightedPages:
return items, nil
case []interface{}:
weighted := make(WeightedPages, len(items))
for i, v := range items {
g, ok := v.(WeightedPage)
if !ok {
return nil, fmt.Errorf("type %T is not a WeightedPage", v)
}
weighted[i] = g
}
return weighted, nil
default:
return nil, fmt.Errorf("invalid slice type %T", items)
}
} }
// collections.Grouper implementations below // collections.Grouper implementations below
@ -86,19 +38,10 @@ func (p WeightedPage) Slice(in interface{}) (interface{}, error) {
// Group creates a PageGroup from a key and a Pages object // Group creates a PageGroup from a key and a Pages object
// This method is not meant for external use. It got its non-typed arguments to satisfy // This method is not meant for external use. It got its non-typed arguments to satisfy
// a very generic interface in the tpl package. // a very generic interface in the tpl package.
func (p *Page) Group(key interface{}, in interface{}) (interface{}, error) { func (p *pageState) Group(key interface{}, in interface{}) (interface{}, error) {
pages, err := toPages(in) pages, err := page.ToPages(in)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return PageGroup{Key: key, Pages: pages}, nil return page.PageGroup{Key: key, Pages: pages}, nil
}
// ToResources wraps resource.ResourcesConverter
func (pages Pages) ToResources() resource.Resources {
r := make(resource.Resources, len(pages))
for i, p := range pages {
r[i] = p
}
return r
} }

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -40,7 +40,7 @@ title: "Page"
b.CreateSites().Build(BuildCfg{}) b.CreateSites().Build(BuildCfg{})
assert.Equal(1, len(b.H.Sites)) assert.Equal(1, len(b.H.Sites))
require.Len(t, b.H.Sites[0].RegularPages, 2) require.Len(t, b.H.Sites[0].RegularPages(), 2)
b.AssertFileContent("public/index.html", "cool: 2") b.AssertFileContent("public/index.html", "cool: 2")
} }
@ -79,12 +79,12 @@ tags_weight: %d
b.CreateSites().Build(BuildCfg{}) b.CreateSites().Build(BuildCfg{})
assert.Equal(1, len(b.H.Sites)) assert.Equal(1, len(b.H.Sites))
require.Len(t, b.H.Sites[0].RegularPages, 2) require.Len(t, b.H.Sites[0].RegularPages(), 2)
b.AssertFileContent("public/index.html", b.AssertFileContent("public/index.html",
"pages:2:hugolib.Pages:Page(/page1.md)/Page(/page2.md)", "pages:2:page.Pages:Page(/page1.md)/Page(/page2.md)",
"pageGroups:2:hugolib.PagesGroup:Page(/page1.md)/Page(/page2.md)", "pageGroups:2:page.PagesGroup:Page(/page1.md)/Page(/page2.md)",
`weightedPages:2::hugolib.WeightedPages:[WeightedPage(10,"Page") WeightedPage(20,"Page")]`) `weightedPages:2::page.WeightedPages:[WeightedPage(10,"Page") WeightedPage(20,"Page")]`)
} }
func TestAppendFunc(t *testing.T) { func TestAppendFunc(t *testing.T) {
@ -129,11 +129,11 @@ tags_weight: %d
b.CreateSites().Build(BuildCfg{}) b.CreateSites().Build(BuildCfg{})
assert.Equal(1, len(b.H.Sites)) assert.Equal(1, len(b.H.Sites))
require.Len(t, b.H.Sites[0].RegularPages, 2) require.Len(t, b.H.Sites[0].RegularPages(), 2)
b.AssertFileContent("public/index.html", b.AssertFileContent("public/index.html",
"pages:2:hugolib.Pages:Page(/page2.md)/Page(/page1.md)", "pages:2:page.Pages:Page(/page2.md)/Page(/page1.md)",
"appendPages:9:hugolib.Pages:home/page", "appendPages:9:page.Pages:home/page",
"appendStrings:[]string:[a b c d e]", "appendStrings:[]string:[a b c d e]",
"appendStringsSlice:[]string:[a b c c d]", "appendStringsSlice:[]string:[a b c c d]",
"union:[]string:[a b c d e]", "union:[]string:[a b c d e]",

View File

@ -1,4 +1,4 @@
// Copyright 2016-present The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -24,7 +24,6 @@ import (
"github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/hugolib/paths" "github.com/gohugoio/hugo/hugolib/paths"
"github.com/pkg/errors" "github.com/pkg/errors"
_errors "github.com/pkg/errors" _errors "github.com/pkg/errors"
@ -177,14 +176,6 @@ type configLoader struct {
ConfigSourceDescriptor ConfigSourceDescriptor
} }
func (l configLoader) wrapFileInfoError(err error, fi os.FileInfo) error {
rfi, ok := fi.(hugofs.RealFilenameInfo)
if !ok {
return err
}
return l.wrapFileError(err, rfi.RealFilename())
}
func (l configLoader) loadConfig(configName string, v *viper.Viper) (string, error) { func (l configLoader) loadConfig(configName string, v *viper.Viper) (string, error) {
baseDir := l.configFileDir() baseDir := l.configFileDir()
var baseFilename string var baseFilename string
@ -240,11 +231,6 @@ func (l configLoader) wrapFileError(err error, filename string) error {
return err return err
} }
func (l configLoader) newRealBaseFs(path string) afero.Fs {
return hugofs.NewBasePathRealFilenameFs(afero.NewBasePathFs(l.Fs, path).(*afero.BasePathFs))
}
func (l configLoader) loadConfigFromConfigDir(v *viper.Viper) ([]string, error) { func (l configLoader) loadConfigFromConfigDir(v *viper.Viper) ([]string, error) {
sourceFs := l.Fs sourceFs := l.Fs
configDir := l.AbsConfigDir configDir := l.AbsConfigDir
@ -274,7 +260,7 @@ func (l configLoader) loadConfigFromConfigDir(v *viper.Viper) ([]string, error)
for _, configDir := range configDirs { for _, configDir := range configDirs {
err := afero.Walk(sourceFs, configDir, func(path string, fi os.FileInfo, err error) error { err := afero.Walk(sourceFs, configDir, func(path string, fi os.FileInfo, err error) error {
if fi == nil { if fi == nil || err != nil {
return nil return nil
} }
@ -616,8 +602,8 @@ func loadDefaultSettingsFor(v *viper.Viper) error {
v.SetDefault("removePathAccents", false) v.SetDefault("removePathAccents", false)
v.SetDefault("titleCaseStyle", "AP") v.SetDefault("titleCaseStyle", "AP")
v.SetDefault("taxonomies", map[string]string{"tag": "tags", "category": "categories"}) v.SetDefault("taxonomies", map[string]string{"tag": "tags", "category": "categories"})
v.SetDefault("permalinks", make(PermalinkOverrides, 0)) v.SetDefault("permalinks", make(map[string]string))
v.SetDefault("sitemap", Sitemap{Priority: -1, Filename: "sitemap.xml"}) v.SetDefault("sitemap", config.Sitemap{Priority: -1, Filename: "sitemap.xml"})
v.SetDefault("pygmentsStyle", "monokai") v.SetDefault("pygmentsStyle", "monokai")
v.SetDefault("pygmentsUseClasses", false) v.SetDefault("pygmentsUseClasses", false)
v.SetDefault("pygmentsCodeFences", false) v.SetDefault("pygmentsCodeFences", false)
@ -625,7 +611,6 @@ func loadDefaultSettingsFor(v *viper.Viper) error {
v.SetDefault("pygmentsOptions", "") v.SetDefault("pygmentsOptions", "")
v.SetDefault("disableLiveReload", false) v.SetDefault("disableLiveReload", false)
v.SetDefault("pluralizeListTitles", true) v.SetDefault("pluralizeListTitles", true)
v.SetDefault("preserveTaxonomyNames", false)
v.SetDefault("forceSyncStatic", false) v.SetDefault("forceSyncStatic", false)
v.SetDefault("footnoteAnchorPrefix", "") v.SetDefault("footnoteAnchorPrefix", "")
v.SetDefault("footnoteReturnLinkContents", "") v.SetDefault("footnoteReturnLinkContents", "")

View File

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -349,7 +349,7 @@ func doTestDataDirImpl(t *testing.T, dd dataDir, expected interface{}, configKey
s := buildSingleSiteExpected(t, false, expectBuildError, depsCfg, BuildCfg{SkipRender: true}) s := buildSingleSiteExpected(t, false, expectBuildError, depsCfg, BuildCfg{SkipRender: true})
if !expectBuildError && !reflect.DeepEqual(expected, s.Data) { if !expectBuildError && !reflect.DeepEqual(expected, s.h.Data()) {
// This disabled code detects the situation described in the WARNING message below. // This disabled code detects the situation described in the WARNING message below.
// The situation seems to only occur for TOML data with integer values. // The situation seems to only occur for TOML data with integer values.
// Perhaps the TOML parser returns ints in another type. // Perhaps the TOML parser returns ints in another type.
@ -366,7 +366,7 @@ func doTestDataDirImpl(t *testing.T, dd dataDir, expected interface{}, configKey
} }
*/ */
return fmt.Sprintf("Expected data:\n%v got\n%v\n\nExpected type structure:\n%#[1]v got\n%#[2]v", expected, s.Data) return fmt.Sprintf("Expected data:\n%v got\n%v\n\nExpected type structure:\n%#[1]v got\n%#[2]v", expected, s.h.Data())
} }
return return

View File

@ -1,4 +1,4 @@
// Copyright 2016 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -18,6 +18,8 @@ import (
"fmt" "fmt"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/deps"
"github.com/spf13/afero" "github.com/spf13/afero"
@ -33,13 +35,13 @@ func TestDisableKindsNoneDisabled(t *testing.T) {
func TestDisableKindsSomeDisabled(t *testing.T) { func TestDisableKindsSomeDisabled(t *testing.T) {
t.Parallel() t.Parallel()
doTestDisableKinds(t, KindSection, kind404) doTestDisableKinds(t, page.KindSection, kind404)
} }
func TestDisableKindsOneDisabled(t *testing.T) { func TestDisableKindsOneDisabled(t *testing.T) {
t.Parallel() t.Parallel()
for _, kind := range allKinds { for _, kind := range allKinds {
if kind == KindPage { if kind == page.KindPage {
// Turning off regular page generation have some side-effects // Turning off regular page generation have some side-effects
// not handled by the assertions below (no sections), so // not handled by the assertions below (no sections), so
// skip that for now. // skip that for now.
@ -124,64 +126,64 @@ func assertDisabledKinds(th testHelper, s *Site, disabled ...string) {
assertDisabledKind(th, assertDisabledKind(th,
func(isDisabled bool) bool { func(isDisabled bool) bool {
if isDisabled { if isDisabled {
return len(s.RegularPages) == 0 return len(s.RegularPages()) == 0
} }
return len(s.RegularPages) > 0 return len(s.RegularPages()) > 0
}, disabled, KindPage, "public/sect/p1/index.html", "Single|P1") }, disabled, page.KindPage, "public/sect/p1/index.html", "Single|P1")
assertDisabledKind(th, assertDisabledKind(th,
func(isDisabled bool) bool { func(isDisabled bool) bool {
p := s.getPage(KindHome) p := s.getPage(page.KindHome)
if isDisabled { if isDisabled {
return p == nil return p == nil
} }
return p != nil return p != nil
}, disabled, KindHome, "public/index.html", "Home") }, disabled, page.KindHome, "public/index.html", "Home")
assertDisabledKind(th, assertDisabledKind(th,
func(isDisabled bool) bool { func(isDisabled bool) bool {
p := s.getPage(KindSection, "sect") p := s.getPage(page.KindSection, "sect")
if isDisabled { if isDisabled {
return p == nil return p == nil
} }
return p != nil return p != nil
}, disabled, KindSection, "public/sect/index.html", "Sects") }, disabled, page.KindSection, "public/sect/index.html", "Sects")
assertDisabledKind(th, assertDisabledKind(th,
func(isDisabled bool) bool { func(isDisabled bool) bool {
p := s.getPage(KindTaxonomy, "tags", "tag1") p := s.getPage(page.KindTaxonomy, "tags", "tag1")
if isDisabled { if isDisabled {
return p == nil return p == nil
} }
return p != nil return p != nil
}, disabled, KindTaxonomy, "public/tags/tag1/index.html", "Tag1") }, disabled, page.KindTaxonomy, "public/tags/tag1/index.html", "Tag1")
assertDisabledKind(th, assertDisabledKind(th,
func(isDisabled bool) bool { func(isDisabled bool) bool {
p := s.getPage(KindTaxonomyTerm, "tags") p := s.getPage(page.KindTaxonomyTerm, "tags")
if isDisabled { if isDisabled {
return p == nil return p == nil
} }
return p != nil return p != nil
}, disabled, KindTaxonomyTerm, "public/tags/index.html", "Tags") }, disabled, page.KindTaxonomyTerm, "public/tags/index.html", "Tags")
assertDisabledKind(th, assertDisabledKind(th,
func(isDisabled bool) bool { func(isDisabled bool) bool {
p := s.getPage(KindTaxonomyTerm, "categories") p := s.getPage(page.KindTaxonomyTerm, "categories")
if isDisabled { if isDisabled {
return p == nil return p == nil
} }
return p != nil return p != nil
}, disabled, KindTaxonomyTerm, "public/categories/index.html", "Category Terms") }, disabled, page.KindTaxonomyTerm, "public/categories/index.html", "Category Terms")
assertDisabledKind(th, assertDisabledKind(th,
func(isDisabled bool) bool { func(isDisabled bool) bool {
p := s.getPage(KindTaxonomy, "categories", "hugo") p := s.getPage(page.KindTaxonomy, "categories", "hugo")
if isDisabled { if isDisabled {
return p == nil return p == nil
} }
return p != nil return p != nil
}, disabled, KindTaxonomy, "public/categories/hugo/index.html", "Hugo") }, disabled, page.KindTaxonomy, "public/categories/hugo/index.html", "Hugo")
// The below have no page in any collection. // The below have no page in any collection.
assertDisabledKind(th, func(isDisabled bool) bool { return true }, disabled, kindRSS, "public/index.xml", "<link>") assertDisabledKind(th, func(isDisabled bool) bool { return true }, disabled, kindRSS, "public/index.xml", "<link>")
assertDisabledKind(th, func(isDisabled bool) bool { return true }, disabled, kindSitemap, "public/sitemap.xml", "sitemap") assertDisabledKind(th, func(isDisabled bool) bool { return true }, disabled, kindSitemap, "public/sitemap.xml", "sitemap")
@ -195,7 +197,7 @@ func assertDisabledKind(th testHelper, kindAssert func(bool) bool, disabled []st
if kind == kindRSS && !isDisabled { if kind == kindRSS && !isDisabled {
// If the home page is also disabled, there is not RSS to look for. // If the home page is also disabled, there is not RSS to look for.
if stringSliceContains(KindHome, disabled...) { if stringSliceContains(page.KindHome, disabled...) {
isDisabled = true isDisabled = true
} }
} }

View File

@ -1,4 +1,4 @@
// Copyright 2016 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -20,6 +20,8 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/spf13/cast"
"path/filepath" "path/filepath"
"github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/deps"
@ -67,9 +69,11 @@ func doTestShortcodeCrossrefs(t *testing.T, relative bool) {
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
require.Len(t, s.RegularPages, 1) require.Len(t, s.RegularPages(), 1)
output := string(s.RegularPages[0].content()) content, err := s.RegularPages()[0].Content()
require.NoError(t, err)
output := cast.ToString(content)
if !strings.Contains(output, expected) { if !strings.Contains(output, expected) {
t.Errorf("Got\n%q\nExpected\n%q", output, expected) t.Errorf("Got\n%q\nExpected\n%q", output, expected)

View File

@ -1,4 +1,4 @@
// Copyright 2016-present The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -19,6 +19,7 @@ import (
"github.com/bep/gitmap" "github.com/bep/gitmap"
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/resources/page"
) )
type gitInfo struct { type gitInfo struct {
@ -26,15 +27,12 @@ type gitInfo struct {
repo *gitmap.GitRepo repo *gitmap.GitRepo
} }
func (g *gitInfo) forPage(p *Page) (*gitmap.GitInfo, bool) { func (g *gitInfo) forPage(p page.Page) *gitmap.GitInfo {
if g == nil { name := strings.TrimPrefix(filepath.ToSlash(p.File().Filename()), g.contentDir)
return nil, false
}
name := strings.TrimPrefix(filepath.ToSlash(p.Filename()), g.contentDir)
name = strings.TrimPrefix(name, "/") name = strings.TrimPrefix(name, "/")
return g.repo.Files[name], true return g.repo.Files[name]
} }
func newGitInfo(cfg config.Provider) (*gitInfo, error) { func newGitInfo(cfg config.Provider) (*gitInfo, error) {

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -14,14 +14,24 @@
package hugolib package hugolib
import ( import (
"errors"
"io" "io"
"path/filepath" "path/filepath"
"sort" "sort"
"strings" "strings"
"sync" "sync"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/parser/metadecoders"
"github.com/gohugoio/hugo/hugofs"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/source"
"github.com/bep/gitmap"
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config"
"github.com/spf13/afero"
"github.com/gohugoio/hugo/publisher" "github.com/gohugoio/hugo/publisher"
@ -30,8 +40,10 @@ import (
"github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/langs" "github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/lazy"
"github.com/gohugoio/hugo/i18n" "github.com/gohugoio/hugo/i18n"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/tpl/tplimpl" "github.com/gohugoio/hugo/tpl/tplimpl"
) )
@ -48,17 +60,96 @@ type HugoSites struct {
// If this is running in the dev server. // If this is running in the dev server.
running bool running bool
// Render output formats for all sites.
renderFormats output.Formats
*deps.Deps *deps.Deps
gitInfo *gitInfo
// As loaded from the /data dirs
data map[string]interface{}
// Keeps track of bundle directories and symlinks to enable partial rebuilding. // Keeps track of bundle directories and symlinks to enable partial rebuilding.
ContentChanges *contentChangeMap ContentChanges *contentChangeMap
// If enabled, keeps a revision map for all content. init *hugoSitesInit
gitInfo *gitInfo
*fatalErrorHandler
} }
func (h *HugoSites) siteInfos() SiteInfos { type fatalErrorHandler struct {
infos := make(SiteInfos, len(h.Sites)) mu sync.Mutex
h *HugoSites
err error
done bool
donec chan bool // will be closed when done
}
// FatalError error is used in some rare situations where it does not make sense to
// continue processing, to abort as soon as possible and log the error.
func (f *fatalErrorHandler) FatalError(err error) {
f.mu.Lock()
defer f.mu.Unlock()
if !f.done {
f.done = true
close(f.donec)
}
f.err = err
}
func (f *fatalErrorHandler) getErr() error {
f.mu.Lock()
defer f.mu.Unlock()
return f.err
}
func (f *fatalErrorHandler) Done() <-chan bool {
return f.donec
}
type hugoSitesInit struct {
// Loads the data from all of the /data folders.
data *lazy.Init
// Loads the Git info for all the pages if enabled.
gitInfo *lazy.Init
// Maps page translations.
translations *lazy.Init
}
func (h *hugoSitesInit) Reset() {
h.data.Reset()
h.gitInfo.Reset()
h.translations.Reset()
}
func (h *HugoSites) Data() map[string]interface{} {
if _, err := h.init.data.Do(); err != nil {
h.SendError(errors.Wrap(err, "failed to load data"))
return nil
}
return h.data
}
func (h *HugoSites) gitInfoForPage(p page.Page) (*gitmap.GitInfo, error) {
if _, err := h.init.gitInfo.Do(); err != nil {
return nil, err
}
if h.gitInfo == nil {
return nil, nil
}
return h.gitInfo.forPage(p), nil
}
func (h *HugoSites) siteInfos() page.Sites {
infos := make(page.Sites, len(h.Sites))
for i, site := range h.Sites { for i, site := range h.Sites {
infos[i] = &site.Info infos[i] = &site.Info
} }
@ -106,7 +197,7 @@ func (h *HugoSites) IsMultihost() bool {
func (h *HugoSites) LanguageSet() map[string]bool { func (h *HugoSites) LanguageSet() map[string]bool {
set := make(map[string]bool) set := make(map[string]bool)
for _, s := range h.Sites { for _, s := range h.Sites {
set[s.Language.Lang] = true set[s.language.Lang] = true
} }
return set return set
} }
@ -129,14 +220,14 @@ func (h *HugoSites) PrintProcessingStats(w io.Writer) {
func (h *HugoSites) langSite() map[string]*Site { func (h *HugoSites) langSite() map[string]*Site {
m := make(map[string]*Site) m := make(map[string]*Site)
for _, s := range h.Sites { for _, s := range h.Sites {
m[s.Language.Lang] = s m[s.language.Lang] = s
} }
return m return m
} }
// GetContentPage finds a Page with content given the absolute filename. // GetContentPage finds a Page with content given the absolute filename.
// Returns nil if none found. // Returns nil if none found.
func (h *HugoSites) GetContentPage(filename string) *Page { func (h *HugoSites) GetContentPage(filename string) page.Page {
for _, s := range h.Sites { for _, s := range h.Sites {
pos := s.rawAllPages.findPagePosByFilename(filename) pos := s.rawAllPages.findPagePosByFilename(filename)
if pos == -1 { if pos == -1 {
@ -178,10 +269,40 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
running: cfg.Running, running: cfg.Running,
multilingual: langConfig, multilingual: langConfig,
multihost: cfg.Cfg.GetBool("multihost"), multihost: cfg.Cfg.GetBool("multihost"),
Sites: sites} Sites: sites,
init: &hugoSitesInit{
data: lazy.New(),
gitInfo: lazy.New(),
translations: lazy.New(),
},
}
h.fatalErrorHandler = &fatalErrorHandler{
h: h,
donec: make(chan bool),
}
h.init.data.Add(func() (interface{}, error) {
err := h.loadData(h.PathSpec.BaseFs.Data.Fs)
return err, nil
})
h.init.translations.Add(func() (interface{}, error) {
if len(h.Sites) > 1 {
allTranslations := pagesToTranslationsMap(h.Sites)
assignTranslationsToPages(allTranslations, h.Sites)
}
return nil, nil
})
h.init.gitInfo.Add(func() (interface{}, error) {
err := h.loadGitInfo()
return nil, err
})
for _, s := range sites { for _, s := range sites {
s.owner = h s.h = h
} }
if err := applyDeps(cfg, sites...); err != nil { if err := applyDeps(cfg, sites...); err != nil {
@ -197,14 +318,10 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
h.ContentChanges = contentChangeTracker h.ContentChanges = contentChangeTracker
} }
if err := h.initGitInfo(); err != nil {
return nil, err
}
return h, nil return h, nil
} }
func (h *HugoSites) initGitInfo() error { func (h *HugoSites) loadGitInfo() error {
if h.Cfg.GetBool("enableGitInfo") { if h.Cfg.GetBool("enableGitInfo") {
gi, err := newGitInfo(h.Cfg) gi, err := newGitInfo(h.Cfg)
if err != nil { if err != nil {
@ -247,16 +364,16 @@ func applyDeps(cfg deps.DepsCfg, sites ...*Site) error {
d.Site = &s.Info d.Site = &s.Info
siteConfig, err := loadSiteConfig(s.Language) siteConfig, err := loadSiteConfig(s.language)
if err != nil { if err != nil {
return err return err
} }
s.siteConfig = siteConfig s.siteConfigConfig = siteConfig
s.siteRefLinker, err = newSiteRefLinker(s.Language, s) s.siteRefLinker, err = newSiteRefLinker(s.language, s)
return err return err
} }
cfg.Language = s.Language cfg.Language = s.language
cfg.MediaTypes = s.mediaTypesConfig cfg.MediaTypes = s.mediaTypesConfig
cfg.OutputFormats = s.outputFormatsConfig cfg.OutputFormats = s.outputFormatsConfig
@ -347,11 +464,23 @@ func createSitesFromConfig(cfg deps.DepsCfg) ([]*Site, error) {
return sites, nil return sites, nil
} }
// Reset resets the sites and template caches, making it ready for a full rebuild. // Reset resets the sites and template caches etc., making it ready for a full rebuild.
func (h *HugoSites) reset() { func (h *HugoSites) reset(config *BuildCfg) {
for i, s := range h.Sites { if config.ResetState {
h.Sites[i] = s.reset() for i, s := range h.Sites {
h.Sites[i] = s.reset()
if r, ok := s.Fs.Destination.(hugofs.Reseter); ok {
r.Reset()
}
}
} }
h.fatalErrorHandler = &fatalErrorHandler{
h: h,
donec: make(chan bool),
}
h.init.Reset()
} }
// resetLogs resets the log counters etc. Used to do a new build on the same sites. // resetLogs resets the log counters etc. Used to do a new build on the same sites.
@ -387,7 +516,7 @@ func (h *HugoSites) createSitesFromConfig(cfg config.Provider) error {
h.Sites = sites h.Sites = sites
for _, s := range sites { for _, s := range sites {
s.owner = h s.h = h
} }
if err := applyDeps(depsCfg, sites...); err != nil { if err := applyDeps(depsCfg, sites...); err != nil {
@ -435,7 +564,10 @@ type BuildCfg struct {
// Note that a page does not have to have a content page / file. // Note that a page does not have to have a content page / file.
// For regular builds, this will allways return true. // For regular builds, this will allways return true.
// TODO(bep) rename/work this. // TODO(bep) rename/work this.
func (cfg *BuildCfg) shouldRender(p *Page) bool { func (cfg *BuildCfg) shouldRender(p *pageState) bool {
if !p.render {
return false
}
if p.forceRender { if p.forceRender {
p.forceRender = false p.forceRender = false
return true return true
@ -445,15 +577,8 @@ func (cfg *BuildCfg) shouldRender(p *Page) bool {
return true return true
} }
if cfg.RecentlyVisited[p.RelPermalink()] { if cfg.whatChanged != nil && p.File() != nil {
if cfg.PartialReRender { return cfg.whatChanged.files[p.File().Filename()]
_ = p.initMainOutputFormat()
}
return true
}
if cfg.whatChanged != nil && p.File != nil {
return cfg.whatChanged.files[p.File.Filename()]
} }
return false return false
@ -477,100 +602,85 @@ func (h *HugoSites) renderCrossSitesArtifacts() error {
return nil return nil
} }
// TODO(bep) DRY
sitemapDefault := parseSitemap(h.Cfg.GetStringMap("sitemap"))
s := h.Sites[0] s := h.Sites[0]
smLayouts := []string{"sitemapindex.xml", "_default/sitemapindex.xml", "_internal/_default/sitemapindex.xml"} smLayouts := []string{"sitemapindex.xml", "_default/sitemapindex.xml", "_internal/_default/sitemapindex.xml"}
return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemapindex", return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemapindex",
sitemapDefault.Filename, h.toSiteInfos(), smLayouts...) s.siteCfg.sitemap.Filename, h.toSiteInfos(), smLayouts...)
}
func (h *HugoSites) assignMissingTranslations() error {
// This looks heavy, but it should be a small number of nodes by now.
allPages := h.findAllPagesByKindNotIn(KindPage)
for _, nodeType := range []string{KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm} {
nodes := h.findPagesByKindIn(nodeType, allPages)
// Assign translations
for _, t1 := range nodes {
for _, t2 := range nodes {
if t1.isNewTranslation(t2) {
t1.translations = append(t1.translations, t2)
}
}
}
}
// Now we can sort the translations.
for _, p := range allPages {
if len(p.translations) > 0 {
pageBy(languagePageSort).Sort(p.translations)
}
}
return nil
} }
// createMissingPages creates home page, taxonomies etc. that isnt't created as an // createMissingPages creates home page, taxonomies etc. that isnt't created as an
// effect of having a content file. // effect of having a content file.
func (h *HugoSites) createMissingPages() error { func (h *HugoSites) createMissingPages() error {
var newPages Pages var newPages pageStatePages
for _, s := range h.Sites { for _, s := range h.Sites {
if s.isEnabled(KindHome) { if s.isEnabled(page.KindHome) {
// home pages // home pages
home := s.findPagesByKind(KindHome) homes := s.findWorkPagesByKind(page.KindHome)
if len(home) > 1 { if len(homes) > 1 {
panic("Too many homes") panic("Too many homes")
} }
if len(home) == 0 { var home *pageState
n := s.newHomePage() if len(homes) == 0 {
s.Pages = append(s.Pages, n) home = s.newPage(page.KindHome)
newPages = append(newPages, n) s.workAllPages = append(s.workAllPages, home)
newPages = append(newPages, home)
} else {
home = homes[0]
} }
s.home = home
} }
// Will create content-less root sections. // Will create content-less root sections.
newSections := s.assembleSections() newSections := s.assembleSections()
s.Pages = append(s.Pages, newSections...) s.workAllPages = append(s.workAllPages, newSections...)
newPages = append(newPages, newSections...) newPages = append(newPages, newSections...)
taxonomyTermEnabled := s.isEnabled(page.KindTaxonomyTerm)
taxonomyEnabled := s.isEnabled(page.KindTaxonomy)
// taxonomy list and terms pages // taxonomy list and terms pages
taxonomies := s.Language.GetStringMapString("taxonomies") taxonomies := s.Language().GetStringMapString("taxonomies")
if len(taxonomies) > 0 { if len(taxonomies) > 0 {
taxonomyPages := s.findPagesByKind(KindTaxonomy) taxonomyPages := s.findWorkPagesByKind(page.KindTaxonomy)
taxonomyTermsPages := s.findPagesByKind(KindTaxonomyTerm) taxonomyTermsPages := s.findWorkPagesByKind(page.KindTaxonomyTerm)
// Make them navigable from WeightedPage etc.
for _, p := range taxonomyPages {
p.getTaxonomyNodeInfo().TransferValues(p)
}
for _, p := range taxonomyTermsPages {
p.getTaxonomyNodeInfo().TransferValues(p)
}
for _, plural := range taxonomies { for _, plural := range taxonomies {
if s.isEnabled(KindTaxonomyTerm) { if taxonomyTermEnabled {
foundTaxonomyTermsPage := false foundTaxonomyTermsPage := false
for _, p := range taxonomyTermsPages { for _, p := range taxonomyTermsPages {
if p.sectionsPath() == plural { if p.SectionsPath() == plural {
foundTaxonomyTermsPage = true foundTaxonomyTermsPage = true
break break
} }
} }
if !foundTaxonomyTermsPage { if !foundTaxonomyTermsPage {
n := s.newTaxonomyTermsPage(plural) n := s.newPage(page.KindTaxonomyTerm, plural)
s.Pages = append(s.Pages, n) n.getTaxonomyNodeInfo().TransferValues(n)
s.workAllPages = append(s.workAllPages, n)
newPages = append(newPages, n) newPages = append(newPages, n)
} }
} }
if s.isEnabled(KindTaxonomy) { if taxonomyEnabled {
for key := range s.Taxonomies[plural] { for termKey := range s.Taxonomies[plural] {
foundTaxonomyPage := false
origKey := key foundTaxonomyPage := false
if s.Info.preserveTaxonomyNames {
key = s.PathSpec.MakePathSanitized(key)
}
for _, p := range taxonomyPages { for _, p := range taxonomyPages {
sectionsPath := p.sectionsPath() sectionsPath := p.SectionsPath()
if !strings.HasPrefix(sectionsPath, plural) { if !strings.HasPrefix(sectionsPath, plural) {
continue continue
@ -579,20 +689,21 @@ func (h *HugoSites) createMissingPages() error {
singularKey := strings.TrimPrefix(sectionsPath, plural) singularKey := strings.TrimPrefix(sectionsPath, plural)
singularKey = strings.TrimPrefix(singularKey, "/") singularKey = strings.TrimPrefix(singularKey, "/")
// Some people may have /authors/MaxMustermann etc. as paths. if singularKey == termKey {
// p.sections contains the raw values from the file system.
// See https://github.com/gohugoio/hugo/issues/4238
singularKey = s.PathSpec.MakePathSanitized(singularKey)
if singularKey == key {
foundTaxonomyPage = true foundTaxonomyPage = true
break break
} }
} }
if !foundTaxonomyPage { if !foundTaxonomyPage {
n := s.newTaxonomyPage(plural, origKey) info := s.taxonomyNodes.Get(plural, termKey)
s.Pages = append(s.Pages, n) if info == nil {
panic("no info found")
}
n := s.newTaxonomyPage(info.term, info.plural, info.termKey)
info.TransferValues(n)
s.workAllPages = append(s.workAllPages, n)
newPages = append(newPages, n) newPages = append(newPages, n)
} }
} }
@ -601,24 +712,6 @@ func (h *HugoSites) createMissingPages() error {
} }
} }
if len(newPages) > 0 {
// This resorting is unfortunate, but it also needs to be sorted
// when sections are created.
first := h.Sites[0]
first.AllPages = append(first.AllPages, newPages...)
first.AllPages.sort()
for _, s := range h.Sites {
s.Pages.sort()
}
for i := 1; i < len(h.Sites); i++ {
h.Sites[i].AllPages = first.AllPages
}
}
return nil return nil
} }
@ -628,61 +721,58 @@ func (h *HugoSites) removePageByFilename(filename string) {
} }
} }
func (h *HugoSites) setupTranslations() { func (h *HugoSites) createPageCollections() error {
for _, s := range h.Sites { for _, s := range h.Sites {
for _, p := range s.rawAllPages { for _, p := range s.rawAllPages {
if p.Kind == kindUnknown { if !s.isEnabled(p.Kind()) {
p.Kind = p.kindFromSections()
}
if !p.s.isEnabled(p.Kind) {
continue continue
} }
shouldBuild := p.shouldBuild() shouldBuild := s.shouldBuild(p)
s.updateBuildStats(p) s.buildStats.update(p)
if shouldBuild { if shouldBuild {
if p.headless { if p.m.headless {
s.headlessPages = append(s.headlessPages, p) s.headlessPages = append(s.headlessPages, p)
} else { } else {
s.Pages = append(s.Pages, p) s.workAllPages = append(s.workAllPages, p)
} }
} }
} }
} }
allPages := make(Pages, 0) allPages := newLazyPagesFactory(func() page.Pages {
var pages page.Pages
for _, s := range h.Sites {
pages = append(pages, s.Pages()...)
}
page.SortByDefault(pages)
return pages
})
allRegularPages := newLazyPagesFactory(func() page.Pages {
return h.findPagesByKindIn(page.KindPage, allPages.get())
})
for _, s := range h.Sites { for _, s := range h.Sites {
allPages = append(allPages, s.Pages...) s.PageCollections.allPages = allPages
s.PageCollections.allRegularPages = allRegularPages
} }
allPages.sort() return nil
for _, s := range h.Sites {
s.AllPages = allPages
}
// Pull over the collections from the master site
for i := 1; i < len(h.Sites); i++ {
h.Sites[i].Data = h.Sites[0].Data
}
if len(h.Sites) > 1 {
allTranslations := pagesToTranslationsMap(allPages)
assignTranslationsToPages(allTranslations, allPages)
}
} }
func (s *Site) preparePagesForRender(start bool) error { func (s *Site) preparePagesForRender(idx int) error {
for _, p := range s.Pages {
if err := p.prepareForRender(start); err != nil { for _, p := range s.workAllPages {
if err := p.initOutputFormat(idx); err != nil {
return err return err
} }
} }
for _, p := range s.headlessPages { for _, p := range s.headlessPages {
if err := p.prepareForRender(start); err != nil { if err := p.initOutputFormat(idx); err != nil {
return err return err
} }
} }
@ -691,62 +781,141 @@ func (s *Site) preparePagesForRender(start bool) error {
} }
// Pages returns all pages for all sites. // Pages returns all pages for all sites.
func (h *HugoSites) Pages() Pages { func (h *HugoSites) Pages() page.Pages {
return h.Sites[0].AllPages return h.Sites[0].AllPages()
} }
func handleShortcodes(p *PageWithoutContent, rawContentCopy []byte) ([]byte, error) { func (h *HugoSites) loadData(fs afero.Fs) (err error) {
if p.shortcodeState != nil && p.shortcodeState.contentShortcodes.Len() > 0 { spec := source.NewSourceSpec(h.PathSpec, fs)
p.s.Log.DEBUG.Printf("Replace %d shortcodes in %q", p.shortcodeState.contentShortcodes.Len(), p.BaseFileName()) fileSystem := spec.NewFilesystem("")
err := p.shortcodeState.executeShortcodesForDelta(p) h.data = make(map[string]interface{})
for _, r := range fileSystem.Files() {
if err != nil { if err := h.handleDataFile(r); err != nil {
return err
return rawContentCopy, err
}
rawContentCopy, err = replaceShortcodeTokens(rawContentCopy, shortcodePlaceholderPrefix, p.shortcodeState.renderedShortcodes)
if err != nil {
p.s.Log.FATAL.Printf("Failed to replace shortcode tokens in %s:\n%s", p.BaseFileName(), err.Error())
} }
} }
return rawContentCopy, nil return
} }
func (s *Site) updateBuildStats(page *Page) { func (h *HugoSites) handleDataFile(r source.ReadableFile) error {
if page.IsDraft() { var current map[string]interface{}
s.draftCount++
f, err := r.Open()
if err != nil {
return errors.Wrapf(err, "Failed to open data file %q:", r.LogicalName())
}
defer f.Close()
// Crawl in data tree to insert data
current = h.data
keyParts := strings.Split(r.Dir(), helpers.FilePathSeparator)
// The first path element is the virtual folder (typically theme name), which is
// not part of the key.
if len(keyParts) > 1 {
for _, key := range keyParts[1:] {
if key != "" {
if _, ok := current[key]; !ok {
current[key] = make(map[string]interface{})
}
current = current[key].(map[string]interface{})
}
}
} }
if page.IsFuture() { data, err := h.readData(r)
s.futureCount++ if err != nil {
return h.errWithFileContext(err, r)
} }
if page.IsExpired() { if data == nil {
s.expiredCount++ return nil
} }
// filepath.Walk walks the files in lexical order, '/' comes before '.'
// this warning could happen if
// 1. A theme uses the same key; the main data folder wins
// 2. A sub folder uses the same key: the sub folder wins
higherPrecedentData := current[r.BaseFileName()]
switch data.(type) {
case nil:
// hear the crickets?
case map[string]interface{}:
switch higherPrecedentData.(type) {
case nil:
current[r.BaseFileName()] = data
case map[string]interface{}:
// merge maps: insert entries from data for keys that
// don't already exist in higherPrecedentData
higherPrecedentMap := higherPrecedentData.(map[string]interface{})
for key, value := range data.(map[string]interface{}) {
if _, exists := higherPrecedentMap[key]; exists {
h.Log.WARN.Printf("Data for key '%s' in path '%s' is overridden by higher precedence data already in the data tree", key, r.Path())
} else {
higherPrecedentMap[key] = value
}
}
default:
// can't merge: higherPrecedentData is not a map
h.Log.WARN.Printf("The %T data from '%s' overridden by "+
"higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData)
}
case []interface{}:
if higherPrecedentData == nil {
current[r.BaseFileName()] = data
} else {
// we don't merge array data
h.Log.WARN.Printf("The %T data from '%s' overridden by "+
"higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData)
}
default:
h.Log.ERROR.Printf("unexpected data type %T in file %s", data, r.LogicalName())
}
return nil
} }
func (h *HugoSites) findPagesByKindNotIn(kind string, inPages Pages) Pages { func (h *HugoSites) errWithFileContext(err error, f source.File) error {
return h.Sites[0].findPagesByKindNotIn(kind, inPages) rfi, ok := f.FileInfo().(hugofs.RealFilenameInfo)
if !ok {
return err
}
realFilename := rfi.RealFilename()
err, _ = herrors.WithFileContextForFile(
err,
realFilename,
realFilename,
h.SourceSpec.Fs.Source,
herrors.SimpleLineMatcher)
return err
} }
func (h *HugoSites) findPagesByKindIn(kind string, inPages Pages) Pages { func (h *HugoSites) readData(f source.ReadableFile) (interface{}, error) {
file, err := f.Open()
if err != nil {
return nil, errors.Wrap(err, "readData: failed to open data file")
}
defer file.Close()
content := helpers.ReaderToBytes(file)
format := metadecoders.FormatFromString(f.Extension())
return metadecoders.Default.Unmarshal(content, format)
}
func (h *HugoSites) findPagesByKindIn(kind string, inPages page.Pages) page.Pages {
return h.Sites[0].findPagesByKindIn(kind, inPages) return h.Sites[0].findPagesByKindIn(kind, inPages)
} }
func (h *HugoSites) findAllPagesByKind(kind string) Pages { func (h *HugoSites) findPagesByShortcode(shortcode string) page.Pages {
return h.findPagesByKindIn(kind, h.Sites[0].AllPages) var pages page.Pages
}
func (h *HugoSites) findAllPagesByKindNotIn(kind string) Pages {
return h.findPagesByKindNotIn(kind, h.Sites[0].AllPages)
}
func (h *HugoSites) findPagesByShortcode(shortcode string) Pages {
var pages Pages
for _, s := range h.Sites { for _, s := range h.Sites {
pages = append(pages, s.findPagesByShortcode(shortcode)...) pages = append(pages, s.findPagesByShortcode(shortcode)...)
} }

View File

@ -1,4 +1,4 @@
// Copyright 2016-present The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -15,7 +15,12 @@ package hugolib
import ( import (
"bytes" "bytes"
"context"
"fmt" "fmt"
"runtime/trace"
"sort"
"github.com/gohugoio/hugo/output"
"errors" "errors"
@ -26,6 +31,9 @@ import (
// Build builds all sites. If filesystem events are provided, // Build builds all sites. If filesystem events are provided,
// this is considered to be a potential partial rebuild. // this is considered to be a potential partial rebuild.
func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
ctx, task := trace.NewTask(context.Background(), "Build")
defer task.End()
errCollector := h.StartErrorCollector() errCollector := h.StartErrorCollector()
errs := make(chan error) errs := make(chan error)
@ -71,22 +79,36 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
return err return err
} }
} else { } else {
if err := h.init(conf); err != nil { if err := h.initSites(conf); err != nil {
return err return err
} }
} }
if err := h.process(conf, events...); err != nil { var err error
f := func() {
err = h.process(conf, events...)
}
trace.WithRegion(ctx, "process", f)
if err != nil {
return err return err
} }
if err := h.assemble(conf); err != nil { f = func() {
err = h.assemble(conf)
}
trace.WithRegion(ctx, "assemble", f)
if err != nil {
return err return err
} }
return nil return nil
} }
prepareErr = prepare() f := func() {
prepareErr = prepare()
}
trace.WithRegion(ctx, "prepare", f)
if prepareErr != nil { if prepareErr != nil {
h.SendError(prepareErr) h.SendError(prepareErr)
} }
@ -94,7 +116,12 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
} }
if prepareErr == nil { if prepareErr == nil {
if err := h.render(conf); err != nil { var err error
f := func() {
err = h.render(conf)
}
trace.WithRegion(ctx, "render", f)
if err != nil {
h.SendError(err) h.SendError(err)
} }
} }
@ -120,6 +147,10 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
return err return err
} }
if err := h.fatalErrorHandler.getErr(); err != nil {
return err
}
errorCount := h.Log.ErrorCounter.Count() errorCount := h.Log.ErrorCounter.Count()
if errorCount > 0 { if errorCount > 0 {
return fmt.Errorf("logged %d error(s)", errorCount) return fmt.Errorf("logged %d error(s)", errorCount)
@ -132,17 +163,8 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
// Build lifecycle methods below. // Build lifecycle methods below.
// The order listed matches the order of execution. // The order listed matches the order of execution.
func (h *HugoSites) init(config *BuildCfg) error { func (h *HugoSites) initSites(config *BuildCfg) error {
h.reset(config)
for _, s := range h.Sites {
if s.PageCollections == nil {
s.PageCollections = newPageCollections()
}
}
if config.ResetState {
h.reset()
}
if config.NewConfig != nil { if config.NewConfig != nil {
if err := h.createSitesFromConfig(config.NewConfig); err != nil { if err := h.createSitesFromConfig(config.NewConfig); err != nil {
@ -155,28 +177,22 @@ func (h *HugoSites) init(config *BuildCfg) error {
func (h *HugoSites) initRebuild(config *BuildCfg) error { func (h *HugoSites) initRebuild(config *BuildCfg) error {
if config.NewConfig != nil { if config.NewConfig != nil {
return errors.New("Rebuild does not support 'NewConfig'.") return errors.New("rebuild does not support 'NewConfig'")
} }
if config.ResetState { if config.ResetState {
return errors.New("Rebuild does not support 'ResetState'.") return errors.New("rebuild does not support 'ResetState'")
} }
if !h.running { if !h.running {
return errors.New("Rebuild called when not in watch mode") return errors.New("rebuild called when not in watch mode")
}
if config.whatChanged.source {
// This is for the non-renderable content pages (rarely used, I guess).
// We could maybe detect if this is really needed, but it should be
// pretty fast.
h.TemplateHandler().RebuildClone()
} }
for _, s := range h.Sites { for _, s := range h.Sites {
s.resetBuildState() s.resetBuildState()
} }
h.reset(config)
h.resetLogs() h.resetLogs()
helpers.InitLoggers() helpers.InitLoggers()
@ -203,14 +219,6 @@ func (h *HugoSites) process(config *BuildCfg, events ...fsnotify.Event) error {
} }
func (h *HugoSites) assemble(config *BuildCfg) error { func (h *HugoSites) assemble(config *BuildCfg) error {
if config.whatChanged.source {
for _, s := range h.Sites {
s.createTaxonomiesEntries()
}
}
// TODO(bep) we could probably wait and do this in one go later
h.setupTranslations()
if len(h.Sites) > 1 { if len(h.Sites) > 1 {
// The first is initialized during process; initialize the rest // The first is initialized during process; initialize the rest
@ -221,47 +229,26 @@ func (h *HugoSites) assemble(config *BuildCfg) error {
} }
} }
if err := h.createPageCollections(); err != nil {
return err
}
if config.whatChanged.source { if config.whatChanged.source {
for _, s := range h.Sites { for _, s := range h.Sites {
if err := s.buildSiteMeta(); err != nil { if err := s.assembleTaxonomies(); err != nil {
return err return err
} }
} }
} }
// Create pagexs for the section pages etc. without content file.
if err := h.createMissingPages(); err != nil { if err := h.createMissingPages(); err != nil {
return err return err
} }
for _, s := range h.Sites { for _, s := range h.Sites {
for _, pages := range []Pages{s.Pages, s.headlessPages} {
for _, p := range pages {
// May have been set in front matter
if len(p.outputFormats) == 0 {
p.outputFormats = s.outputFormats[p.Kind]
}
if p.headless {
// headless = 1 output format only
p.outputFormats = p.outputFormats[:1]
}
for _, r := range p.Resources.ByType(pageResourceType) {
r.(*Page).outputFormats = p.outputFormats
}
if err := p.initPaths(); err != nil {
return err
}
}
}
s.assembleMenus()
s.refreshPageCaches()
s.setupSitePages() s.setupSitePages()
} sort.Stable(s.workAllPages)
if err := h.assignMissingTranslations(); err != nil {
return err
} }
return nil return nil
@ -269,42 +256,60 @@ func (h *HugoSites) assemble(config *BuildCfg) error {
} }
func (h *HugoSites) render(config *BuildCfg) error { func (h *HugoSites) render(config *BuildCfg) error {
siteRenderContext := &siteRenderContext{cfg: config, multihost: h.multihost}
if !config.PartialReRender { if !config.PartialReRender {
h.renderFormats = output.Formats{}
for _, s := range h.Sites { for _, s := range h.Sites {
s.initRenderFormats() s.initRenderFormats()
h.renderFormats = append(h.renderFormats, s.renderFormats...)
} }
} }
i := 0
for _, s := range h.Sites { for _, s := range h.Sites {
for i, rf := range s.renderFormats { for siteOutIdx, renderFormat := range s.renderFormats {
for _, s2 := range h.Sites { siteRenderContext.outIdx = siteOutIdx
// We render site by site, but since the content is lazily rendered siteRenderContext.sitesOutIdx = i
// and a site can "borrow" content from other sites, every site i++
// needs this set.
s2.rc = &siteRenderingContext{Format: rf}
isRenderingSite := s == s2 select {
case <-h.Done():
return nil
default:
// For the non-renderable pages, we use the content iself as
// template and we may have to re-parse and execute it for
// each output format.
h.TemplateHandler().RebuildClone()
if !config.PartialReRender { for _, s2 := range h.Sites {
if err := s2.preparePagesForRender(isRenderingSite && i == 0); err != nil { // We render site by site, but since the content is lazily rendered
return err // and a site can "borrow" content from other sites, every site
// needs this set.
s2.rc = &siteRenderingContext{Format: renderFormat}
if !config.PartialReRender {
if err := s2.preparePagesForRender(siteRenderContext.sitesOutIdx); err != nil {
return err
}
} }
} }
} if !config.SkipRender {
if config.PartialReRender {
if !config.SkipRender { if err := s.renderPages(siteRenderContext); err != nil {
if config.PartialReRender { return err
if err := s.renderPages(config); err != nil { }
return err } else {
} if err := s.render(siteRenderContext); err != nil {
} else { return err
if err := s.render(config, i); err != nil { }
return err
} }
} }
} }
} }
} }
if !config.SkipRender { if !config.SkipRender {

View File

@ -7,6 +7,9 @@ import (
"runtime" "runtime"
"strings" "strings"
"testing" "testing"
"time"
"github.com/fortytw2/leaktest"
"github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/herrors"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -20,25 +23,24 @@ type testSiteBuildErrorAsserter struct {
func (t testSiteBuildErrorAsserter) getFileError(err error) *herrors.ErrorWithFileContext { func (t testSiteBuildErrorAsserter) getFileError(err error) *herrors.ErrorWithFileContext {
t.assert.NotNil(err, t.name) t.assert.NotNil(err, t.name)
ferr := herrors.UnwrapErrorWithFileContext(err) ferr := herrors.UnwrapErrorWithFileContext(err)
t.assert.NotNil(ferr, fmt.Sprintf("[%s] got %T: %+v\n%s", t.name, err, err, trace())) t.assert.NotNil(ferr, fmt.Sprintf("[%s] got %T: %+v\n%s", t.name, err, err, stackTrace()))
return ferr return ferr
} }
func (t testSiteBuildErrorAsserter) assertLineNumber(lineNumber int, err error) { func (t testSiteBuildErrorAsserter) assertLineNumber(lineNumber int, err error) {
fe := t.getFileError(err) fe := t.getFileError(err)
t.assert.Equal(lineNumber, fe.Position().LineNumber, fmt.Sprintf("[%s] got => %s\n%s", t.name, fe, trace())) t.assert.Equal(lineNumber, fe.Position().LineNumber, fmt.Sprintf("[%s] got => %s\n%s", t.name, fe, stackTrace()))
} }
func (t testSiteBuildErrorAsserter) assertErrorMessage(e1, e2 string) { func (t testSiteBuildErrorAsserter) assertErrorMessage(e1, e2 string) {
// The error message will contain filenames with OS slashes. Normalize before compare. // The error message will contain filenames with OS slashes. Normalize before compare.
e1, e2 = filepath.ToSlash(e1), filepath.ToSlash(e2) e1, e2 = filepath.ToSlash(e1), filepath.ToSlash(e2)
t.assert.Contains(e2, e1, trace()) t.assert.Contains(e2, e1, stackTrace())
} }
func TestSiteBuildErrors(t *testing.T) { func TestSiteBuildErrors(t *testing.T) {
t.Parallel() t.Parallel()
assert := require.New(t)
const ( const (
yamlcontent = "yamlcontent" yamlcontent = "yamlcontent"
@ -88,9 +90,9 @@ func TestSiteBuildErrors(t *testing.T) {
}, },
assertCreateError: func(a testSiteBuildErrorAsserter, err error) { assertCreateError: func(a testSiteBuildErrorAsserter, err error) {
fe := a.getFileError(err) fe := a.getFileError(err)
assert.Equal(5, fe.Position().LineNumber) a.assert.Equal(5, fe.Position().LineNumber)
assert.Equal(1, fe.Position().ColumnNumber) a.assert.Equal(1, fe.Position().ColumnNumber)
assert.Equal("go-html-template", fe.ChromaLexer) a.assert.Equal("go-html-template", fe.ChromaLexer)
a.assertErrorMessage("\"layouts/_default/single.html:5:1\": parse failed: template: _default/single.html:5: unexpected \"}\" in operand", fe.Error()) a.assertErrorMessage("\"layouts/_default/single.html:5:1\": parse failed: template: _default/single.html:5: unexpected \"}\" in operand", fe.Error())
}, },
@ -103,9 +105,9 @@ func TestSiteBuildErrors(t *testing.T) {
}, },
assertBuildError: func(a testSiteBuildErrorAsserter, err error) { assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
fe := a.getFileError(err) fe := a.getFileError(err)
assert.Equal(5, fe.Position().LineNumber) a.assert.Equal(5, fe.Position().LineNumber)
assert.Equal(14, fe.Position().ColumnNumber) a.assert.Equal(14, fe.Position().ColumnNumber)
assert.Equal("go-html-template", fe.ChromaLexer) a.assert.Equal("go-html-template", fe.ChromaLexer)
a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error()) a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error())
}, },
@ -118,9 +120,9 @@ func TestSiteBuildErrors(t *testing.T) {
}, },
assertBuildError: func(a testSiteBuildErrorAsserter, err error) { assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
fe := a.getFileError(err) fe := a.getFileError(err)
assert.Equal(5, fe.Position().LineNumber) a.assert.Equal(5, fe.Position().LineNumber)
assert.Equal(14, fe.Position().ColumnNumber) a.assert.Equal(14, fe.Position().ColumnNumber)
assert.Equal("go-html-template", fe.ChromaLexer) a.assert.Equal("go-html-template", fe.ChromaLexer)
a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error()) a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error())
}, },
@ -143,8 +145,8 @@ func TestSiteBuildErrors(t *testing.T) {
}, },
assertBuildError: func(a testSiteBuildErrorAsserter, err error) { assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
fe := a.getFileError(err) fe := a.getFileError(err)
assert.Equal(7, fe.Position().LineNumber) a.assert.Equal(7, fe.Position().LineNumber)
assert.Equal("md", fe.ChromaLexer) a.assert.Equal("md", fe.ChromaLexer)
// Make sure that it contains both the content file and template // Make sure that it contains both the content file and template
a.assertErrorMessage(`content/myyaml.md:7:10": failed to render shortcode "sc"`, fe.Error()) a.assertErrorMessage(`content/myyaml.md:7:10": failed to render shortcode "sc"`, fe.Error())
a.assertErrorMessage(`shortcodes/sc.html:4:22: executing "shortcodes/sc.html" at <.Page.Titles>: can't evaluate`, fe.Error()) a.assertErrorMessage(`shortcodes/sc.html:4:22: executing "shortcodes/sc.html" at <.Page.Titles>: can't evaluate`, fe.Error())
@ -158,10 +160,10 @@ func TestSiteBuildErrors(t *testing.T) {
}, },
assertBuildError: func(a testSiteBuildErrorAsserter, err error) { assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
fe := a.getFileError(err) fe := a.getFileError(err)
assert.Equal(7, fe.Position().LineNumber) a.assert.Equal(7, fe.Position().LineNumber)
assert.Equal(14, fe.Position().ColumnNumber) a.assert.Equal(10, fe.Position().ColumnNumber)
assert.Equal("md", fe.ChromaLexer) a.assert.Equal("md", fe.ChromaLexer)
a.assertErrorMessage("\"content/myyaml.md:7:14\": failed to extract shortcode: template for shortcode \"nono\" not found", fe.Error()) a.assertErrorMessage(`"content/myyaml.md:7:10": failed to extract shortcode: template for shortcode "nono" not found`, fe.Error())
}, },
}, },
{ {
@ -182,8 +184,8 @@ func TestSiteBuildErrors(t *testing.T) {
}, },
assertBuildError: func(a testSiteBuildErrorAsserter, err error) { assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
fe := a.getFileError(err) fe := a.getFileError(err)
assert.Equal(6, fe.Position().LineNumber) a.assert.Equal(6, fe.Position().LineNumber)
assert.Equal("toml", fe.ErrorContext.ChromaLexer) a.assert.Equal("toml", fe.ErrorContext.ChromaLexer)
}, },
}, },
@ -196,8 +198,8 @@ func TestSiteBuildErrors(t *testing.T) {
assertBuildError: func(a testSiteBuildErrorAsserter, err error) { assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
fe := a.getFileError(err) fe := a.getFileError(err)
assert.Equal(3, fe.Position().LineNumber) a.assert.Equal(3, fe.Position().LineNumber)
assert.Equal("json", fe.ErrorContext.ChromaLexer) a.assert.Equal("json", fe.ErrorContext.ChromaLexer)
}, },
}, },
@ -210,42 +212,43 @@ func TestSiteBuildErrors(t *testing.T) {
}, },
assertBuildError: func(a testSiteBuildErrorAsserter, err error) { assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
assert.Error(err) a.assert.Error(err)
// This is fixed in latest Go source // This is fixed in latest Go source
if regexp.MustCompile("devel|12").MatchString(runtime.Version()) { if regexp.MustCompile("devel|12").MatchString(runtime.Version()) {
fe := a.getFileError(err) fe := a.getFileError(err)
assert.Equal(5, fe.Position().LineNumber) a.assert.Equal(5, fe.Position().LineNumber)
assert.Equal(21, fe.Position().ColumnNumber) a.assert.Equal(21, fe.Position().ColumnNumber)
} else { } else {
assert.Contains(err.Error(), `execute of template failed: panic in Execute`) a.assert.Contains(err.Error(), `execute of template failed: panic in Execute`)
} }
}, },
}, },
} }
for _, test := range tests { for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
errorAsserter := testSiteBuildErrorAsserter{ assert := require.New(t)
assert: assert, errorAsserter := testSiteBuildErrorAsserter{
name: test.name, assert: assert,
} name: test.name,
b := newTestSitesBuilder(t).WithSimpleConfigFile()
f := func(fileType, content string) string {
if fileType != test.fileType {
return content
} }
return test.fileFixer(content)
} b := newTestSitesBuilder(t).WithSimpleConfigFile()
b.WithTemplatesAdded("layouts/shortcodes/sc.html", f(shortcode, `SHORTCODE L1 f := func(fileType, content string) string {
if fileType != test.fileType {
return content
}
return test.fileFixer(content)
}
b.WithTemplatesAdded("layouts/shortcodes/sc.html", f(shortcode, `SHORTCODE L1
SHORTCODE L2 SHORTCODE L2
SHORTCODE L3: SHORTCODE L3:
SHORTCODE L4: {{ .Page.Title }} SHORTCODE L4: {{ .Page.Title }}
`)) `))
b.WithTemplatesAdded("layouts/_default/baseof.html", f(base, `BASEOF L1 b.WithTemplatesAdded("layouts/_default/baseof.html", f(base, `BASEOF L1
BASEOF L2 BASEOF L2
BASEOF L3 BASEOF L3
BASEOF L4{{ if .Title }}{{ end }} BASEOF L4{{ if .Title }}{{ end }}
@ -253,7 +256,7 @@ BASEOF L4{{ if .Title }}{{ end }}
BASEOF L6 BASEOF L6
`)) `))
b.WithTemplatesAdded("layouts/_default/single.html", f(single, `{{ define "main" }} b.WithTemplatesAdded("layouts/_default/single.html", f(single, `{{ define "main" }}
SINGLE L2: SINGLE L2:
SINGLE L3: SINGLE L3:
SINGLE L4: SINGLE L4:
@ -261,7 +264,7 @@ SINGLE L5: {{ .Title }} {{ .Content }}
{{ end }} {{ end }}
`)) `))
b.WithContent("myyaml.md", f(yamlcontent, `--- b.WithContent("myyaml.md", f(yamlcontent, `---
title: "The YAML" title: "The YAML"
--- ---
@ -275,7 +278,7 @@ The end.
`)) `))
b.WithContent("mytoml.md", f(tomlcontent, `+++ b.WithContent("mytoml.md", f(tomlcontent, `+++
title = "The TOML" title = "The TOML"
p1 = "v" p1 = "v"
p2 = "v" p2 = "v"
@ -288,7 +291,7 @@ Some content.
`)) `))
b.WithContent("myjson.md", f(jsoncontent, `{ b.WithContent("myjson.md", f(jsoncontent, `{
"title": "This is a title", "title": "This is a title",
"description": "This is a description." "description": "This is a description."
} }
@ -298,26 +301,30 @@ Some content.
`)) `))
createErr := b.CreateSitesE() createErr := b.CreateSitesE()
if test.assertCreateError != nil { if test.assertCreateError != nil {
test.assertCreateError(errorAsserter, createErr) test.assertCreateError(errorAsserter, createErr)
} else {
assert.NoError(createErr)
}
if createErr == nil {
buildErr := b.BuildE(BuildCfg{})
if test.assertBuildError != nil {
test.assertBuildError(errorAsserter, buildErr)
} else { } else {
assert.NoError(buildErr) assert.NoError(createErr)
} }
}
if createErr == nil {
buildErr := b.BuildE(BuildCfg{})
if test.assertBuildError != nil {
test.assertBuildError(errorAsserter, buildErr)
} else {
assert.NoError(buildErr)
}
}
})
} }
} }
// https://github.com/gohugoio/hugo/issues/5375 // https://github.com/gohugoio/hugo/issues/5375
func TestSiteBuildTimeout(t *testing.T) { func TestSiteBuildTimeout(t *testing.T) {
if !isCI() {
defer leaktest.CheckTimeout(t, 10*time.Second)()
}
b := newTestSitesBuilder(t) b := newTestSitesBuilder(t)
b.WithConfigFile("toml", ` b.WithConfigFile("toml", `
@ -342,6 +349,6 @@ title: "A page"
} }
b.CreateSites().Build(BuildCfg{}) b.CreateSites().BuildFail(BuildCfg{})
} }

View File

@ -1,16 +1,16 @@
package hugolib package hugolib
import ( import (
"bytes"
"fmt" "fmt"
"strings" "strings"
"testing" "testing"
"html/template"
"os" "os"
"path/filepath" "path/filepath"
"time" "time"
"github.com/gohugoio/hugo/resources/page"
"github.com/fortytw2/leaktest" "github.com/fortytw2/leaktest"
"github.com/fsnotify/fsnotify" "github.com/fsnotify/fsnotify"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
@ -66,8 +66,8 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
assert.Equal("/blog/en/foo", enSite.PathSpec.RelURL("foo", true)) assert.Equal("/blog/en/foo", enSite.PathSpec.RelURL("foo", true))
doc1en := enSite.RegularPages[0] doc1en := enSite.RegularPages()[0]
doc1fr := frSite.RegularPages[0] doc1fr := frSite.RegularPages()[0]
enPerm := doc1en.Permalink() enPerm := doc1en.Permalink()
enRelPerm := doc1en.RelPermalink() enRelPerm := doc1en.RelPermalink()
@ -100,7 +100,7 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
// Check list pages // Check list pages
b.AssertFileContent(pathMod("public/fr/sect/index.html"), "List", "Bonjour") b.AssertFileContent(pathMod("public/fr/sect/index.html"), "List", "Bonjour")
b.AssertFileContent("public/en/sect/index.html", "List", "Hello") b.AssertFileContent("public/en/sect/index.html", "List", "Hello")
b.AssertFileContent(pathMod("public/fr/plaques/frtag1/index.html"), "Taxonomy List", "Bonjour") b.AssertFileContent(pathMod("public/fr/plaques/FRtag1/index.html"), "Taxonomy List", "Bonjour")
b.AssertFileContent("public/en/tags/tag1/index.html", "Taxonomy List", "Hello") b.AssertFileContent("public/en/tags/tag1/index.html", "Taxonomy List", "Hello")
// Check sitemaps // Check sitemaps
@ -126,8 +126,8 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
pathMod(`<atom:link href="http://example.com/blog/fr/sect/index.xml"`)) pathMod(`<atom:link href="http://example.com/blog/fr/sect/index.xml"`))
b.AssertFileContent("public/en/sect/index.xml", `<atom:link href="http://example.com/blog/en/sect/index.xml"`) b.AssertFileContent("public/en/sect/index.xml", `<atom:link href="http://example.com/blog/en/sect/index.xml"`)
b.AssertFileContent( b.AssertFileContent(
pathMod("public/fr/plaques/frtag1/index.xml"), pathMod("public/fr/plaques/FRtag1/index.xml"),
pathMod(`<atom:link href="http://example.com/blog/fr/plaques/frtag1/index.xml"`)) pathMod(`<atom:link href="http://example.com/blog/fr/plaques/FRtag1/index.xml"`))
b.AssertFileContent("public/en/tags/tag1/index.xml", `<atom:link href="http://example.com/blog/en/tags/tag1/index.xml"`) b.AssertFileContent("public/en/tags/tag1/index.xml", `<atom:link href="http://example.com/blog/en/tags/tag1/index.xml"`)
// Check paginators // Check paginators
@ -140,12 +140,12 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
b.AssertFileContent(pathMod("public/fr/sect/page/2/index.html"), "List Page 2", "Bonjour", pathMod("http://example.com/blog/fr/sect/")) b.AssertFileContent(pathMod("public/fr/sect/page/2/index.html"), "List Page 2", "Bonjour", pathMod("http://example.com/blog/fr/sect/"))
b.AssertFileContent("public/en/sect/page/2/index.html", "List Page 2", "Hello", "http://example.com/blog/en/sect/") b.AssertFileContent("public/en/sect/page/2/index.html", "List Page 2", "Hello", "http://example.com/blog/en/sect/")
b.AssertFileContent( b.AssertFileContent(
pathMod("public/fr/plaques/frtag1/page/1/index.html"), pathMod("public/fr/plaques/FRtag1/page/1/index.html"),
pathMod(`refresh" content="0; url=http://example.com/blog/fr/plaques/frtag1/"`)) pathMod(`refresh" content="0; url=http://example.com/blog/fr/plaques/FRtag1/"`))
b.AssertFileContent("public/en/tags/tag1/page/1/index.html", `refresh" content="0; url=http://example.com/blog/en/tags/tag1/"`) b.AssertFileContent("public/en/tags/tag1/page/1/index.html", `refresh" content="0; url=http://example.com/blog/en/tags/tag1/"`)
b.AssertFileContent( b.AssertFileContent(
pathMod("public/fr/plaques/frtag1/page/2/index.html"), "List Page 2", "Bonjour", pathMod("public/fr/plaques/FRtag1/page/2/index.html"), "List Page 2", "Bonjour",
pathMod("http://example.com/blog/fr/plaques/frtag1/")) pathMod("http://example.com/blog/fr/plaques/FRtag1/"))
b.AssertFileContent("public/en/tags/tag1/page/2/index.html", "List Page 2", "Hello", "http://example.com/blog/en/tags/tag1/") b.AssertFileContent("public/en/tags/tag1/page/2/index.html", "List Page 2", "Hello", "http://example.com/blog/en/tags/tag1/")
// nn (Nynorsk) and nb (Bokmål) have custom pagePath: side ("page" in Norwegian) // nn (Nynorsk) and nb (Bokmål) have custom pagePath: side ("page" in Norwegian)
b.AssertFileContent("public/nn/side/1/index.html", `refresh" content="0; url=http://example.com/blog/nn/"`) b.AssertFileContent("public/nn/side/1/index.html", `refresh" content="0; url=http://example.com/blog/nn/"`)
@ -183,12 +183,12 @@ p1 = "p1en"
assert.Len(sites, 2) assert.Len(sites, 2)
nnSite := sites[0] nnSite := sites[0]
nnHome := nnSite.getPage(KindHome) nnHome := nnSite.getPage(page.KindHome)
assert.Len(nnHome.AllTranslations(), 2) assert.Len(nnHome.AllTranslations(), 2)
assert.Len(nnHome.Translations(), 1) assert.Len(nnHome.Translations(), 1)
assert.True(nnHome.IsTranslated()) assert.True(nnHome.IsTranslated())
enHome := sites[1].getPage(KindHome) enHome := sites[1].getPage(page.KindHome)
p1, err := enHome.Param("p1") p1, err := enHome.Param("p1")
assert.NoError(err) assert.NoError(err)
@ -199,9 +199,7 @@ p1 = "p1en"
assert.Equal("p1nn", p1) assert.Equal("p1nn", p1)
} }
//
func TestMultiSitesBuild(t *testing.T) { func TestMultiSitesBuild(t *testing.T) {
t.Parallel()
for _, config := range []struct { for _, config := range []struct {
content string content string
@ -211,7 +209,11 @@ func TestMultiSitesBuild(t *testing.T) {
{multiSiteYAMLConfigTemplate, "yml"}, {multiSiteYAMLConfigTemplate, "yml"},
{multiSiteJSONConfigTemplate, "json"}, {multiSiteJSONConfigTemplate, "json"},
} { } {
doTestMultiSitesBuild(t, config.content, config.suffix)
t.Run(config.suffix, func(t *testing.T) {
t.Parallel()
doTestMultiSitesBuild(t, config.content, config.suffix)
})
} }
} }
@ -228,64 +230,51 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
// Check site config // Check site config
for _, s := range sites { for _, s := range sites {
require.True(t, s.Info.defaultContentLanguageInSubdir, s.Info.Title) require.True(t, s.Info.defaultContentLanguageInSubdir, s.Info.title)
require.NotNil(t, s.disabledKinds) require.NotNil(t, s.disabledKinds)
} }
gp1 := b.H.GetContentPage(filepath.FromSlash("content/sect/doc1.en.md")) gp1 := b.H.GetContentPage(filepath.FromSlash("content/sect/doc1.en.md"))
require.NotNil(t, gp1) require.NotNil(t, gp1)
require.Equal(t, "doc1", gp1.title) require.Equal(t, "doc1", gp1.Title())
gp2 := b.H.GetContentPage(filepath.FromSlash("content/dummysect/notfound.md")) gp2 := b.H.GetContentPage(filepath.FromSlash("content/dummysect/notfound.md"))
require.Nil(t, gp2) require.Nil(t, gp2)
enSite := sites[0] enSite := sites[0]
enSiteHome := enSite.getPage(KindHome) enSiteHome := enSite.getPage(page.KindHome)
require.True(t, enSiteHome.IsTranslated()) require.True(t, enSiteHome.IsTranslated())
require.Equal(t, "en", enSite.Language.Lang) require.Equal(t, "en", enSite.language.Lang)
assert.Equal(5, len(enSite.RegularPages)) assert.Equal(5, len(enSite.RegularPages()))
assert.Equal(32, len(enSite.AllPages)) assert.Equal(32, len(enSite.AllPages()))
doc1en := enSite.RegularPages[0] // Check 404s
permalink := doc1en.Permalink() b.AssertFileContent("public/en/404.html", "404|en|404 Page not found")
require.Equal(t, "http://example.com/blog/en/sect/doc1-slug/", permalink, "invalid doc1.en permalink") b.AssertFileContent("public/fr/404.html", "404|fr|404 Page not found")
require.Len(t, doc1en.Translations(), 1, "doc1-en should have one translation, excluding itself")
doc2 := enSite.RegularPages[1] // Check robots.txt
permalink = doc2.Permalink() b.AssertFileContent("public/en/robots.txt", "robots|en|")
require.Equal(t, "http://example.com/blog/en/sect/doc2/", permalink, "invalid doc2 permalink") b.AssertFileContent("public/nn/robots.txt", "robots|nn|")
doc3 := enSite.RegularPages[2] b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Permalink: http://example.com/blog/en/sect/doc1-slug/")
permalink = doc3.Permalink() b.AssertFileContent("public/en/sect/doc2/index.html", "Permalink: http://example.com/blog/en/sect/doc2/")
// Note that /superbob is a custom URL set in frontmatter. b.AssertFileContent("public/superbob/index.html", "Permalink: http://example.com/blog/superbob/")
// We respect that URL literally (it can be /search.json)
// and do no not do any language code prefixing.
require.Equal(t, "http://example.com/blog/superbob/", permalink, "invalid doc3 permalink")
require.Equal(t, "/superbob", doc3.URL(), "invalid url, was specified on doc3")
b.AssertFileContent("public/superbob/index.html", "doc3|Hello|en")
require.Equal(t, doc2.PrevPage, doc3, "doc3 should follow doc2, in .PrevPage")
doc2 := enSite.RegularPages()[1]
doc3 := enSite.RegularPages()[2]
require.Equal(t, doc2.Prev(), doc3, "doc3 should follow doc2, in .PrevPage")
doc1en := enSite.RegularPages()[0]
doc1fr := doc1en.Translations()[0] doc1fr := doc1en.Translations()[0]
permalink = doc1fr.Permalink() b.AssertFileContent("public/fr/sect/doc1/index.html", "Permalink: http://example.com/blog/fr/sect/doc1/")
require.Equal(t, "http://example.com/blog/fr/sect/doc1/", permalink, "invalid doc1fr permalink")
require.Equal(t, doc1en.Translations()[0], doc1fr, "doc1-en should have doc1-fr as translation") require.Equal(t, doc1en.Translations()[0], doc1fr, "doc1-en should have doc1-fr as translation")
require.Equal(t, doc1fr.Translations()[0], doc1en, "doc1-fr should have doc1-en as translation") require.Equal(t, doc1fr.Translations()[0], doc1en, "doc1-fr should have doc1-en as translation")
require.Equal(t, "fr", doc1fr.Language().Lang) require.Equal(t, "fr", doc1fr.Language().Lang)
doc4 := enSite.AllPages[4] doc4 := enSite.AllPages()[4]
permalink = doc4.Permalink()
require.Equal(t, "http://example.com/blog/fr/sect/doc4/", permalink, "invalid doc4 permalink")
require.Equal(t, "/blog/fr/sect/doc4/", doc4.URL())
require.Len(t, doc4.Translations(), 0, "found translations for doc4") require.Len(t, doc4.Translations(), 0, "found translations for doc4")
doc5 := enSite.AllPages[5]
permalink = doc5.Permalink()
require.Equal(t, "http://example.com/blog/fr/somewhere/else/doc5/", permalink, "invalid doc5 permalink")
// Taxonomies and their URLs // Taxonomies and their URLs
require.Len(t, enSite.Taxonomies, 1, "should have 1 taxonomy") require.Len(t, enSite.Taxonomies, 1, "should have 1 taxonomy")
tags := enSite.Taxonomies["tags"] tags := enSite.Taxonomies["tags"]
@ -294,12 +283,13 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
frSite := sites[1] frSite := sites[1]
require.Equal(t, "fr", frSite.Language.Lang) require.Equal(t, "fr", frSite.language.Lang)
require.Len(t, frSite.RegularPages, 4, "should have 3 pages") require.Len(t, frSite.RegularPages(), 4, "should have 3 pages")
require.Len(t, frSite.AllPages, 32, "should have 32 total pages (including translations and nodes)") require.Len(t, frSite.AllPages(), 32, "should have 32 total pages (including translations and nodes)")
for _, frenchPage := range frSite.RegularPages { for _, frenchPage := range frSite.RegularPages() {
require.Equal(t, "fr", frenchPage.Lang()) p := frenchPage
require.Equal(t, "fr", p.Language().Lang)
} }
// See https://github.com/gohugoio/hugo/issues/4285 // See https://github.com/gohugoio/hugo/issues/4285
@ -307,10 +297,10 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
// isn't ideal in a multilingual setup. You want a way to get the current language version if available. // isn't ideal in a multilingual setup. You want a way to get the current language version if available.
// Now you can do lookups with translation base name to get that behaviour. // Now you can do lookups with translation base name to get that behaviour.
// Let us test all the regular page variants: // Let us test all the regular page variants:
getPageDoc1En := enSite.getPage(KindPage, filepath.ToSlash(doc1en.Path())) getPageDoc1En := enSite.getPage(page.KindPage, filepath.ToSlash(doc1en.File().Path()))
getPageDoc1EnBase := enSite.getPage(KindPage, "sect/doc1") getPageDoc1EnBase := enSite.getPage(page.KindPage, "sect/doc1")
getPageDoc1Fr := frSite.getPage(KindPage, filepath.ToSlash(doc1fr.Path())) getPageDoc1Fr := frSite.getPage(page.KindPage, filepath.ToSlash(doc1fr.File().Path()))
getPageDoc1FrBase := frSite.getPage(KindPage, "sect/doc1") getPageDoc1FrBase := frSite.getPage(page.KindPage, "sect/doc1")
require.Equal(t, doc1en, getPageDoc1En) require.Equal(t, doc1en, getPageDoc1En)
require.Equal(t, doc1fr, getPageDoc1Fr) require.Equal(t, doc1fr, getPageDoc1Fr)
require.Equal(t, doc1en, getPageDoc1EnBase) require.Equal(t, doc1en, getPageDoc1EnBase)
@ -328,35 +318,36 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Shortcode: Hello", "LingoDefault") b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Shortcode: Hello", "LingoDefault")
// Check node translations // Check node translations
homeEn := enSite.getPage(KindHome) homeEn := enSite.getPage(page.KindHome)
require.NotNil(t, homeEn) require.NotNil(t, homeEn)
require.Len(t, homeEn.Translations(), 3) require.Len(t, homeEn.Translations(), 3)
require.Equal(t, "fr", homeEn.Translations()[0].Lang()) require.Equal(t, "fr", homeEn.Translations()[0].Language().Lang)
require.Equal(t, "nn", homeEn.Translations()[1].Lang()) require.Equal(t, "nn", homeEn.Translations()[1].Language().Lang)
require.Equal(t, "På nynorsk", homeEn.Translations()[1].title) require.Equal(t, "På nynorsk", homeEn.Translations()[1].Title())
require.Equal(t, "nb", homeEn.Translations()[2].Lang()) require.Equal(t, "nb", homeEn.Translations()[2].Language().Lang)
require.Equal(t, "På bokmål", homeEn.Translations()[2].title, configSuffix) require.Equal(t, "På bokmål", homeEn.Translations()[2].Title(), configSuffix)
require.Equal(t, "Bokmål", homeEn.Translations()[2].Language().LanguageName, configSuffix) require.Equal(t, "Bokmål", homeEn.Translations()[2].Language().LanguageName, configSuffix)
sectFr := frSite.getPage(KindSection, "sect") sectFr := frSite.getPage(page.KindSection, "sect")
require.NotNil(t, sectFr) require.NotNil(t, sectFr)
require.Equal(t, "fr", sectFr.Lang()) require.Equal(t, "fr", sectFr.Language().Lang)
require.Len(t, sectFr.Translations(), 1) require.Len(t, sectFr.Translations(), 1)
require.Equal(t, "en", sectFr.Translations()[0].Lang()) require.Equal(t, "en", sectFr.Translations()[0].Language().Lang)
require.Equal(t, "Sects", sectFr.Translations()[0].title) require.Equal(t, "Sects", sectFr.Translations()[0].Title())
nnSite := sites[2] nnSite := sites[2]
require.Equal(t, "nn", nnSite.Language.Lang) require.Equal(t, "nn", nnSite.language.Lang)
taxNn := nnSite.getPage(KindTaxonomyTerm, "lag") taxNn := nnSite.getPage(page.KindTaxonomyTerm, "lag")
require.NotNil(t, taxNn) require.NotNil(t, taxNn)
require.Len(t, taxNn.Translations(), 1) require.Len(t, taxNn.Translations(), 1)
require.Equal(t, "nb", taxNn.Translations()[0].Lang()) require.Equal(t, "nb", taxNn.Translations()[0].Language().Lang)
taxTermNn := nnSite.getPage(KindTaxonomy, "lag", "sogndal") taxTermNn := nnSite.getPage(page.KindTaxonomy, "lag", "sogndal")
require.NotNil(t, taxTermNn) require.NotNil(t, taxTermNn)
require.Equal(t, taxTermNn, nnSite.getPage(page.KindTaxonomy, "LAG", "SOGNDAL"))
require.Len(t, taxTermNn.Translations(), 1) require.Len(t, taxTermNn.Translations(), 1)
require.Equal(t, "nb", taxTermNn.Translations()[0].Lang()) require.Equal(t, "nb", taxTermNn.Translations()[0].Language().Lang)
// Check sitemap(s) // Check sitemap(s)
b.AssertFileContent("public/sitemap.xml", b.AssertFileContent("public/sitemap.xml",
@ -371,59 +362,53 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
require.Len(t, enTags, 2, fmt.Sprintf("Tags in en: %v", enTags)) require.Len(t, enTags, 2, fmt.Sprintf("Tags in en: %v", enTags))
require.Len(t, frTags, 2, fmt.Sprintf("Tags in fr: %v", frTags)) require.Len(t, frTags, 2, fmt.Sprintf("Tags in fr: %v", frTags))
require.NotNil(t, enTags["tag1"]) require.NotNil(t, enTags["tag1"])
require.NotNil(t, frTags["frtag1"]) require.NotNil(t, frTags["FRtag1"])
b.AssertFileContent("public/fr/plaques/frtag1/index.html", "Frtag1|Bonjour|http://example.com/blog/fr/plaques/frtag1/") b.AssertFileContent("public/fr/plaques/FRtag1/index.html", "FRtag1|Bonjour|http://example.com/blog/fr/plaques/FRtag1/")
b.AssertFileContent("public/en/tags/tag1/index.html", "Tag1|Hello|http://example.com/blog/en/tags/tag1/") b.AssertFileContent("public/en/tags/tag1/index.html", "tag1|Hello|http://example.com/blog/en/tags/tag1/")
// Check Blackfriday config // Check Blackfriday config
require.True(t, strings.Contains(string(doc1fr.content()), "&laquo;"), string(doc1fr.content())) require.True(t, strings.Contains(content(doc1fr), "&laquo;"), content(doc1fr))
require.False(t, strings.Contains(string(doc1en.content()), "&laquo;"), string(doc1en.content())) require.False(t, strings.Contains(content(doc1en), "&laquo;"), content(doc1en))
require.True(t, strings.Contains(string(doc1en.content()), "&ldquo;"), string(doc1en.content())) require.True(t, strings.Contains(content(doc1en), "&ldquo;"), content(doc1en))
// Check that the drafts etc. are not built/processed/rendered.
assertShouldNotBuild(t, b.H)
// en and nn have custom site menus // en and nn have custom site menus
require.Len(t, frSite.Menus, 0, "fr: "+configSuffix) require.Len(t, frSite.Menus(), 0, "fr: "+configSuffix)
require.Len(t, enSite.Menus, 1, "en: "+configSuffix) require.Len(t, enSite.Menus(), 1, "en: "+configSuffix)
require.Len(t, nnSite.Menus, 1, "nn: "+configSuffix) require.Len(t, nnSite.Menus(), 1, "nn: "+configSuffix)
require.Equal(t, "Home", enSite.Menus["main"].ByName()[0].Name) require.Equal(t, "Home", enSite.Menus()["main"].ByName()[0].Name)
require.Equal(t, "Heim", nnSite.Menus["main"].ByName()[0].Name) require.Equal(t, "Heim", nnSite.Menus()["main"].ByName()[0].Name)
// Issue #1302
require.Equal(t, template.URL(""), enSite.RegularPages[0].RSSLink())
// Issue #3108 // Issue #3108
prevPage := enSite.RegularPages[0].PrevPage prevPage := enSite.RegularPages()[0].Prev()
require.NotNil(t, prevPage) require.NotNil(t, prevPage)
require.Equal(t, KindPage, prevPage.Kind) require.Equal(t, page.KindPage, prevPage.Kind())
for { for {
if prevPage == nil { if prevPage == nil {
break break
} }
require.Equal(t, KindPage, prevPage.Kind) require.Equal(t, page.KindPage, prevPage.Kind())
prevPage = prevPage.PrevPage prevPage = prevPage.Prev()
} }
// Check bundles // Check bundles
bundleFr := frSite.getPage(KindPage, "bundles/b1/index.md") b.AssertFileContent("public/fr/bundles/b1/index.html", "RelPermalink: /blog/fr/bundles/b1/|")
bundleFr := frSite.getPage(page.KindPage, "bundles/b1/index.md")
require.NotNil(t, bundleFr) require.NotNil(t, bundleFr)
require.Equal(t, "/blog/fr/bundles/b1/", bundleFr.RelPermalink()) require.Equal(t, 1, len(bundleFr.Resources()))
require.Equal(t, 1, len(bundleFr.Resources)) logoFr := bundleFr.Resources().GetMatch("logo*")
logoFr := bundleFr.Resources.GetMatch("logo*")
require.NotNil(t, logoFr) require.NotNil(t, logoFr)
require.Equal(t, "/blog/fr/bundles/b1/logo.png", logoFr.RelPermalink()) b.AssertFileContent("public/fr/bundles/b1/index.html", "Resources: image/png: /blog/fr/bundles/b1/logo.png")
b.AssertFileContent("public/fr/bundles/b1/logo.png", "PNG Data") b.AssertFileContent("public/fr/bundles/b1/logo.png", "PNG Data")
bundleEn := enSite.getPage(KindPage, "bundles/b1/index.en.md") bundleEn := enSite.getPage(page.KindPage, "bundles/b1/index.en.md")
require.NotNil(t, bundleEn) require.NotNil(t, bundleEn)
require.Equal(t, "/blog/en/bundles/b1/", bundleEn.RelPermalink()) b.AssertFileContent("public/en/bundles/b1/index.html", "RelPermalink: /blog/en/bundles/b1/|")
require.Equal(t, 1, len(bundleEn.Resources)) require.Equal(t, 1, len(bundleEn.Resources()))
logoEn := bundleEn.Resources.GetMatch("logo*") logoEn := bundleEn.Resources().GetMatch("logo*")
require.NotNil(t, logoEn) require.NotNil(t, logoEn)
require.Equal(t, "/blog/en/bundles/b1/logo.png", logoEn.RelPermalink()) b.AssertFileContent("public/en/bundles/b1/index.html", "Resources: image/png: /blog/en/bundles/b1/logo.png")
b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data") b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data")
} }
@ -442,13 +427,13 @@ func TestMultiSitesRebuild(t *testing.T) {
sites := b.H.Sites sites := b.H.Sites
fs := b.Fs fs := b.Fs
b.AssertFileContent("public/en/sect/doc2/index.html", "Single: doc2|Hello|en|\n\n<h1 id=\"doc2\">doc2</h1>\n\n<p><em>some content</em>") b.AssertFileContent("public/en/sect/doc2/index.html", "Single: doc2|Hello|en|", "\n\n<h1 id=\"doc2\">doc2</h1>\n\n<p><em>some content</em>")
enSite := sites[0] enSite := sites[0]
frSite := sites[1] frSite := sites[1]
assert.Len(enSite.RegularPages, 5) assert.Len(enSite.RegularPages(), 5)
assert.Len(frSite.RegularPages, 4) assert.Len(frSite.RegularPages(), 4)
// Verify translations // Verify translations
b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Hello") b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Hello")
@ -458,6 +443,10 @@ func TestMultiSitesRebuild(t *testing.T) {
b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Shortcode: Bonjour") b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Shortcode: Bonjour")
b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Shortcode: Hello") b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Shortcode: Hello")
homeEn := enSite.getPage(page.KindHome)
require.NotNil(t, homeEn)
assert.Len(homeEn.Translations(), 3)
contentFs := b.H.BaseFs.Content.Fs contentFs := b.H.BaseFs.Content.Fs
for i, this := range []struct { for i, this := range []struct {
@ -478,15 +467,15 @@ func TestMultiSitesRebuild(t *testing.T) {
}, },
[]fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc2.en.md"), Op: fsnotify.Remove}}, []fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc2.en.md"), Op: fsnotify.Remove}},
func(t *testing.T) { func(t *testing.T) {
assert.Len(enSite.RegularPages, 4, "1 en removed") assert.Len(enSite.RegularPages(), 4, "1 en removed")
// Check build stats // Check build stats
require.Equal(t, 1, enSite.draftCount, "Draft") require.Equal(t, 1, enSite.buildStats.draftCount, "Draft")
require.Equal(t, 1, enSite.futureCount, "Future") require.Equal(t, 1, enSite.buildStats.futureCount, "Future")
require.Equal(t, 1, enSite.expiredCount, "Expired") require.Equal(t, 1, enSite.buildStats.expiredCount, "Expired")
require.Equal(t, 0, frSite.draftCount, "Draft") require.Equal(t, 0, frSite.buildStats.draftCount, "Draft")
require.Equal(t, 1, frSite.futureCount, "Future") require.Equal(t, 1, frSite.buildStats.futureCount, "Future")
require.Equal(t, 1, frSite.expiredCount, "Expired") require.Equal(t, 1, frSite.buildStats.expiredCount, "Expired")
}, },
}, },
{ {
@ -501,12 +490,12 @@ func TestMultiSitesRebuild(t *testing.T) {
{Name: filepath.FromSlash("content/new1.fr.md"), Op: fsnotify.Create}, {Name: filepath.FromSlash("content/new1.fr.md"), Op: fsnotify.Create},
}, },
func(t *testing.T) { func(t *testing.T) {
assert.Len(enSite.RegularPages, 6) assert.Len(enSite.RegularPages(), 6)
assert.Len(enSite.AllPages, 34) assert.Len(enSite.AllPages(), 34)
assert.Len(frSite.RegularPages, 5) assert.Len(frSite.RegularPages(), 5)
require.Equal(t, "new_fr_1", frSite.RegularPages[3].title) require.Equal(t, "new_fr_1", frSite.RegularPages()[3].Title())
require.Equal(t, "new_en_2", enSite.RegularPages[0].title) require.Equal(t, "new_en_2", enSite.RegularPages()[0].Title())
require.Equal(t, "new_en_1", enSite.RegularPages[1].title) require.Equal(t, "new_en_1", enSite.RegularPages()[1].Title())
rendered := readDestination(t, fs, "public/en/new1/index.html") rendered := readDestination(t, fs, "public/en/new1/index.html")
require.True(t, strings.Contains(rendered, "new_en_1"), rendered) require.True(t, strings.Contains(rendered, "new_en_1"), rendered)
@ -521,7 +510,7 @@ func TestMultiSitesRebuild(t *testing.T) {
}, },
[]fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc1.en.md"), Op: fsnotify.Write}}, []fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc1.en.md"), Op: fsnotify.Write}},
func(t *testing.T) { func(t *testing.T) {
assert.Len(enSite.RegularPages, 6) assert.Len(enSite.RegularPages(), 6)
doc1 := readDestination(t, fs, "public/en/sect/doc1-slug/index.html") doc1 := readDestination(t, fs, "public/en/sect/doc1-slug/index.html")
require.True(t, strings.Contains(doc1, "CHANGED"), doc1) require.True(t, strings.Contains(doc1, "CHANGED"), doc1)
@ -539,8 +528,8 @@ func TestMultiSitesRebuild(t *testing.T) {
{Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Rename}, {Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Rename},
}, },
func(t *testing.T) { func(t *testing.T) {
assert.Len(enSite.RegularPages, 6, "Rename") assert.Len(enSite.RegularPages(), 6, "Rename")
require.Equal(t, "new_en_1", enSite.RegularPages[1].title) require.Equal(t, "new_en_1", enSite.RegularPages()[1].Title())
rendered := readDestination(t, fs, "public/en/new1renamed/index.html") rendered := readDestination(t, fs, "public/en/new1renamed/index.html")
require.True(t, strings.Contains(rendered, "new_en_1"), rendered) require.True(t, strings.Contains(rendered, "new_en_1"), rendered)
}}, }},
@ -554,9 +543,9 @@ func TestMultiSitesRebuild(t *testing.T) {
}, },
[]fsnotify.Event{{Name: filepath.FromSlash("layouts/_default/single.html"), Op: fsnotify.Write}}, []fsnotify.Event{{Name: filepath.FromSlash("layouts/_default/single.html"), Op: fsnotify.Write}},
func(t *testing.T) { func(t *testing.T) {
assert.Len(enSite.RegularPages, 6) assert.Len(enSite.RegularPages(), 6)
assert.Len(enSite.AllPages, 34) assert.Len(enSite.AllPages(), 34)
assert.Len(frSite.RegularPages, 5) assert.Len(frSite.RegularPages(), 5)
doc1 := readDestination(t, fs, "public/en/sect/doc1-slug/index.html") doc1 := readDestination(t, fs, "public/en/sect/doc1-slug/index.html")
require.True(t, strings.Contains(doc1, "Template Changed"), doc1) require.True(t, strings.Contains(doc1, "Template Changed"), doc1)
}, },
@ -571,18 +560,18 @@ func TestMultiSitesRebuild(t *testing.T) {
}, },
[]fsnotify.Event{{Name: filepath.FromSlash("i18n/fr.yaml"), Op: fsnotify.Write}}, []fsnotify.Event{{Name: filepath.FromSlash("i18n/fr.yaml"), Op: fsnotify.Write}},
func(t *testing.T) { func(t *testing.T) {
assert.Len(enSite.RegularPages, 6) assert.Len(enSite.RegularPages(), 6)
assert.Len(enSite.AllPages, 34) assert.Len(enSite.AllPages(), 34)
assert.Len(frSite.RegularPages, 5) assert.Len(frSite.RegularPages(), 5)
docEn := readDestination(t, fs, "public/en/sect/doc1-slug/index.html") docEn := readDestination(t, fs, "public/en/sect/doc1-slug/index.html")
require.True(t, strings.Contains(docEn, "Hello"), "No Hello") require.True(t, strings.Contains(docEn, "Hello"), "No Hello")
docFr := readDestination(t, fs, "public/fr/sect/doc1/index.html") docFr := readDestination(t, fs, "public/fr/sect/doc1/index.html")
require.True(t, strings.Contains(docFr, "Salut"), "No Salut") require.True(t, strings.Contains(docFr, "Salut"), "No Salut")
homeEn := enSite.getPage(KindHome) homeEn := enSite.getPage(page.KindHome)
require.NotNil(t, homeEn) require.NotNil(t, homeEn)
assert.Len(homeEn.Translations(), 3) assert.Len(homeEn.Translations(), 3)
require.Equal(t, "fr", homeEn.Translations()[0].Lang()) require.Equal(t, "fr", homeEn.Translations()[0].Language().Lang)
}, },
}, },
@ -595,9 +584,9 @@ func TestMultiSitesRebuild(t *testing.T) {
{Name: filepath.FromSlash("layouts/shortcodes/shortcode.html"), Op: fsnotify.Write}, {Name: filepath.FromSlash("layouts/shortcodes/shortcode.html"), Op: fsnotify.Write},
}, },
func(t *testing.T) { func(t *testing.T) {
assert.Len(enSite.RegularPages, 6) assert.Len(enSite.RegularPages(), 6)
assert.Len(enSite.AllPages, 34) assert.Len(enSite.AllPages(), 34)
assert.Len(frSite.RegularPages, 5) assert.Len(frSite.RegularPages(), 5)
b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Modified Shortcode: Salut") b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Modified Shortcode: Salut")
b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Modified Shortcode: Hello") b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Modified Shortcode: Hello")
}, },
@ -617,23 +606,6 @@ func TestMultiSitesRebuild(t *testing.T) {
this.assertFunc(t) this.assertFunc(t)
} }
// Check that the drafts etc. are not built/processed/rendered.
assertShouldNotBuild(t, b.H)
}
func assertShouldNotBuild(t *testing.T, sites *HugoSites) {
s := sites.Sites[0]
for _, p := range s.rawAllPages {
// No HTML when not processed
require.Equal(t, p.shouldBuild(), bytes.Contains(p.workContent, []byte("</")), p.BaseFileName()+": "+string(p.workContent))
require.Equal(t, p.shouldBuild(), p.content() != "", fmt.Sprintf("%v:%v", p.content(), p.shouldBuild()))
require.Equal(t, p.shouldBuild(), p.content() != "", p.BaseFileName())
}
} }
func TestAddNewLanguage(t *testing.T) { func TestAddNewLanguage(t *testing.T) {
@ -671,31 +643,32 @@ title = "Svenska"
enSite := sites.Sites[0] enSite := sites.Sites[0]
svSite := sites.Sites[1] svSite := sites.Sites[1]
frSite := sites.Sites[2] frSite := sites.Sites[2]
require.True(t, enSite.Language.Lang == "en", enSite.Language.Lang) require.True(t, enSite.language.Lang == "en", enSite.language.Lang)
require.True(t, svSite.Language.Lang == "sv", svSite.Language.Lang) require.True(t, svSite.language.Lang == "sv", svSite.language.Lang)
require.True(t, frSite.Language.Lang == "fr", frSite.Language.Lang) require.True(t, frSite.language.Lang == "fr", frSite.language.Lang)
homeEn := enSite.getPage(KindHome) homeEn := enSite.getPage(page.KindHome)
require.NotNil(t, homeEn) require.NotNil(t, homeEn)
require.Len(t, homeEn.Translations(), 4) require.Len(t, homeEn.Translations(), 4)
require.Equal(t, "sv", homeEn.Translations()[0].Lang())
require.Len(t, enSite.RegularPages, 5) require.Equal(t, "sv", homeEn.Translations()[0].Language().Lang)
require.Len(t, frSite.RegularPages, 4)
require.Len(t, enSite.RegularPages(), 5)
require.Len(t, frSite.RegularPages(), 4)
// Veriy Swedish site // Veriy Swedish site
require.Len(t, svSite.RegularPages, 1) require.Len(t, svSite.RegularPages(), 1)
svPage := svSite.RegularPages[0] svPage := svSite.RegularPages()[0]
require.Equal(t, "Swedish Contentfile", svPage.title) require.Equal(t, "Swedish Contentfile", svPage.Title())
require.Equal(t, "sv", svPage.Lang()) require.Equal(t, "sv", svPage.Language().Lang)
require.Len(t, svPage.Translations(), 2) require.Len(t, svPage.Translations(), 2)
require.Len(t, svPage.AllTranslations(), 3) require.Len(t, svPage.AllTranslations(), 3)
require.Equal(t, "en", svPage.Translations()[0].Lang()) require.Equal(t, "en", svPage.Translations()[0].Language().Lang)
// Regular pages have no children // Regular pages have no children
require.Len(t, svPage.Pages, 0) require.Len(t, svPage.Pages(), 0)
require.Len(t, svPage.data["Pages"], 0) require.Len(t, svPage.Data().(page.Data).Pages(), 0)
} }
@ -782,12 +755,12 @@ Some text. Some more text.
content = append(content, []string{"s2/_index.md", fmt.Sprintf(contentTempl, defaultOutputs, fmt.Sprintf("S %d", 2), 2, true)}...) content = append(content, []string{"s2/_index.md", fmt.Sprintf(contentTempl, defaultOutputs, fmt.Sprintf("S %d", 2), 2, true)}...)
b.WithSimpleConfigFile() b.WithSimpleConfigFile()
b.WithTemplates("layouts/_default/single.html", `Single: {{ .Content }}`) b.WithTemplates("layouts/_default/single.html", `Single: {{ .Content }}|RelPermalink: {{ .RelPermalink }}|Permalink: {{ .Permalink }}`)
b.WithTemplates("layouts/_default/myview.html", `View: {{ len .Content }}`) b.WithTemplates("layouts/_default/myview.html", `View: {{ len .Content }}`)
b.WithTemplates("layouts/_default/single.json", `Single JSON: {{ .Content }}`) b.WithTemplates("layouts/_default/single.json", `Single JSON: {{ .Content }}|RelPermalink: {{ .RelPermalink }}|Permalink: {{ .Permalink }}`)
b.WithTemplates("layouts/_default/list.html", ` b.WithTemplates("layouts/_default/list.html", `
Page: {{ .Paginator.PageNumber }} Page: {{ .Paginator.PageNumber }}
P: {{ path.Join .Path }} P: {{ with .File }}{{ path.Join .Path }}{{ end }}
List: {{ len .Paginator.Pages }}|List Content: {{ len .Content }} List: {{ len .Paginator.Pages }}|List Content: {{ len .Content }}
{{ $shuffled := where .Site.RegularPages "Params.multioutput" true | shuffle }} {{ $shuffled := where .Site.RegularPages "Params.multioutput" true | shuffle }}
{{ $first5 := $shuffled | first 5 }} {{ $first5 := $shuffled | first 5 }}
@ -810,7 +783,7 @@ END
if i%10 == 0 { if i%10 == 0 {
section = "s2" section = "s2"
} }
checkContent(b, fmt.Sprintf("public/%s/page%d/index.html", section, i), 8343, contentMatchers...) checkContent(b, fmt.Sprintf("public/%s/page%d/index.html", section, i), contentMatchers...)
} }
} }
@ -819,48 +792,158 @@ END
if i%10 == 0 { if i%10 == 0 {
section = "s2" section = "s2"
} }
checkContent(b, fmt.Sprintf("public/%s/page%d/index.json", section, i), 8348, contentMatchers...) checkContent(b, fmt.Sprintf("public/%s/page%d/index.json", section, i), contentMatchers...)
} }
checkContent(b, "public/s1/index.html", 184, "P: s1/_index.md\nList: 10|List Content: 8335\n\n\nL1: 500 L2: 5\n\nRender 0: View: 8335\n\nRender 1: View: 8335\n\nRender 2: View: 8335\n\nRender 3: View: 8335\n\nRender 4: View: 8335\n\nEND\n") checkContent(b, "public/s1/index.html", "P: s1/_index.md\nList: 10|List Content: 8335\n\n\nL1: 500 L2: 5\n\nRender 0: View: 8335\n\nRender 1: View: 8335\n\nRender 2: View: 8335\n\nRender 3: View: 8335\n\nRender 4: View: 8335\n\nEND\n")
checkContent(b, "public/s2/index.html", 184, "P: s2/_index.md\nList: 10|List Content: 8335", "Render 4: View: 8335\n\nEND") checkContent(b, "public/s2/index.html", "P: s2/_index.md\nList: 10|List Content: 8335", "Render 4: View: 8335\n\nEND")
checkContent(b, "public/index.html", 181, "P: _index.md\nList: 10|List Content: 8335", "4: View: 8335\n\nEND") checkContent(b, "public/index.html", "P: _index.md\nList: 10|List Content: 8335", "4: View: 8335\n\nEND")
// Chek paginated pages // Check paginated pages
for i := 2; i <= 9; i++ { for i := 2; i <= 9; i++ {
checkContent(b, fmt.Sprintf("public/page/%d/index.html", i), 181, fmt.Sprintf("Page: %d", i), "Content: 8335\n\n\nL1: 500 L2: 5\n\nRender 0: View: 8335", "Render 4: View: 8335\n\nEND") checkContent(b, fmt.Sprintf("public/page/%d/index.html", i), fmt.Sprintf("Page: %d", i), "Content: 8335\n\n\nL1: 500 L2: 5\n\nRender 0: View: 8335", "Render 4: View: 8335\n\nEND")
} }
} }
func checkContent(s *sitesBuilder, filename string, length int, matches ...string) { func checkContent(s *sitesBuilder, filename string, matches ...string) {
content := readDestination(s.T, s.Fs, filename) content := readDestination(s.T, s.Fs, filename)
for _, match := range matches { for _, match := range matches {
if !strings.Contains(content, match) { if !strings.Contains(content, match) {
s.Fatalf("No match for %q in content for %s\n%q", match, filename, content) s.Fatalf("No match for %q in content for %s\n%q", match, filename, content)
} }
} }
if len(content) != length {
s.Fatalf("got %d expected %d", len(content), length) }
func TestTranslationsFromContentToNonContent(t *testing.T) {
b := newTestSitesBuilder(t)
b.WithConfigFile("toml", `
baseURL = "http://example.com/"
defaultContentLanguage = "en"
[languages]
[languages.en]
weight = 10
contentDir = "content/en"
[languages.nn]
weight = 20
contentDir = "content/nn"
`)
b.WithContent("en/mysection/_index.md", `
---
Title: My Section
---
`)
b.WithContent("en/_index.md", `
---
Title: My Home
---
`)
b.WithContent("en/categories/mycat/_index.md", `
---
Title: My MyCat
---
`)
b.WithContent("en/categories/_index.md", `
---
Title: My categories
---
`)
for _, lang := range []string{"en", "nn"} {
b.WithContent(lang+"/mysection/page.md", `
---
Title: My Page
categories: ["mycat"]
---
`)
}
b.Build(BuildCfg{})
for _, path := range []string{
"/",
"/mysection",
"/categories",
"/categories/mycat",
} {
t.Run(path, func(t *testing.T) {
assert := require.New(t)
s1, _ := b.H.Sites[0].getPageNew(nil, path)
s2, _ := b.H.Sites[1].getPageNew(nil, path)
assert.NotNil(s1)
assert.NotNil(s2)
assert.Equal(1, len(s1.Translations()))
assert.Equal(1, len(s2.Translations()))
assert.Equal(s2, s1.Translations()[0])
assert.Equal(s1, s2.Translations()[0])
m1 := s1.Translations().MergeByLanguage(s2.Translations())
m2 := s2.Translations().MergeByLanguage(s1.Translations())
assert.Equal(1, len(m1))
assert.Equal(1, len(m2))
})
} }
} }
// https://github.com/gohugoio/hugo/issues/5777
func TestTableOfContentsInShortcodes(t *testing.T) { func TestTableOfContentsInShortcodes(t *testing.T) {
t.Parallel() t.Parallel()
b := newMultiSiteTestDefaultBuilder(t) b := newMultiSiteTestDefaultBuilder(t)
b.WithTemplatesAdded("layouts/shortcodes/toc.html", tocShortcode) b.WithTemplatesAdded("layouts/shortcodes/toc.html", tocShortcode)
b.WithTemplatesAdded("layouts/shortcodes/wrapper.html", "{{ .Inner }}")
b.WithContent("post/simple.en.md", tocPageSimple) b.WithContent("post/simple.en.md", tocPageSimple)
b.WithContent("post/variants1.en.md", tocPageVariants1)
b.WithContent("post/variants2.en.md", tocPageVariants2)
b.WithContent("post/withSCInHeading.en.md", tocPageWithShortcodesInHeadings) b.WithContent("post/withSCInHeading.en.md", tocPageWithShortcodesInHeadings)
b.CreateSites().Build(BuildCfg{}) b.CreateSites().Build(BuildCfg{})
b.AssertFileContent("public/en/post/simple/index.html", tocPageSimpleExpected) b.AssertFileContent("public/en/post/simple/index.html",
tocPageSimpleExpected,
// Make sure it is inserted twice
`TOC1: <nav id="TableOfContents">`,
`TOC2: <nav id="TableOfContents">`,
)
b.AssertFileContentFn("public/en/post/variants1/index.html", func(s string) bool {
return strings.Count(s, "TableOfContents") == 4
})
b.AssertFileContentFn("public/en/post/variants2/index.html", func(s string) bool {
return strings.Count(s, "TableOfContents") == 6
})
b.AssertFileContent("public/en/post/withSCInHeading/index.html", tocPageWithShortcodesInHeadingsExpected) b.AssertFileContent("public/en/post/withSCInHeading/index.html", tocPageWithShortcodesInHeadingsExpected)
} }
var tocShortcode = ` var tocShortcode = `
{{ .Page.TableOfContents }} TOC1: {{ .Page.TableOfContents }}
TOC2: {{ .Page.TableOfContents }}
` `
func TestSelfReferencedContentInShortcode(t *testing.T) { func TestSelfReferencedContentInShortcode(t *testing.T) {
@ -901,6 +984,41 @@ Even more text.
Lorem ipsum... Lorem ipsum...
` `
var tocPageVariants1 = `---
title: tocTest
publishdate: "2000-01-01"
---
Variant 1:
{{% wrapper %}}
{{< toc >}}
{{% /wrapper %}}
# Heading 1
Variant 3:
{{% toc %}}
`
var tocPageVariants2 = `---
title: tocTest
publishdate: "2000-01-01"
---
Variant 1:
{{% wrapper %}}
{{< toc >}}
{{% /wrapper %}}
# Heading 1
Variant 2:
{{< wrapper >}}
{{< toc >}}
{{< /wrapper >}}
Variant 3:
{{% toc %}}
`
var tocPageSimpleExpected = `<nav id="TableOfContents"> var tocPageSimpleExpected = `<nav id="TableOfContents">
<ul> <ul>
<li><a href="#1">Heading 1</a> <li><a href="#1">Heading 1</a>
@ -958,6 +1076,7 @@ paginate = 1
disablePathToLower = true disablePathToLower = true
defaultContentLanguage = "{{ .DefaultContentLanguage }}" defaultContentLanguage = "{{ .DefaultContentLanguage }}"
defaultContentLanguageInSubdir = {{ .DefaultContentLanguageInSubdir }} defaultContentLanguageInSubdir = {{ .DefaultContentLanguageInSubdir }}
enableRobotsTXT = true
[permalinks] [permalinks]
other = "/somewhere/else/:filename" other = "/somewhere/else/:filename"
@ -1015,6 +1134,7 @@ disablePathToLower: true
paginate: 1 paginate: 1
defaultContentLanguage: "{{ .DefaultContentLanguage }}" defaultContentLanguage: "{{ .DefaultContentLanguage }}"
defaultContentLanguageInSubdir: {{ .DefaultContentLanguageInSubdir }} defaultContentLanguageInSubdir: {{ .DefaultContentLanguageInSubdir }}
enableRobotsTXT: true
permalinks: permalinks:
other: "/somewhere/else/:filename" other: "/somewhere/else/:filename"
@ -1073,6 +1193,7 @@ var multiSiteJSONConfigTemplate = `
"disablePathToLower": true, "disablePathToLower": true,
"defaultContentLanguage": "{{ .DefaultContentLanguage }}", "defaultContentLanguage": "{{ .DefaultContentLanguage }}",
"defaultContentLanguageInSubdir": true, "defaultContentLanguageInSubdir": true,
"enableRobotsTXT": true,
"permalinks": { "permalinks": {
"other": "/somewhere/else/:filename" "other": "/somewhere/else/:filename"
}, },
@ -1170,7 +1291,23 @@ func readFileFromFs(t testing.TB, fs afero.Fs, filename string) string {
b, err := afero.ReadFile(fs, filename) b, err := afero.ReadFile(fs, filename)
if err != nil { if err != nil {
// Print some debug info // Print some debug info
root := strings.Split(filename, helpers.FilePathSeparator)[0] hadSlash := strings.HasPrefix(filename, helpers.FilePathSeparator)
start := 0
if hadSlash {
start = 1
}
end := start + 1
parts := strings.Split(filename, helpers.FilePathSeparator)
if parts[start] == "work" {
end++
}
root := filepath.Join(parts[start:end]...)
if hadSlash {
root = helpers.FilePathSeparator + root
}
helpers.PrintFs(fs, root, os.Stdout) helpers.PrintFs(fs, root, os.Stdout)
Fatalf(t, "Failed to read file: %s", err) Fatalf(t, "Failed to read file: %s", err)
} }
@ -1262,8 +1399,8 @@ NOTE: slug should be used as URL
title: doc1 title: doc1
weight: 1 weight: 1
plaques: plaques:
- frtag1 - FRtag1
- frtag2 - FRtag2
publishdate: "2000-01-04" publishdate: "2000-01-04"
--- ---
# doc1 # doc1
@ -1293,7 +1430,7 @@ aliases: [/en/al/alias1,/al/alias2/]
tags: tags:
- tag2 - tag2
- tag1 - tag1
url: /superbob url: /superbob/
--- ---
# doc3 # doc3
*some content* *some content*
@ -1303,7 +1440,7 @@ NOTE: third 'en' doc, should trigger pagination on home page.
title: doc4 title: doc4
weight: 4 weight: 4
plaques: plaques:
- frtag1 - FRtag1
publishdate: "2000-01-05" publishdate: "2000-01-05"
--- ---
# doc4 # doc4

View File

@ -3,6 +3,8 @@ package hugolib
import ( import (
"testing" "testing"
"github.com/gohugoio/hugo/resources/page"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -55,7 +57,7 @@ languageName = "Nynorsk"
s1 := b.H.Sites[0] s1 := b.H.Sites[0]
s1h := s1.getPage(KindHome) s1h := s1.getPage(page.KindHome)
assert.True(s1h.IsTranslated()) assert.True(s1h.IsTranslated())
assert.Len(s1h.Translations(), 2) assert.Len(s1h.Translations(), 2)
assert.Equal("https://example.com/docs/", s1h.Permalink()) assert.Equal("https://example.com/docs/", s1h.Permalink())
@ -66,9 +68,8 @@ languageName = "Nynorsk"
// For multihost, we never want any content in the root. // For multihost, we never want any content in the root.
// //
// check url in front matter: // check url in front matter:
pageWithURLInFrontMatter := s1.getPage(KindPage, "sect/doc3.en.md") pageWithURLInFrontMatter := s1.getPage(page.KindPage, "sect/doc3.en.md")
assert.NotNil(pageWithURLInFrontMatter) assert.NotNil(pageWithURLInFrontMatter)
assert.Equal("/superbob", pageWithURLInFrontMatter.URL())
assert.Equal("/docs/superbob/", pageWithURLInFrontMatter.RelPermalink()) assert.Equal("/docs/superbob/", pageWithURLInFrontMatter.RelPermalink())
b.AssertFileContent("public/en/superbob/index.html", "doc3|Hello|en") b.AssertFileContent("public/en/superbob/index.html", "doc3|Hello|en")
@ -78,7 +79,7 @@ languageName = "Nynorsk"
s2 := b.H.Sites[1] s2 := b.H.Sites[1]
s2h := s2.getPage(KindHome) s2h := s2.getPage(page.KindHome)
assert.Equal("https://example.fr/", s2h.Permalink()) assert.Equal("https://example.fr/", s2h.Permalink())
b.AssertFileContent("public/fr/index.html", "French Home Page", "String Resource: /docs/text/pipes.txt") b.AssertFileContent("public/fr/index.html", "French Home Page", "String Resource: /docs/text/pipes.txt")
@ -94,22 +95,19 @@ languageName = "Nynorsk"
// Check bundles // Check bundles
bundleEn := s1.getPage(KindPage, "bundles/b1/index.en.md") bundleEn := s1.getPage(page.KindPage, "bundles/b1/index.en.md")
require.NotNil(t, bundleEn) require.NotNil(t, bundleEn)
require.Equal(t, "/docs/bundles/b1/", bundleEn.RelPermalink()) require.Equal(t, "/docs/bundles/b1/", bundleEn.RelPermalink())
require.Equal(t, 1, len(bundleEn.Resources)) require.Equal(t, 1, len(bundleEn.Resources()))
logoEn := bundleEn.Resources.GetMatch("logo*")
require.NotNil(t, logoEn)
require.Equal(t, "/docs/bundles/b1/logo.png", logoEn.RelPermalink())
b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data")
bundleFr := s2.getPage(KindPage, "bundles/b1/index.md") b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data")
b.AssertFileContent("public/en/bundles/b1/index.html", " image/png: /docs/bundles/b1/logo.png")
bundleFr := s2.getPage(page.KindPage, "bundles/b1/index.md")
require.NotNil(t, bundleFr) require.NotNil(t, bundleFr)
require.Equal(t, "/bundles/b1/", bundleFr.RelPermalink()) require.Equal(t, "/bundles/b1/", bundleFr.RelPermalink())
require.Equal(t, 1, len(bundleFr.Resources)) require.Equal(t, 1, len(bundleFr.Resources()))
logoFr := bundleFr.Resources.GetMatch("logo*")
require.NotNil(t, logoFr)
require.Equal(t, "/bundles/b1/logo.png", logoFr.RelPermalink())
b.AssertFileContent("public/fr/bundles/b1/logo.png", "PNG Data") b.AssertFileContent("public/fr/bundles/b1/logo.png", "PNG Data")
b.AssertFileContent("public/fr/bundles/b1/index.html", " image/png: /bundles/b1/logo.png")
} }

303
hugolib/hugo_smoke_test.go Normal file
View File

@ -0,0 +1,303 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"strings"
"testing"
"github.com/stretchr/testify/require"
)
func TestSmoke(t *testing.T) {
t.Parallel()
assert := require.New(t)
const configFile = `
baseURL = "https://example.com"
title = "Simple Site"
rssLimit = 3
defaultContentLanguage = "en"
enableRobotsTXT = true
[languages]
[languages.en]
weight = 1
title = "In English"
[languages.no]
weight = 2
title = "På norsk"
[params]
hugo = "Rules!"
[outputs]
home = ["HTML", "JSON", "CSV", "RSS"]
`
const pageContentAndSummaryDivider = `---
title: Page with outputs
hugo: "Rocks!"
outputs: ["HTML", "JSON"]
tags: [ "hugo" ]
aliases: [ "/a/b/c" ]
---
This is summary.
<!--more-->
This is content with some shortcodes.
Shortcode 1: {{< sc >}}.
Shortcode 2: {{< sc >}}.
`
const pageContentWithMarkdownShortcodes = `---
title: Page with markdown shortcode
hugo: "Rocks!"
outputs: ["HTML", "JSON"]
---
This is summary.
<!--more-->
This is content[^a].
# Header above
{{% markdown-shortcode %}}
# Header inside
Some **markdown**.[^b]
{{% /markdown-shortcode %}}
# Heder below
Some more content[^c].
Footnotes:
[^a]: Fn 1
[^b]: Fn 2
[^c]: Fn 3
`
var pageContentAutoSummary = strings.Replace(pageContentAndSummaryDivider, "<!--more-->", "", 1)
b := newTestSitesBuilder(t).WithConfigFile("toml", configFile)
b.WithTemplatesAdded("shortcodes/markdown-shortcode.html", `
Some **Markdown** in shortcode.
{{ .Inner }}
`)
b.WithTemplatesAdded("shortcodes/markdown-shortcode.json", `
Some **Markdown** in JSON shortcode.
{{ .Inner }}
`)
for i := 1; i <= 11; i++ {
if i%2 == 0 {
b.WithContent(fmt.Sprintf("blog/page%d.md", i), pageContentAndSummaryDivider)
b.WithContent(fmt.Sprintf("blog/page%d.no.md", i), pageContentAndSummaryDivider)
} else {
b.WithContent(fmt.Sprintf("blog/page%d.md", i), pageContentAutoSummary)
}
}
for i := 1; i <= 5; i++ {
// Root section pages
b.WithContent(fmt.Sprintf("root%d.md", i), pageContentAutoSummary)
}
// https://github.com/gohugoio/hugo/issues/4695
b.WithContent("blog/markyshort.md", pageContentWithMarkdownShortcodes)
// Add one bundle
b.WithContent("blog/mybundle/index.md", pageContentAndSummaryDivider)
b.WithContent("blog/mybundle/mydata.csv", "Bundled CSV")
const (
commonPageTemplate = `|{{ .Kind }}|{{ .Title }}|{{ .Path }}|{{ .Summary }}|{{ .Content }}|RelPermalink: {{ .RelPermalink }}|WordCount: {{ .WordCount }}|Pages: {{ .Pages }}|Data Pages: Pages({{ len .Data.Pages }})|Resources: {{ len .Resources }}|Summary: {{ .Summary }}`
commonPaginatorTemplate = `|Paginator: {{ with .Paginator }}{{ .PageNumber }}{{ else }}NIL{{ end }}`
commonListTemplateNoPaginator = `|{{ range $i, $e := (.Pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
commonListTemplate = commonPaginatorTemplate + `|{{ range $i, $e := (.Pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
commonShortcodeTemplate = `|{{ .Name }}|{{ .Ordinal }}|{{ .Page.Summary }}|{{ .Page.Content }}|WordCount: {{ .Page.WordCount }}`
prevNextTemplate = `|Prev: {{ with .Prev }}{{ .RelPermalink }}{{ end }}|Next: {{ with .Next }}{{ .RelPermalink }}{{ end }}`
prevNextInSectionTemplate = `|PrevInSection: {{ with .PrevInSection }}{{ .RelPermalink }}{{ end }}|NextInSection: {{ with .NextInSection }}{{ .RelPermalink }}{{ end }}`
paramsTemplate = `|Params: {{ .Params.hugo }}`
treeNavTemplate = `|CurrentSection: {{ .CurrentSection }}`
)
b.WithTemplates(
"_default/list.html", "HTML: List"+commonPageTemplate+commonListTemplate+"|First Site: {{ .Sites.First.Title }}",
"_default/list.json", "JSON: List"+commonPageTemplate+commonListTemplateNoPaginator,
"_default/list.csv", "CSV: List"+commonPageTemplate+commonListTemplateNoPaginator,
"_default/single.html", "HTML: Single"+commonPageTemplate+prevNextTemplate+prevNextInSectionTemplate+treeNavTemplate,
"_default/single.json", "JSON: Single"+commonPageTemplate,
// For .Render test
"_default/li.html", `HTML: LI|{{ strings.Contains .Content "HTML: Shortcode: sc" }}`+paramsTemplate,
"_default/li.json", `JSON: LI|{{ strings.Contains .Content "JSON: Shortcode: sc" }}`+paramsTemplate,
"_default/li.csv", `CSV: LI|{{ strings.Contains .Content "CSV: Shortcode: sc" }}`+paramsTemplate,
"404.html", "{{ .Kind }}|{{ .Title }}|Page not found",
"shortcodes/sc.html", "HTML: Shortcode: "+commonShortcodeTemplate,
"shortcodes/sc.json", "JSON: Shortcode: "+commonShortcodeTemplate,
"shortcodes/sc.csv", "CSV: Shortcode: "+commonShortcodeTemplate,
)
b.CreateSites().Build(BuildCfg{})
b.AssertFileContent("public/blog/page1/index.html",
"This is content with some shortcodes.",
"Page with outputs",
"Pages: Pages(0)",
"RelPermalink: /blog/page1/|",
"Shortcode 1: HTML: Shortcode: |sc|0|||WordCount: 0.",
"Shortcode 2: HTML: Shortcode: |sc|1|||WordCount: 0.",
"Prev: /blog/page10/|Next: /blog/mybundle/",
"PrevInSection: /blog/page10/|NextInSection: /blog/mybundle/",
"Summary: This is summary.",
"CurrentSection: Page(/blog)",
)
b.AssertFileContent("public/blog/page1/index.json",
"JSON: Single|page|Page with outputs|",
"SON: Shortcode: |sc|0||")
b.AssertFileContent("public/index.html",
"home|In English",
"Site params: Rules",
"Pages: Pages(18)|Data Pages: Pages(18)",
"Paginator: 1",
"First Site: In English",
"RelPermalink: /",
)
b.AssertFileContent("public/no/index.html", "home|På norsk", "RelPermalink: /no/")
// Check RSS
rssHome := b.FileContent("public/index.xml")
assert.Contains(rssHome, `<atom:link href="https://example.com/index.xml" rel="self" type="application/rss+xml" />`)
assert.Equal(3, strings.Count(rssHome, "<item>")) // rssLimit = 3
// .Render should use template/content from the current output format
// even if that output format isn't configured for that page.
b.AssertFileContent(
"public/index.json",
"Render 0: page|JSON: LI|false|Params: Rocks!",
)
b.AssertFileContent(
"public/index.html",
"Render 0: page|HTML: LI|false|Params: Rocks!|",
)
b.AssertFileContent(
"public/index.csv",
"Render 0: page|CSV: LI|false|Params: Rocks!|",
)
// Check bundled resources
b.AssertFileContent(
"public/blog/mybundle/index.html",
"Resources: 1",
)
// Check pages in root section
b.AssertFileContent(
"public/root3/index.html",
"Single|page|Page with outputs|root3.md|",
"Prev: /root4/|Next: /root2/|PrevInSection: /root4/|NextInSection: /root2/",
)
b.AssertFileContent(
"public/root3/index.json", "Shortcode 1: JSON:")
// Paginators
b.AssertFileContent("public/page/1/index.html", `rel="canonical" href="https://example.com/"`)
b.AssertFileContent("public/page/2/index.html", "HTML: List|home|In English|", "Paginator: 2")
// 404
b.AssertFileContent("public/404.html", "404|404 Page not found")
// Sitemaps
b.AssertFileContent("public/en/sitemap.xml", "<loc>https://example.com/blog/</loc>")
b.AssertFileContent("public/no/sitemap.xml", `hreflang="no"`)
b.AssertFileContent("public/sitemap.xml", "<loc>https://example.com/en/sitemap.xml</loc>", "<loc>https://example.com/no/sitemap.xml</loc>")
// robots.txt
b.AssertFileContent("public/robots.txt", `User-agent: *`)
// Aliases
b.AssertFileContent("public/a/b/c/index.html", `refresh`)
// Markdown vs shortcodes
// Check that all footnotes are grouped (even those from inside the shortcode)
b.AssertFileContentRe("public/blog/markyshort/index.html", `Footnotes:.*<ol>.*Fn 1.*Fn 2.*Fn 3.*</ol>`)
}
// https://github.com/golang/go/issues/30286
func TestDataRace(t *testing.T) {
const page = `
---
title: "The Page"
outputs: ["HTML", "JSON"]
---
The content.
`
b := newTestSitesBuilder(t).WithSimpleConfigFile()
for i := 1; i <= 50; i++ {
b.WithContent(fmt.Sprintf("blog/page%d.md", i), page)
}
b.WithContent("_index.md", `
---
title: "The Home"
outputs: ["HTML", "JSON", "CSV", "RSS"]
---
The content.
`)
commonTemplate := `{{ .Data.Pages }}`
b.WithTemplatesAdded("_default/single.html", "HTML Single: "+commonTemplate)
b.WithTemplatesAdded("_default/list.html", "HTML List: "+commonTemplate)
b.CreateSites().Build(BuildCfg{})
}

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -19,6 +19,8 @@ import (
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/gohugoio/hugo/resources/page"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -99,15 +101,19 @@ Content.
section := "sect" section := "sect"
var contentRoot = func(lang string) string { var contentRoot = func(lang string) string {
contentRoot := "content/main"
switch lang { switch lang {
case "nn": case "nn":
contentRoot = "content/norsk" return "content/norsk"
case "sv": case "sv":
contentRoot = "content/svensk" return "content/svensk"
default:
return "content/main"
} }
return contentRoot + "/" + section
}
var contentSectionRoot = func(lang string) string {
return contentRoot(lang) + "/" + section
} }
for _, lang := range []string{"en", "nn", "sv"} { for _, lang := range []string{"en", "nn", "sv"} {
@ -124,7 +130,7 @@ Content.
} }
base := fmt.Sprintf("p-%s-%d", lang, j) base := fmt.Sprintf("p-%s-%d", lang, j)
slug := fmt.Sprintf("%s", base) slug := base
langID := "" langID := ""
if lang == "sv" && j%4 == 0 { if lang == "sv" && j%4 == 0 {
@ -139,7 +145,7 @@ Content.
slug += langID slug += langID
contentRoot := contentRoot(lang) contentRoot := contentSectionRoot(lang)
filename := filepath.Join(contentRoot, fmt.Sprintf("page%d%s.md", j, langID)) filename := filepath.Join(contentRoot, fmt.Sprintf("page%d%s.md", j, langID))
contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, slug, slug, j)) contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, slug, slug, j))
@ -148,7 +154,7 @@ Content.
// Put common translations in all of them // Put common translations in all of them
for i, lang := range []string{"en", "nn", "sv"} { for i, lang := range []string{"en", "nn", "sv"} {
contentRoot := contentRoot(lang) contentRoot := contentSectionRoot(lang)
slug := fmt.Sprintf("common_%s", lang) slug := fmt.Sprintf("common_%s", lang)
@ -173,7 +179,7 @@ Content.
// Add a bundle with some images // Add a bundle with some images
for i, lang := range []string{"en", "nn", "sv"} { for i, lang := range []string{"en", "nn", "sv"} {
contentRoot := contentRoot(lang) contentRoot := contentSectionRoot(lang)
slug := fmt.Sprintf("bundle_%s", lang) slug := fmt.Sprintf("bundle_%s", lang)
filename := filepath.Join(contentRoot, "mybundle", "index.md") filename := filepath.Join(contentRoot, "mybundle", "index.md")
contentFiles = append(contentFiles, filename, fmt.Sprintf(pageBundleTemplate, slug, 400+i)) contentFiles = append(contentFiles, filename, fmt.Sprintf(pageBundleTemplate, slug, 400+i))
@ -190,11 +196,20 @@ Content.
} }
// Add some static files inside the content dir
// https://github.com/gohugoio/hugo/issues/5759
for _, lang := range []string{"en", "nn", "sv"} {
contentRoot := contentRoot(lang)
for i := 0; i < 2; i++ {
filename := filepath.Join(contentRoot, "mystatic", fmt.Sprintf("file%d.yaml", i))
contentFiles = append(contentFiles, filename, lang)
}
}
b := newTestSitesBuilder(t) b := newTestSitesBuilder(t)
b.WithWorkingDir("/my/project").WithConfigFile("toml", config).WithContent(contentFiles...).CreateSites() b.WithWorkingDir("/my/project").WithConfigFile("toml", config).WithContent(contentFiles...).CreateSites()
_ = os.Stdout _ = os.Stdout
//printFs(b.H.BaseFs.ContentFs, "/", os.Stdout)
b.Build(BuildCfg{}) b.Build(BuildCfg{})
@ -204,11 +219,14 @@ Content.
nnSite := b.H.Sites[1] nnSite := b.H.Sites[1]
svSite := b.H.Sites[2] svSite := b.H.Sites[2]
//dumpPages(nnSite.RegularPages...) b.AssertFileContent("/my/project/public/en/mystatic/file1.yaml", "en")
assert.Equal(12, len(nnSite.RegularPages)) b.AssertFileContent("/my/project/public/nn/mystatic/file1.yaml", "nn")
assert.Equal(13, len(enSite.RegularPages))
assert.Equal(10, len(svSite.RegularPages)) //dumpPages(nnSite.RegularPages...)
assert.Equal(12, len(nnSite.RegularPages()))
assert.Equal(13, len(enSite.RegularPages()))
assert.Equal(10, len(svSite.RegularPages()))
svP2, err := svSite.getPageNew(nil, "/sect/page2.md") svP2, err := svSite.getPageNew(nil, "/sect/page2.md")
assert.NoError(err) assert.NoError(err)
@ -217,9 +235,9 @@ Content.
enP2, err := enSite.getPageNew(nil, "/sect/page2.md") enP2, err := enSite.getPageNew(nil, "/sect/page2.md")
assert.NoError(err) assert.NoError(err)
assert.Equal("en", enP2.Lang()) assert.Equal("en", enP2.Language().Lang)
assert.Equal("sv", svP2.Lang()) assert.Equal("sv", svP2.Language().Lang)
assert.Equal("nn", nnP2.Lang()) assert.Equal("nn", nnP2.Language().Lang)
content, _ := nnP2.Content() content, _ := nnP2.Content()
assert.Contains(content, "SVP3-REF: https://example.org/sv/sect/p-sv-3/") assert.Contains(content, "SVP3-REF: https://example.org/sv/sect/p-sv-3/")
@ -241,10 +259,10 @@ Content.
assert.NoError(err) assert.NoError(err)
assert.Equal("https://example.org/nn/sect/p-nn-3/", nnP3Ref) assert.Equal("https://example.org/nn/sect/p-nn-3/", nnP3Ref)
for i, p := range enSite.RegularPages { for i, p := range enSite.RegularPages() {
j := i + 1 j := i + 1
msg := fmt.Sprintf("Test %d", j) msg := fmt.Sprintf("Test %d", j)
assert.Equal("en", p.Lang(), msg) assert.Equal("en", p.Language().Lang, msg)
assert.Equal("sect", p.Section()) assert.Equal("sect", p.Section())
if j < 9 { if j < 9 {
if j%4 == 0 { if j%4 == 0 {
@ -256,20 +274,20 @@ Content.
} }
// Check bundles // Check bundles
bundleEn := enSite.RegularPages[len(enSite.RegularPages)-1] bundleEn := enSite.RegularPages()[len(enSite.RegularPages())-1]
bundleNn := nnSite.RegularPages[len(nnSite.RegularPages)-1] bundleNn := nnSite.RegularPages()[len(nnSite.RegularPages())-1]
bundleSv := svSite.RegularPages[len(svSite.RegularPages)-1] bundleSv := svSite.RegularPages()[len(svSite.RegularPages())-1]
assert.Equal("/en/sect/mybundle/", bundleEn.RelPermalink()) assert.Equal("/en/sect/mybundle/", bundleEn.RelPermalink())
assert.Equal("/sv/sect/mybundle/", bundleSv.RelPermalink()) assert.Equal("/sv/sect/mybundle/", bundleSv.RelPermalink())
assert.Equal(4, len(bundleEn.Resources)) assert.Equal(4, len(bundleEn.Resources()))
assert.Equal(4, len(bundleNn.Resources)) assert.Equal(4, len(bundleNn.Resources()))
assert.Equal(4, len(bundleSv.Resources)) assert.Equal(4, len(bundleSv.Resources()))
assert.Equal("/en/sect/mybundle/logo.png", bundleEn.Resources.GetMatch("logo*").RelPermalink()) b.AssertFileContent("/my/project/public/en/sect/mybundle/index.html", "image/png: /en/sect/mybundle/logo.png")
assert.Equal("/nn/sect/mybundle/logo.png", bundleNn.Resources.GetMatch("logo*").RelPermalink()) b.AssertFileContent("/my/project/public/nn/sect/mybundle/index.html", "image/png: /nn/sect/mybundle/logo.png")
assert.Equal("/sv/sect/mybundle/logo.png", bundleSv.Resources.GetMatch("logo*").RelPermalink()) b.AssertFileContent("/my/project/public/sv/sect/mybundle/index.html", "image/png: /sv/sect/mybundle/logo.png")
b.AssertFileContent("/my/project/public/sv/sect/mybundle/featured.png", "PNG Data for sv") b.AssertFileContent("/my/project/public/sv/sect/mybundle/featured.png", "PNG Data for sv")
b.AssertFileContent("/my/project/public/nn/sect/mybundle/featured.png", "PNG Data for nn") b.AssertFileContent("/my/project/public/nn/sect/mybundle/featured.png", "PNG Data for nn")
@ -278,9 +296,9 @@ Content.
b.AssertFileContent("/my/project/public/sv/sect/mybundle/logo.png", "PNG Data") b.AssertFileContent("/my/project/public/sv/sect/mybundle/logo.png", "PNG Data")
b.AssertFileContent("/my/project/public/nn/sect/mybundle/logo.png", "PNG Data") b.AssertFileContent("/my/project/public/nn/sect/mybundle/logo.png", "PNG Data")
nnSect := nnSite.getPage(KindSection, "sect") nnSect := nnSite.getPage(page.KindSection, "sect")
assert.NotNil(nnSect) assert.NotNil(nnSect)
assert.Equal(12, len(nnSect.Pages)) assert.Equal(12, len(nnSect.Pages()))
nnHome, _ := nnSite.Info.Home() nnHome, _ := nnSite.Info.Home()
assert.Equal("/nn/", nnHome.RelPermalink()) assert.Equal("/nn/", nnHome.RelPermalink())

View File

@ -1,60 +0,0 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
// An Image contains metadata for images + image sitemaps
// https://support.google.com/webmasters/answer/178636?hl=en
type Image struct {
// The URL of the image. In some cases, the image URL may not be on the
// same domain as your main site. This is fine, as long as both domains
// are verified in Webmaster Tools. If, for example, you use a
// content delivery network (CDN) to host your images, make sure that the
// hosting site is verified in Webmaster Tools OR that you submit your
// sitemap using robots.txt. In addition, make sure that your robots.txt
// file doesnt disallow the crawling of any content you want indexed.
URL string
Title string
Caption string
AltText string
// The geographic location of the image. For example,
// <image:geo_location>Limerick, Ireland</image:geo_location>.
GeoLocation string
// A URL to the license of the image.
License string
}
// A Video contains metadata for videos + video sitemaps
// https://support.google.com/webmasters/answer/80471?hl=en
type Video struct {
ThumbnailLoc string
Title string
Description string
ContentLoc string
PlayerLoc string
Duration string
ExpirationDate string
Rating string
ViewCount string
PublicationDate string
FamilyFriendly string
Restriction string
GalleryLoc string
Price string
RequiresSubscription string
Uploader string
Live string
}

View File

@ -1,4 +1,4 @@
// Copyright 2017 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -83,9 +83,9 @@ Menu Main: {{ partial "menu.html" (dict "page" . "menu" "main") }}`,
s := h.Sites[0] s := h.Sites[0]
require.Len(t, s.Menus, 2) require.Len(t, s.Menus(), 2)
p1 := s.RegularPages[0].Menus() p1 := s.RegularPages()[0].Menus()
// There is only one menu in the page, but it is "member of" 2 // There is only one menu in the page, but it is "member of" 2
require.Len(t, p1, 1) require.Len(t, p1, 1)

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -17,13 +17,10 @@ import (
"testing" "testing"
"github.com/spf13/viper" "github.com/spf13/viper"
"github.com/stretchr/testify/require"
) )
func TestMinifyPublisher(t *testing.T) { func TestMinifyPublisher(t *testing.T) {
t.Parallel() t.Parallel()
assert := require.New(t)
v := viper.New() v := viper.New()
v.Set("minify", true) v.Set("minify", true)
@ -43,29 +40,24 @@ func TestMinifyPublisher(t *testing.T) {
<body id="home"> <body id="home">
<h1>{{ .Page.Title }}</h1> <h1>{{ .Title }}</h1>
<p>{{ .Permalink }}</p>
</body> </body>
</html> </html>
` `
b := newTestSitesBuilder(t) b := newTestSitesBuilder(t)
b.WithViper(v).WithContent("page.md", pageWithAlias) b.WithViper(v).WithTemplatesAdded("layouts/index.html", htmlTemplate)
b.WithTemplates("_default/list.html", htmlTemplate, "_default/single.html", htmlTemplate, "alias.html", htmlTemplate)
b.CreateSites().Build(BuildCfg{}) b.CreateSites().Build(BuildCfg{})
assert.Equal(1, len(b.H.Sites))
require.Len(t, b.H.Sites[0].RegularPages, 1)
// Check minification // Check minification
// HTML // HTML
b.AssertFileContent("public/page/index.html", "<!doctype html><html lang=en><head><meta charset=utf-8><title>HTML5 boilerplate all you really need…</title><link rel=stylesheet href=css/style.css></head><body id=home><h1>Has Alias</h1></body></html>") b.AssertFileContent("public/index.html", "<!doctype html>")
// HTML alias. Note the custom template which does no redirect.
b.AssertFileContent("public/foo/bar/index.html", "<!doctype html><html lang=en><head><meta charset=utf-8><title>HTML5 boilerplate ")
// RSS // RSS
b.AssertFileContent("public/index.xml", "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?><rss version=\"2.0\" xmlns:atom=\"http://www.w3.org/2005/Atom\"><channel><title/><link>https://example.org/</link>") b.AssertFileContent("public/index.xml", "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?><rss version=\"2.0\" xmlns:atom=\"http://www.w3.org/2005/Atom\"><channel><title/><link>https://example.org/</link>")
// Sitemap // Sitemap
b.AssertFileContent("public/sitemap.xml", "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?><urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\" xmlns:xhtml=\"http://www.w3.org/1999/xhtml\"><url><loc>https://example.org/</loc><priority>0</priority></url><url>") b.AssertFileContent("public/sitemap.xml", "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?><urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\" xmlns:xhtml=\"http://www.w3.org/1999/xhtml\"><url><loc>h")
} }

View File

@ -1,4 +1,4 @@
// Copyright 2016-present The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -62,10 +62,10 @@ func newMultiLingualFromSites(cfg config.Provider, sites ...*Site) (*Multilingua
languages := make(langs.Languages, len(sites)) languages := make(langs.Languages, len(sites))
for i, s := range sites { for i, s := range sites {
if s.Language == nil { if s.language == nil {
return nil, errors.New("Missing language for site") return nil, errors.New("missing language for site")
} }
languages[i] = s.Language languages[i] = s.language
} }
defaultLang := cfg.GetString("defaultContentLanguage") defaultLang := cfg.GetString("defaultContentLanguage")
@ -78,19 +78,15 @@ func newMultiLingualFromSites(cfg config.Provider, sites ...*Site) (*Multilingua
} }
func newMultiLingualForLanguage(language *langs.Language) *Multilingual {
languages := langs.Languages{language}
return &Multilingual{Languages: languages, DefaultLang: language}
}
func (ml *Multilingual) enabled() bool { func (ml *Multilingual) enabled() bool {
return len(ml.Languages) > 1 return len(ml.Languages) > 1
} }
func (s *Site) multilingualEnabled() bool { func (s *Site) multilingualEnabled() bool {
if s.owner == nil { if s.h == nil {
return false return false
} }
return s.owner.multilingual != nil && s.owner.multilingual.enabled() return s.h.multilingual != nil && s.h.multilingual.enabled()
} }
func toSortedLanguages(cfg config.Provider, l map[string]interface{}) (langs.Languages, error) { func toSortedLanguages(cfg config.Provider, l map[string]interface{}) (langs.Languages, error) {

View File

@ -1,99 +0,0 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"sync"
)
type orderedMap struct {
sync.RWMutex
keys []interface{}
m map[interface{}]interface{}
}
func newOrderedMap() *orderedMap {
return &orderedMap{m: make(map[interface{}]interface{})}
}
func newOrderedMapFromStringMapString(m map[string]string) *orderedMap {
om := newOrderedMap()
for k, v := range m {
om.Add(k, v)
}
return om
}
func (m *orderedMap) Add(k, v interface{}) {
m.Lock()
defer m.Unlock()
_, found := m.m[k]
if found {
panic(fmt.Sprintf("%v already added", v))
}
m.m[k] = v
m.keys = append(m.keys, k)
}
func (m *orderedMap) Get(k interface{}) (interface{}, bool) {
m.RLock()
defer m.RUnlock()
v, found := m.m[k]
return v, found
}
func (m *orderedMap) Contains(k interface{}) bool {
m.RLock()
defer m.RUnlock()
_, found := m.m[k]
return found
}
func (m *orderedMap) Keys() []interface{} {
m.RLock()
defer m.RUnlock()
return m.keys
}
func (m *orderedMap) Len() int {
m.RLock()
defer m.RUnlock()
return len(m.keys)
}
// Some shortcuts for known types.
func (m *orderedMap) getShortcode(k interface{}) *shortcode {
v, found := m.Get(k)
if !found {
return nil
}
return v.(*shortcode)
}
func (m *orderedMap) getShortcodeRenderer(k interface{}) func() (string, error) {
v, found := m.Get(k)
if !found {
return nil
}
return v.(func() (string, error))
}
func (m *orderedMap) getString(k interface{}) string {
v, found := m.Get(k)
if !found {
return ""
}
return v.(string)
}

View File

@ -1,69 +0,0 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"sync"
"testing"
"github.com/stretchr/testify/require"
)
// TestOrderedMap verifies basic Add/Get/Contains semantics and that Keys
// preserves insertion order.
func TestOrderedMap(t *testing.T) {
	t.Parallel()
	assert := require.New(t)

	m := newOrderedMap()
	for _, kv := range []struct{ k, v string }{
		{"b", "vb"},
		{"c", "vc"},
		{"a", "va"},
	} {
		m.Add(kv.k, kv.v)
	}

	got, found := m.Get("b")
	assert.True(found)
	assert.Equal(got, "vb")
	assert.True(m.Contains("b"))
	assert.False(m.Contains("e"))
	assert.Equal([]interface{}{"b", "c", "a"}, m.Keys())
}
// TestOrderedMapConcurrent hammers the map from 19 goroutines to exercise
// the RWMutex guarding; run with -race to catch regressions.
func TestOrderedMapConcurrent(t *testing.T) {
	t.Parallel()
	assert := require.New(t)

	m := newOrderedMap()
	var wg sync.WaitGroup

	for i := 1; i < 20; i++ {
		wg.Add(1)
		go func(n int) {
			defer wg.Done()
			k := fmt.Sprintf("key%d", n)
			expected := k + "val"
			m.Add(k, expected)
			got, ok := m.Get(k)
			assert.True(ok)
			assert.Equal(got, expected)
			assert.True(m.Contains(k))
			assert.True(m.Len() > 0)
			assert.True(len(m.Keys()) > 0)
		}(i)
	}

	wg.Wait()
}

File diff suppressed because it is too large Load Diff

112
hugolib/page__common.go Normal file
View File

@ -0,0 +1,112 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"sync"
"github.com/bep/gitmap"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/compare"
"github.com/gohugoio/hugo/lazy"
"github.com/gohugoio/hugo/navigation"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/resource"
)
// pageCommon holds the parts of a page shared across all page kinds and all
// of a page's output formats. Most of page.Page is implemented by the
// embedded provider interfaces below, which are wired up in newPageBase and
// can be swapped per page type.
type pageCommon struct {
	s *Site
	m *pageMeta

	// Lazily initialized dependencies.
	init *lazy.Init

	// All of these represents the common parts of a page.Page
	maps.Scratcher
	navigation.PageMenusProvider
	page.AuthorProvider
	page.PageRenderProvider
	page.AlternativeOutputFormatsProvider
	page.ChildCareProvider
	page.FileProvider
	page.GetPageProvider
	page.GitInfoProvider
	page.InSectionPositioner
	page.OutputFormatsProvider
	page.PageMetaProvider
	page.Positioner
	page.RawContentProvider
	page.RelatedKeywordsProvider
	page.RefProvider
	page.ShortcodeInfoProvider
	page.SitesProvider
	page.DeprecatedWarningPageMethods
	page.TranslationsProvider
	page.TreeProvider
	resource.LanguageProvider
	resource.ResourceDataProvider
	resource.ResourceMetaProvider
	resource.ResourceParamsProvider
	resource.ResourceTypesProvider
	resource.TranslationKeyProvider
	compare.Eqer

	// Describes how paths and URLs for this page and its descendants
	// should look like.
	targetPathDescriptor page.TargetPathDescriptor

	layoutDescriptor     output.LayoutDescriptor
	layoutDescriptorInit sync.Once

	// The parsed page content.
	pageContent

	// Set if feature enabled and this is in a Git repo.
	gitInfo *gitmap.GitInfo

	// Positional navigation
	posNextPrev        *nextPrev
	posNextPrevSection *nextPrev

	// Menus
	pageMenus *pageMenus

	// Internal use
	page.InternalDependencies

	// The children. Regular pages will have none.
	pages     page.Pages
	pagesInit sync.Once

	// Any bundled resources
	resources     resource.Resources
	resourcesInit sync.Once

	translations    page.Pages
	allTranslations page.Pages

	// Calculated and cached translation mapping key
	translationKey     string
	translationKeyInit sync.Once

	// Will only be set for sections and regular pages.
	parent *pageState

	// Will only be set for section pages and the home page.
	subSections page.Pages

	// Set in fast render mode to force render a given page.
	forceRender bool
}

135
hugolib/page__content.go Normal file
View File

@ -0,0 +1,135 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/parser/pageparser"
)
// internalSummaryDividerBase is an internal placeholder token used to mark
// the summary divider position in the content; the byte and padded variants
// below are the forms inserted into/matched against the raw source.
var (
	internalSummaryDividerBase      = "HUGOMORE42"
	internalSummaryDividerBaseBytes = []byte(internalSummaryDividerBase)
	internalSummaryDividerPre       = []byte("\n\n" + internalSummaryDividerBase + "\n\n")
)
// The content related items on a Page.
type pageContent struct {
	// When false, shortcode output is always inserted fully rendered into
	// the content instead of via placeholders (see contentToRender).
	renderable bool
	// Base name for self-rendered layouts; combined with the output format
	// name in selfLayoutForOutput. Empty for regular pages.
	selfLayout string
	// NOTE(review): presumably set when the content was cut at the summary
	// divider — confirm against the content mapping code.
	truncated bool

	// The ordered items (raw ranges, replacements, shortcodes) making up
	// the content; see pageContentMap.
	cmap *pageContentMap

	// Shortcode state for this page.
	shortcodeState *shortcodeHandler

	// The raw, parsed page source.
	source rawPageContent
}
// contentToRender assembles the content to be processed by Blackfriday or
// similar. It stitches together raw source ranges, replacement values and
// shortcode output in cmap order. Shortcodes with doMarkup set (or any
// shortcode when the page itself is not renderable) are inserted fully
// rendered from renderedShortcodes; the rest are inserted as placeholders
// to be swapped in after markdown processing.
func (p pageContent) contentToRender(renderedShortcodes map[string]string) []byte {
	source := p.source.parsed.Input()

	// Pre-size with ~10% headroom for the rendered shortcodes.
	c := make([]byte, 0, len(source)+(len(source)/10))

	for _, it := range p.cmap.items {
		switch v := it.(type) {
		case pageparser.Item:
			c = append(c, source[v.Pos:v.Pos+len(v.Val)]...)
		case pageContentReplacement:
			c = append(c, v.val...)
		case *shortcode:
			if v.doMarkup || !p.renderable {
				// Insert the rendered shortcode.
				renderedShortcode, found := renderedShortcodes[v.placeholder]
				if !found {
					// This should never happen.
					panic(fmt.Sprintf("rendered shortcode %q not found", v.placeholder))
				}

				// append accepts a string directly; no []byte conversion needed.
				c = append(c, renderedShortcode...)
			} else {
				// Insert the placeholder so we can insert the content after
				// markdown processing.
				c = append(c, v.placeholder...)
			}
		default:
			panic(fmt.Sprintf("unknown item type %T", it))
		}
	}

	return c
}
// selfLayoutForOutput returns the self layout name for the given output
// format, or "" if this page has no self layout.
func (p pageContent) selfLayoutForOutput(f output.Format) string {
	if layout := p.selfLayout; layout != "" {
		return layout + f.Name
	}
	return ""
}
// rawPageContent holds the parsed source of a page plus byte positions of
// structural markers found during parsing. Positions default to -1 when
// unknown.
type rawPageContent struct {
	hasSummaryDivider bool

	// The AST of the parsed page. Contains information about:
	// shortcodes, front matter, summary indicators.
	parsed pageparser.Result

	// Returns the position in bytes after any front matter.
	posMainContent int

	// These are set if we're able to determine this from the source.
	posSummaryEnd int
	posBodyStart  int
}

// pageContentReplacement represents a span of the source that should be
// replaced by val; source keeps the original item for position information.
type pageContentReplacement struct {
	val []byte

	source pageparser.Item
}

// pageContentMap holds the ordered items that make up the page content,
// plus flags telling the renderer which shortcode handling is required.
type pageContentMap struct {

	// If not, we can skip any pre-rendering of shortcodes.
	hasMarkdownShortcode bool

	// Indicates whether we must do placeholder replacements.
	hasNonMarkdownShortcode bool

	//  *shortcode, pageContentReplacement or pageparser.Item
	items []interface{}
}
// AddBytes appends a raw source item to the content map.
func (p *pageContentMap) AddBytes(item pageparser.Item) {
	p.items = append(p.items, item)
}

// AddReplacement appends a replacement of the given source item with val.
func (p *pageContentMap) AddReplacement(val []byte, source pageparser.Item) {
	p.items = append(p.items, pageContentReplacement{val: val, source: source})
}

// AddShortcode appends a shortcode and records whether the map now contains
// markdown-producing (doMarkup) and/or other shortcodes.
func (p *pageContentMap) AddShortcode(s *shortcode) {
	p.items = append(p.items, s)
	if s.doMarkup {
		p.hasMarkdownShortcode = true
	} else {
		p.hasNonMarkdownShortcode = true
	}
}

70
hugolib/page__data.go Normal file
View File

@ -0,0 +1,70 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"sync"
"github.com/gohugoio/hugo/resources/page"
)
// pageData implements the .Data template accessor for a page, building the
// data map lazily on first access.
type pageData struct {
	*pageState

	// Guards the one-time build of data below.
	dataInit sync.Once
	data     page.Data
}

// Data returns the page's .Data map. Regular pages get an empty map;
// taxonomy and taxonomy-term pages are populated with the singular/plural
// names, the term and the relevant taxonomy collections; all node kinds get
// a lazily evaluated "pages" entry.
func (p *pageData) Data() interface{} {
	p.dataInit.Do(func() {
		p.data = make(page.Data)

		if p.Kind() == page.KindPage {
			// Regular pages: empty map, no "pages" entry.
			return
		}

		switch p.Kind() {
		case page.KindTaxonomy:
			termInfo := p.getTaxonomyNodeInfo()
			pluralInfo := termInfo.parent

			singular := pluralInfo.singular
			plural := pluralInfo.plural
			term := termInfo.term
			taxonomy := p.s.Taxonomies[plural].Get(termInfo.termKey)

			p.data[singular] = taxonomy
			p.data["Singular"] = singular
			p.data["Plural"] = plural
			p.data["Term"] = term
		case page.KindTaxonomyTerm:
			info := p.getTaxonomyNodeInfo()
			plural := info.plural
			singular := info.singular

			p.data["Singular"] = singular
			p.data["Plural"] = plural
			p.data["Terms"] = p.s.Taxonomies[plural]
			// keep the following just for legacy reasons
			p.data["OrderedIndex"] = p.data["Terms"]
			p.data["Index"] = p.data["Terms"]
		}

		// Assign the function to the map to make sure it is lazily initialized
		p.data["pages"] = p.Pages
	})

	return p.data
}

74
hugolib/page__menus.go Normal file
View File

@ -0,0 +1,74 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"sync"
"github.com/gohugoio/hugo/navigation"
)
// pageMenus provides the menu-related methods of a page. The query provider
// and the page's own menu entries are built lazily; the site menus are
// initialized (once per site) before any query is answered.
type pageMenus struct {
	p *pageState

	q navigation.MenyQueryProvider

	// Guards the one-time build of q and pm in init.
	pmInit sync.Once
	pm     navigation.PageMenus
}

// HasMenuCurrent delegates to the menu query provider after making sure the
// site menus and this page's menus are initialized.
func (p *pageMenus) HasMenuCurrent(menuID string, me *navigation.MenuEntry) bool {
	p.p.s.init.menus.Do()
	p.init()
	return p.q.HasMenuCurrent(menuID, me)
}

// IsMenuCurrent delegates to the menu query provider after making sure the
// site menus and this page's menus are initialized.
func (p *pageMenus) IsMenuCurrent(menuID string, inme *navigation.MenuEntry) bool {
	p.p.s.init.menus.Do()
	p.init()
	return p.q.IsMenuCurrent(menuID, inme)
}

// Menus returns this page's menu entries, triggering the site-wide menu
// initialization first.
func (p *pageMenus) Menus() navigation.PageMenus {
	// There is a reverse dependency here. initMenus will, once, build the
	// site menus and update any relevant page.
	p.p.s.init.menus.Do()

	return p.menus()
}

// menus returns this page's menu entries without triggering the site-wide
// menu initialization.
func (p *pageMenus) menus() navigation.PageMenus {
	p.init()
	return p.pm
}

// init lazily builds the query provider and reads this page's menu
// definitions via navigation.PageMenusFromPage; errors are logged, not
// returned.
func (p *pageMenus) init() {
	p.pmInit.Do(func() {
		p.q = navigation.NewMenuQueryProvider(
			p.p.s.Info.sectionPagesMenu,
			p,
			p.p.s,
			p.p,
		)

		var err error
		p.pm, err = navigation.PageMenusFromPage(p.p)
		if err != nil {
			p.p.s.Log.ERROR.Println(p.p.wrapError(err))
		}
	})
}

652
hugolib/page__meta.go Normal file
View File

@ -0,0 +1,652 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"path"
"regexp"
"strings"
"time"
"github.com/gohugoio/hugo/related"
"github.com/gohugoio/hugo/source"
"github.com/markbates/inflect"
"github.com/mitchellh/mapstructure"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/page/pagemeta"
"github.com/gohugoio/hugo/resources/resource"
"github.com/spf13/cast"
)
// cjkRe matches any CJK rune (Han, Hangul, Hiragana, Katakana); used in
// setMetadata to detect CJK content when hasCJKLanguage is enabled.
var cjkRe = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`)
// pageMeta holds all page metadata: values parsed from front matter plus
// the derived/default values filled in by applyDefaultValues.
type pageMeta struct {
	// kind is the discriminator that identifies the different page types
	// in the different page collections. This can, as an example, be used
	// to filter regular pages, find sections etc.
	// Kind will, for the pages available to the templates, be one of:
	// page, home, section, taxonomy and taxonomyTerm.
	// It is of string type to make it easy to reason about in
	// the templates.
	kind string

	// This is a standalone page not part of any page collection. These
	// include sitemap, robotsTXT and similar. It will have no pageOutputs, but
	// a fixed pageOutput.
	standalone bool

	// "branch", "leaf" or empty; see applyDefaultValues.
	bundleType string

	// Params contains configuration defined in the params section of page frontmatter.
	params map[string]interface{}

	title     string
	linkTitle string

	resourcePath string

	weight int

	markup      string
	contentType string

	// whether the content is in a CJK language.
	isCJKLanguage bool

	layout string

	aliases []string

	draft bool

	description string
	keywords    []string

	urlPaths pagemeta.URLPath

	resource.Dates

	// This is enabled if it is a leaf bundle (the "index.md" type) and it is marked as headless in front matter.
	// Being headless means that
	// 1. The page itself is not rendered to disk
	// 2. It is not available in .Site.Pages etc.
	// 3. But you can get it via .Site.GetPage
	headless bool

	// A key that maps to translation(s) of this page. This value is fetched
	// from the page front matter.
	translationKey string

	// From front matter.
	configuredOutputFormats output.Formats

	// This is the raw front matter metadata that is going to be assigned to
	// the Resources above.
	resourcesMetadata []map[string]interface{}

	f source.File

	sections []string

	// Sitemap overrides from front matter.
	sitemap config.Sitemap

	s *Site

	// Per-page Blackfriday config; set from the "blackfriday" param in
	// applyDefaultValues.
	renderingConfig *helpers.BlackFriday
}
// Aliases returns the alias paths set in front matter.
func (p *pageMeta) Aliases() []string {
	return p.aliases
}

// Author returns one of the page's authors (arbitrary if several, since
// AuthorList is a map), or the zero value if none.
func (p *pageMeta) Author() page.Author {
	authors := p.Authors()

	for _, author := range authors {
		return author
	}
	return page.Author{}
}

// Authors resolves the "authors" front matter value against the site's
// configured authors.
func (p *pageMeta) Authors() page.AuthorList {
	authorKeys, ok := p.params["authors"]
	if !ok {
		return page.AuthorList{}
	}
	// NOTE(review): unchecked assertion assumes the param is stored as a
	// []string — confirm against the front matter handling in setMetadata.
	authors := authorKeys.([]string)
	if len(authors) < 1 || len(p.s.Info.Authors) < 1 {
		return page.AuthorList{}
	}

	al := make(page.AuthorList)
	for _, author := range authors {
		a, ok := p.s.Info.Authors[author]
		if ok {
			al[author] = a
		}
	}
	return al
}

// BundleType returns "leaf", "branch" or the empty string.
func (p *pageMeta) BundleType() string {
	return p.bundleType
}
// Description returns the description set in front matter.
func (p *pageMeta) Description() string {
	return p.description
}

// Lang returns the site's language code.
func (p *pageMeta) Lang() string {
	return p.s.Lang()
}

// Draft reports whether the page is a draft.
func (p *pageMeta) Draft() bool {
	return p.draft
}

// File returns the source file; may be nil for pages not backed by a file.
func (p *pageMeta) File() source.File {
	return p.f
}

// IsHome reports whether this is the home page.
func (p *pageMeta) IsHome() bool {
	return p.Kind() == page.KindHome
}

// Keywords returns the keywords set in front matter.
func (p *pageMeta) Keywords() []string {
	return p.keywords
}

// Kind returns the page kind discriminator (page, home, section, ...).
func (p *pageMeta) Kind() string {
	return p.kind
}

// Layout returns the layout set in front matter.
func (p *pageMeta) Layout() string {
	return p.layout
}

// LinkTitle returns the linkTitle front matter value, falling back to Title.
func (p *pageMeta) LinkTitle() string {
	if p.linkTitle != "" {
		return p.linkTitle
	}

	return p.Title()
}

// Name returns the resource path if set, otherwise the title.
func (p *pageMeta) Name() string {
	if p.resourcePath != "" {
		return p.resourcePath
	}
	return p.Title()
}

// IsNode reports whether this is a list-type page (anything but a regular page).
func (p *pageMeta) IsNode() bool {
	return !p.IsPage()
}

// IsPage reports whether this is a regular content page.
func (p *pageMeta) IsPage() bool {
	return p.Kind() == page.KindPage
}
// Param is a convenience method to do lookups in Page's and Site's Params map,
// in that order.
//
// This method is also implemented on SiteInfo.
// TODO(bep) interface
func (p *pageMeta) Param(key interface{}) (interface{}, error) {
	return resource.Param(p, p.s.Info.Params(), key)
}

// Params returns the page's params map (front matter "params" plus all
// otherwise-unhandled keys).
func (p *pageMeta) Params() map[string]interface{} {
	return p.params
}

// Path returns the source file path when there is one, otherwise the
// sections path.
func (p *pageMeta) Path() string {
	if p.File() != nil {
		return p.File().Path()
	}
	return p.SectionsPath()
}

// RelatedKeywords implements the related.Document interface needed for fast page searches.
func (p *pageMeta) RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error) {
	v, err := p.Param(cfg.Name)
	if err != nil {
		return nil, err
	}

	return cfg.ToKeywords(v)
}

// IsSection reports whether this is a section page.
func (p *pageMeta) IsSection() bool {
	return p.Kind() == page.KindSection
}

// Section returns the first section element of the page, or "" for the home
// page and section-less nodes. Panics for file-less regular pages.
func (p *pageMeta) Section() string {
	if p.IsHome() {
		return ""
	}

	if p.IsNode() {
		if len(p.sections) == 0 {
			// May be a sitemap or similar.
			return ""
		}
		return p.sections[0]
	}

	if p.File() != nil {
		return p.File().Section()
	}

	panic("invalid page state")
}

// SectionsEntries returns the section path elements.
func (p *pageMeta) SectionsEntries() []string {
	return p.sections
}

// SectionsPath joins the section entries into one path.
func (p *pageMeta) SectionsPath() string {
	return path.Join(p.SectionsEntries()...)
}

// Sitemap returns the effective sitemap configuration for this page.
func (p *pageMeta) Sitemap() config.Sitemap {
	return p.sitemap
}

// Title returns the page title.
func (p *pageMeta) Title() string {
	return p.title
}

// Type returns the content type: front matter "type", else the section,
// else "page".
func (p *pageMeta) Type() string {
	if p.contentType != "" {
		return p.contentType
	}

	if x := p.Section(); x != "" {
		return x
	}

	return "page"
}

// Weight returns the page weight set in front matter.
func (p *pageMeta) Weight() int {
	return p.weight
}
// setMetadata maps the parsed front matter into pm: dates are delegated to
// the frontmatterHandler, all known keys are decoded into typed fields (and
// mirrored into params), and unknown keys are stored in params with light
// type normalization. Returns an error for invalid values such as absolute
// URLs or aliases.
func (pm *pageMeta) setMetadata(p *pageState, frontmatter map[string]interface{}) error {
	if frontmatter == nil {
		return errors.New("missing frontmatter data")
	}

	pm.params = make(map[string]interface{})

	// Needed for case insensitive fetching of params values
	maps.ToLower(frontmatter)

	var mtime time.Time
	if p.File().FileInfo() != nil {
		mtime = p.File().FileInfo().ModTime()
	}

	var gitAuthorDate time.Time
	if p.gitInfo != nil {
		gitAuthorDate = p.gitInfo.AuthorDate
	}

	descriptor := &pagemeta.FrontMatterDescriptor{
		Frontmatter:   frontmatter,
		Params:        pm.params,
		Dates:         &pm.Dates,
		PageURLs:      &pm.urlPaths,
		BaseFilename:  p.File().ContentBaseName(),
		ModTime:       mtime,
		GitAuthorDate: gitAuthorDate,
	}

	// Handle the date separately
	// TODO(bep) we need to "do more" in this area so this can be split up and
	// more easily tested without the Page, but the coupling is strong.
	err := pm.s.frontmatterHandler.HandleDates(descriptor)
	if err != nil {
		p.s.Log.ERROR.Printf("Failed to handle dates for page %q: %s", p.pathOrTitle(), err)
	}

	var sitemapSet bool

	// draft/published/iscjklanguage are collected as pointers so we can
	// tell "not set" apart from "set to false" and resolve them at the end.
	var draft, published, isCJKLanguage *bool
	for k, v := range frontmatter {
		loki := strings.ToLower(k)

		if loki == "published" { // Intentionally undocumented
			vv, err := cast.ToBoolE(v)
			if err == nil {
				published = &vv
			}
			// published may also be a date
			continue
		}

		if pm.s.frontmatterHandler.IsDateKey(loki) {
			continue
		}

		switch loki {
		case "title":
			pm.title = cast.ToString(v)
			pm.params[loki] = pm.title
		case "linktitle":
			pm.linkTitle = cast.ToString(v)
			pm.params[loki] = pm.linkTitle
		case "description":
			pm.description = cast.ToString(v)
			pm.params[loki] = pm.description
		case "slug":
			// Don't start or end with a -
			pm.urlPaths.Slug = strings.Trim(cast.ToString(v), "-")
			pm.params[loki] = pm.Slug()
		case "url":
			if url := cast.ToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
				return fmt.Errorf("only relative URLs are supported, %v provided", url)
			}
			pm.urlPaths.URL = cast.ToString(v)
			pm.params[loki] = pm.urlPaths.URL
		case "type":
			pm.contentType = cast.ToString(v)
			pm.params[loki] = pm.contentType
		case "keywords":
			pm.keywords = cast.ToStringSlice(v)
			pm.params[loki] = pm.keywords
		case "headless":
			// For now, only the leaf bundles ("index.md") can be headless (i.e. produce no output).
			// We may expand on this in the future, but that gets more complex pretty fast.
			if p.File().TranslationBaseName() == "index" {
				pm.headless = cast.ToBool(v)
			}
			pm.params[loki] = pm.headless
		case "outputs":
			o := cast.ToStringSlice(v)
			if len(o) > 0 {
				// Output formats are explicitly set in front matter, use those.
				outFormats, err := p.s.outputFormatsConfig.GetByNames(o...)

				if err != nil {
					p.s.Log.ERROR.Printf("Failed to resolve output formats: %s", err)
				} else {
					pm.configuredOutputFormats = outFormats
					pm.params[loki] = outFormats
				}

			}
		case "draft":
			draft = new(bool)
			*draft = cast.ToBool(v)
		case "layout":
			pm.layout = cast.ToString(v)
			pm.params[loki] = pm.layout
		case "markup":
			pm.markup = cast.ToString(v)
			pm.params[loki] = pm.markup
		case "weight":
			pm.weight = cast.ToInt(v)
			pm.params[loki] = pm.weight
		case "aliases":
			pm.aliases = cast.ToStringSlice(v)
			for _, alias := range pm.aliases {
				if strings.HasPrefix(alias, "http://") || strings.HasPrefix(alias, "https://") {
					return fmt.Errorf("only relative aliases are supported, %v provided", alias)
				}
			}
			pm.params[loki] = pm.aliases
		case "sitemap":
			p.m.sitemap = config.DecodeSitemap(p.s.siteCfg.sitemap, cast.ToStringMap(v))
			pm.params[loki] = p.m.sitemap
			sitemapSet = true
		case "iscjklanguage":
			isCJKLanguage = new(bool)
			*isCJKLanguage = cast.ToBool(v)
		case "translationkey":
			pm.translationKey = cast.ToString(v)
			pm.params[loki] = pm.translationKey
		case "resources":
			// Resources metadata may come in several shapes depending on
			// the front matter format (YAML/TOML/JSON); normalize all of
			// them to []map[string]interface{}.
			var resources []map[string]interface{}
			handled := true

			switch vv := v.(type) {
			case []map[interface{}]interface{}:
				for _, vvv := range vv {
					resources = append(resources, cast.ToStringMap(vvv))
				}
			case []map[string]interface{}:
				resources = append(resources, vv...)
			case []interface{}:
				for _, vvv := range vv {
					switch vvvv := vvv.(type) {
					case map[interface{}]interface{}:
						resources = append(resources, cast.ToStringMap(vvvv))
					case map[string]interface{}:
						resources = append(resources, vvvv)
					}
				}
			default:
				handled = false
			}

			if handled {
				pm.params[loki] = resources
				pm.resourcesMetadata = resources
				break
			}
			fallthrough

		default:
			// If not one of the explicit values, store in Params
			switch vv := v.(type) {
			case bool:
				pm.params[loki] = vv
			case string:
				pm.params[loki] = vv
			case int64, int32, int16, int8, int:
				pm.params[loki] = vv
			case float64, float32:
				pm.params[loki] = vv
			case time.Time:
				pm.params[loki] = vv
			default: // handle array of strings as well
				switch vvv := vv.(type) {
				case []interface{}:
					if len(vvv) > 0 {
						switch vvv[0].(type) {
						case map[interface{}]interface{}: // Proper parsing structured array from YAML based FrontMatter
							pm.params[loki] = vvv
						case map[string]interface{}: // Proper parsing structured array from JSON based FrontMatter
							pm.params[loki] = vvv
						case []interface{}:
							pm.params[loki] = vvv
						default:
							a := make([]string, len(vvv))
							for i, u := range vvv {
								a[i] = cast.ToString(u)
							}

							pm.params[loki] = a
						}
					} else {
						pm.params[loki] = []string{}
					}
				default:
					pm.params[loki] = vv
				}
			}
		}
	}

	if !sitemapSet {
		pm.sitemap = p.s.siteCfg.sitemap
	}

	pm.markup = helpers.GuessType(pm.markup)

	// Resolve draft vs. published; an explicit draft setting wins.
	if draft != nil && published != nil {
		pm.draft = *draft
		p.m.s.Log.WARN.Printf("page %q has both draft and published settings in its frontmatter. Using draft.", p.File().Filename())
	} else if draft != nil {
		pm.draft = *draft
	} else if published != nil {
		pm.draft = !*published
	}
	pm.params["draft"] = pm.draft

	if isCJKLanguage != nil {
		pm.isCJKLanguage = *isCJKLanguage
	} else if p.s.siteCfg.hasCJKLanguage {
		// Auto-detect CJK content from the raw source.
		if cjkRe.Match(p.source.parsed.Input()) {
			pm.isCJKLanguage = true
		} else {
			pm.isCJKLanguage = false
		}
	}

	pm.params["iscjklanguage"] = p.m.isCJKLanguage

	return nil
}
// applyDefaultValues fills in values not set in front matter: markup type
// (from the file extension), default titles per page kind, the bundle type
// and the per-page Blackfriday rendering config.
func (p *pageMeta) applyDefaultValues() error {
	if p.markup == "" {
		if p.File() != nil {
			// Fall back to file extension.
			p.markup = helpers.GuessType(p.File().Ext())
		}
		if p.markup == "" {
			p.markup = "unknown"
		}
	}

	if p.title == "" {
		switch p.Kind() {
		case page.KindHome:
			p.title = p.s.Info.title
		case page.KindSection:
			sectionName := helpers.FirstUpper(p.sections[0])
			if p.s.Cfg.GetBool("pluralizeListTitles") {
				p.title = inflect.Pluralize(sectionName)
			} else {
				p.title = sectionName
			}
		case page.KindTaxonomy:
			// Last section element is the term.
			key := p.sections[len(p.sections)-1]
			p.title = strings.Replace(p.s.titleFunc(key), "-", " ", -1)
		case page.KindTaxonomyTerm:
			p.title = p.s.titleFunc(p.sections[0])
		case kind404:
			p.title = "404 Page not found"
		}
	}

	if p.IsNode() {
		p.bundleType = "branch"
	} else {
		source := p.File()
		if fi, ok := source.(*fileInfo); ok {
			switch fi.bundleTp {
			case bundleBranch:
				p.bundleType = "branch"
			case bundleLeaf:
				p.bundleType = "leaf"
			}
		}
	}

	bfParam := getParamToLower(p, "blackfriday")
	if bfParam != nil {
		p.renderingConfig = p.s.ContentSpec.BlackFriday

		// Create a copy so we can modify it.
		bf := *p.s.ContentSpec.BlackFriday
		p.renderingConfig = &bf
		pageParam := cast.ToStringMap(bfParam)
		if err := mapstructure.Decode(pageParam, &p.renderingConfig); err != nil {
			return errors.WithMessage(err, "failed to decode rendering config")
		}
	}

	return nil
}
// The output formats this page will be rendered to.
func (m *pageMeta) outputFormats() output.Formats {
	if len(m.configuredOutputFormats) == 0 {
		// Nothing set in front matter; use the site defaults for this kind.
		return m.s.outputFormats[m.Kind()]
	}
	return m.configuredOutputFormats
}
// Slug returns the slug set in front matter, or the empty string.
func (p *pageMeta) Slug() string {
	return p.urlPaths.Slug
}
// getParam returns the param with the given (case-insensitive) key from m,
// normalized by type: ints/floats are widened via cast, strings and string
// slices are optionally lowercased, maps pass through unchanged. Unknown
// types yield nil.
func getParam(m resource.ResourceParamsProvider, key string, stringToLower bool) interface{} {
	v := m.Params()[strings.ToLower(key)]

	if v == nil {
		return nil
	}

	switch val := v.(type) {
	case bool:
		return val
	case string:
		if stringToLower {
			return strings.ToLower(val)
		}
		return val
	case int64, int32, int16, int8, int:
		return cast.ToInt(v)
	case float64, float32:
		return cast.ToFloat64(v)
	case time.Time:
		return val
	case []string:
		if stringToLower {
			return helpers.SliceToLower(val)
		}
		return v
	case map[string]interface{}: // JSON and TOML
		return v
	case map[interface{}]interface{}: // YAML
		return v
	}

	//p.s.Log.ERROR.Printf("GetParam(\"%s\"): Unknown type %s\n", key, reflect.TypeOf(v))
	return nil
}

// getParamToLower is getParam with string values lowercased.
func getParamToLower(m resource.ResourceParamsProvider, key string) interface{} {
	return getParam(m, key, true)
}

291
hugolib/page__new.go Normal file
View File

@ -0,0 +1,291 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"html/template"
"strings"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/parser/pageparser"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/lazy"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/resource"
)
// newPageBase creates a pageState with all common providers wired up, but
// with no content or output formats attached yet. Panics if metaProvider
// has no Site.
func newPageBase(metaProvider *pageMeta) (*pageState, error) {
	if metaProvider.s == nil {
		panic("must provide a Site")
	}

	s := metaProvider.s

	// Start with nop providers where the real implementation is assigned
	// below or later in the page's init.
	ps := &pageState{
		pageOutput: nopPageOutput,
		pageCommon: &pageCommon{
			FileProvider:            metaProvider,
			AuthorProvider:          metaProvider,
			Scratcher:               maps.NewScratcher(),
			Positioner:              page.NopPage,
			InSectionPositioner:     page.NopPage,
			ResourceMetaProvider:    metaProvider,
			ResourceParamsProvider:  metaProvider,
			PageMetaProvider:        metaProvider,
			RelatedKeywordsProvider: metaProvider,
			OutputFormatsProvider:   page.NopPage,
			ResourceTypesProvider:   pageTypesProvider,
			RefProvider:             page.NopPage,
			ShortcodeInfoProvider:   page.NopPage,
			LanguageProvider:        s,

			InternalDependencies: s,
			init:                 lazy.New(),
			m:                    metaProvider,
			s:                    s},
	}

	siteAdapter := pageSiteAdapter{s: s, p: ps}

	// The deprecated legacy methods need both the source file and the page.
	deprecatedWarningPage := struct {
		source.FileWithoutOverlap
		page.DeprecatedWarningPageMethods1
	}{
		FileWithoutOverlap:            metaProvider.File(),
		DeprecatedWarningPageMethods1: &pageDeprecatedWarning{p: ps},
	}

	ps.DeprecatedWarningPageMethods = page.NewDeprecatedWarningPage(deprecatedWarningPage)
	ps.pageMenus = &pageMenus{p: ps}
	ps.PageMenusProvider = ps.pageMenus
	ps.GetPageProvider = siteAdapter
	ps.GitInfoProvider = ps
	ps.TranslationsProvider = ps
	ps.ResourceDataProvider = &pageData{pageState: ps}
	ps.RawContentProvider = ps
	ps.ChildCareProvider = ps
	ps.TreeProvider = pageTree{p: ps}
	ps.Eqer = ps
	ps.TranslationKeyProvider = ps
	ps.ShortcodeInfoProvider = ps
	ps.PageRenderProvider = ps
	ps.AlternativeOutputFormatsProvider = ps

	return ps, nil
}
// newPageFromMeta creates a page from metadata only (no content source),
// e.g. for auto-generated section and taxonomy pages. The page outputs are
// prepared lazily on first init: standalone pages get a single fixed
// output, all other pages get one pageOutput per site render format,
// deduplicated by format name.
func newPageFromMeta(metaProvider *pageMeta) (*pageState, error) {
	ps, err := newPageBase(metaProvider)
	if err != nil {
		return nil, err
	}

	if err := metaProvider.applyDefaultValues(); err != nil {
		return nil, err
	}

	ps.init.Add(func() (interface{}, error) {
		pp, err := newPagePaths(metaProvider.s, ps, metaProvider)
		if err != nil {
			return nil, err
		}

		makeOut := func(f output.Format, render bool) *pageOutput {
			// No content provider for metadata-only pages.
			return newPageOutput(nil, ps, pp, f, render)
		}

		if ps.m.standalone {
			ps.pageOutput = makeOut(ps.m.outputFormats()[0], true)
		} else {
			ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats))
			created := make(map[string]*pageOutput)
			outputFormatsForPage := ps.m.outputFormats()
			for i, f := range ps.s.h.renderFormats {
				po, found := created[f.Name]
				if !found {
					// Only render formats this page is configured for.
					_, shouldRender := outputFormatsForPage.GetByName(f.Name)
					po = makeOut(f, shouldRender)
					created[f.Name] = po
				}
				ps.pageOutputs[i] = po
			}
		}

		if err := ps.initCommonProviders(pp); err != nil {
			return nil, err
		}

		return nil, nil
	})

	// err is guaranteed nil here; return an explicit nil instead of the
	// stale variable.
	return ps, nil
}
// newPageStandalone creates and fully initializes a standalone page with a
// single fixed output format.
// Used by the legacy 404, sitemap and robots.txt rendering
func newPageStandalone(m *pageMeta, f output.Format) (*pageState, error) {
	m.configuredOutputFormats = output.Formats{f}
	m.standalone = true
	p, err := newPageFromMeta(m)

	if err != nil {
		return nil, err
	}

	// Standalone pages are initialized eagerly.
	if err := p.initPage(); err != nil {
		return nil, err
	}

	return p, nil

}
// newPageWithContent creates a page backed by a source file: it parses the
// content, maps shortcodes and front matter, attaches Git info and lazily
// prepares one pageOutput per site render format.
func newPageWithContent(f *fileInfo, s *Site, content resource.OpenReadSeekCloser) (*pageState, error) {
	sections := s.sectionsFromFile(f)
	kind := s.kindFromFileInfoOrSections(f, sections)
	if kind == page.KindTaxonomy {
		s.PathSpec.MakePathsSanitized(sections)
	}

	metaProvider := &pageMeta{kind: kind, sections: sections, s: s, f: f}

	ps, err := newPageBase(metaProvider)
	if err != nil {
		return nil, err
	}

	gi, err := s.h.gitInfoForPage(ps)
	if err != nil {
		return nil, errors.Wrap(err, "failed to load Git data")
	}
	ps.gitInfo = gi

	r, err := content()
	if err != nil {
		return nil, err
	}
	defer r.Close()

	parseResult, err := pageparser.Parse(
		r,
		pageparser.Config{EnableEmoji: s.siteCfg.enableEmoji},
	)
	if err != nil {
		return nil, err
	}

	// Positions default to -1 (unknown) until content mapping sets them.
	ps.pageContent = pageContent{
		source: rawPageContent{
			parsed:         parseResult,
			posMainContent: -1,
			posSummaryEnd:  -1,
			posBodyStart:   -1,
		},
	}

	ps.shortcodeState = newShortcodeHandler(ps, ps.s, nil)

	if err := ps.mapContent(metaProvider); err != nil {
		return nil, ps.wrapError(err)
	}

	if err := metaProvider.applyDefaultValues(); err != nil {
		return nil, err
	}

	ps.init.Add(func() (interface{}, error) {
		// The rendered content can be shared across output formats when
		// the page is renderable and contains no shortcodes.
		reuseContent := ps.renderable && !ps.shortcodeState.hasShortcodes()

		// Creates what's needed for each output format.
		contentPerOutput := newPageContentOutput(ps)

		pp, err := newPagePaths(s, ps, metaProvider)
		if err != nil {
			return nil, err
		}

		// Prepare output formats for all sites.
		ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats))
		created := make(map[string]*pageOutput)

		outputFormatsForPage := ps.m.outputFormats()

		for i, f := range ps.s.h.renderFormats {
			if po, found := created[f.Name]; found {
				ps.pageOutputs[i] = po
				continue
			}

			_, render := outputFormatsForPage.GetByName(f.Name)
			var contentProvider *pageContentOutput
			if reuseContent && i > 0 {
				// Share the first output's content provider.
				contentProvider = ps.pageOutputs[0].cp
			} else {
				var err error
				contentProvider, err = contentPerOutput(f)
				if err != nil {
					return nil, err
				}
			}

			po := newPageOutput(contentProvider, ps, pp, f, render)
			ps.pageOutputs[i] = po
			created[f.Name] = po

		}

		if err := ps.initCommonProviders(pp); err != nil {
			return nil, err
		}

		return nil, nil
	})

	return ps, nil
}
// pageDeprecatedWarning implements page.DeprecatedWarningPageMethods1: the
// Page methods kept only for backwards compatibility.
type pageDeprecatedWarning struct {
	p *pageState
}

func (p *pageDeprecatedWarning) IsDraft() bool          { return p.p.m.draft }
func (p *pageDeprecatedWarning) Hugo() hugo.Info        { return p.p.s.Info.Hugo() }
func (p *pageDeprecatedWarning) LanguagePrefix() string { return p.p.s.Info.LanguagePrefix }

// GetParam looks up the (lowercased) key in the page params only.
func (p *pageDeprecatedWarning) GetParam(key string) interface{} {
	return p.p.m.params[strings.ToLower(key)]
}

// RSSLink returns the permalink of the RSS output format, or "" if the page
// has none.
func (p *pageDeprecatedWarning) RSSLink() template.URL {
	f := p.p.OutputFormats().Get("RSS")
	if f == nil {
		return ""
	}
	return template.URL(f.Permalink())
}

// URL returns the front matter url for regular pages when set, otherwise
// the relative permalink.
func (p *pageDeprecatedWarning) URL() string {
	if p.p.IsPage() && p.p.m.urlPaths.URL != "" {
		// This is the url set in front matter
		return p.p.m.urlPaths.URL
	}
	// Fall back to the relative permalink.
	return p.p.RelPermalink()

}

107
hugolib/page__output.go Normal file
View File

@ -0,0 +1,107 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/resource"
)
// newPageOutput assembles the per-output-format view of a page.
//
// cp holds the rendered content for this output format and may be nil, in
// which case the content and ToC providers fall back to page.NopPage.
// render is false when this page isn't configured to be rendered to f; the
// pageOutput is still created so there is one entry per site render format.
func newPageOutput(
	cp *pageContentOutput, // may be nil
	ps *pageState,
	pp pagePaths,
	f output.Format,
	render bool) *pageOutput {

	var targetPathsProvider targetPathsHolder
	var linksProvider resource.ResourceLinksProvider

	ft, found := pp.targetPaths[f.Name]
	if !found {
		// Link to the main output format
		ft = pp.targetPaths[pp.OutputFormats()[0].Format.Name]
	}
	targetPathsProvider = ft
	linksProvider = ft

	var paginatorProvider page.PaginatorProvider = page.NopPage
	var pag *pagePaginator

	// Only node (branch) pages that will actually be rendered to this
	// format get a real paginator.
	if render && ps.IsNode() {
		pag = &pagePaginator{source: ps}
		paginatorProvider = pag
	}

	var contentProvider page.ContentProvider = page.NopPage
	var tableOfContentsProvider page.TableOfContentsProvider = page.NopPage

	if cp != nil {
		contentProvider = cp
		tableOfContentsProvider = cp
	}

	// Bundle all per-output providers into one value satisfying
	// pagePerOutputProviders, embedded in the pageOutput below.
	providers := struct {
		page.ContentProvider
		page.TableOfContentsProvider
		page.PaginatorProvider
		resource.ResourceLinksProvider
		targetPather
	}{
		contentProvider,
		tableOfContentsProvider,
		paginatorProvider,
		linksProvider,
		targetPathsProvider,
	}

	po := &pageOutput{
		f:                      f,
		cp:                     cp,
		pagePerOutputProviders: providers,
		render:                 render,
		paginator:              pag,
	}

	return po
}
// We create a pageOutput for every output format combination, even if this
// particular page isn't configured to be rendered to that format.
type pageOutput struct {
	// Set if this page is configured to be rendered to this format
	// (see newPageOutput, where this comes from the page's output
	// format lookup).
	render bool

	f output.Format

	// Only set if render is set.
	// Note that this will be lazily initialized, so only used if actually
	// used in template(s).
	paginator *pagePaginator

	// This interface provides the functionality that is specific for this
	// output format.
	pagePerOutputProviders

	// This may be nil.
	cp *pageContentOutput
}

// enablePlaceholders forwards to the content object, if any.
func (p *pageOutput) enablePlaceholders() {
	if p.cp != nil {
		p.cp.enablePlaceholders()
	}
}

View File

@ -0,0 +1,83 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"sync"
"github.com/gohugoio/hugo/resources/page"
)
// pagePaginator provides the .Paginate and .Paginator template methods for
// a page output. The underlying pager is built at most once (guarded by
// paginatorInit); later calls return the cached first pager.
type pagePaginator struct {
	paginatorInit sync.Once
	current       *page.Pager

	source *pageState
}

// doPaginate holds the once-only pagination logic shared by Paginate and
// Paginator (previously duplicated in both methods). seq is a thunk so the
// sequence to paginate is only computed by the call that actually performs
// the initialization.
func (p *pagePaginator) doPaginate(seq func() interface{}, options ...interface{}) (*page.Pager, error) {
	var initErr error
	p.paginatorInit.Do(func() {
		pagerSize, err := page.ResolvePagerSize(p.source.s.Cfg, options...)
		if err != nil {
			initErr = err
			return
		}

		pd := p.source.targetPathDescriptor
		pd.Type = p.source.outputFormat()
		paginator, err := page.Paginate(pd, seq(), pagerSize)
		if err != nil {
			initErr = err
			return
		}

		// Expose the first pager; navigation to the others goes through it.
		p.current = paginator.Pagers()[0]
	})

	if initErr != nil {
		return nil, initErr
	}

	return p.current, nil
}

// Paginate paginates the given sequence. options may carry the pager size;
// otherwise the site's configured value is used.
func (p *pagePaginator) Paginate(seq interface{}, options ...interface{}) (*page.Pager, error) {
	return p.doPaginate(func() interface{} { return seq }, options...)
}

// Paginator paginates this page's own Pages collection.
func (p *pagePaginator) Paginator(options ...interface{}) (*page.Pager, error) {
	return p.doPaginate(func() interface{} { return p.source.Pages() }, options...)
}

148
hugolib/page__paths.go Normal file
View File

@ -0,0 +1,148 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"net/url"
"github.com/gohugoio/hugo/resources/page"
)
// newPagePaths computes the output formats, target file paths and
// permalinks for the given page across all of its output formats.
func newPagePaths(
	s *Site,
	p page.Page,
	pm *pageMeta) (pagePaths, error) {

	targetPathDescriptor, err := createTargetPathDescriptor(s, p, pm)
	if err != nil {
		return pagePaths{}, err
	}

	outputFormats := pm.outputFormats()
	if len(outputFormats) == 0 {
		// Fall back to the site defaults for this page kind.
		outputFormats = pm.s.outputFormats[pm.Kind()]
	}

	if len(outputFormats) == 0 {
		return pagePaths{}, nil
	}

	if pm.headless {
		// Headless pages keep only their first output format.
		outputFormats = outputFormats[:1]
	}

	pageOutputFormats := make(page.OutputFormats, len(outputFormats))
	targets := make(map[string]targetPathsHolder)

	for i, f := range outputFormats {
		desc := targetPathDescriptor
		desc.Type = f
		paths := page.CreateTargetPaths(desc)

		var relPermalink, permalink string

		// Headless pages get empty permalinks.
		if !pm.headless {
			relPermalink = paths.RelPermalink(s.PathSpec)
			permalink = paths.PermalinkForOutputFormat(s.PathSpec, f)
		}

		pageOutputFormats[i] = page.NewOutputFormat(relPermalink, permalink, len(outputFormats) == 1, f)

		// Use the main format for permalinks, usually HTML.
		permalinksIndex := 0
		if f.Permalinkable {
			// Unless it's permalinkable
			permalinksIndex = i
		}

		targets[f.Name] = targetPathsHolder{
			paths:        paths,
			OutputFormat: pageOutputFormats[permalinksIndex]}
	}

	return pagePaths{
		outputFormats:        pageOutputFormats,
		targetPaths:          targets,
		targetPathDescriptor: targetPathDescriptor,
	}, nil
}
// pagePaths holds the computed output formats and target paths for a page.
type pagePaths struct {
	// All output formats for this page, in configuration order.
	outputFormats page.OutputFormats

	// Target paths keyed by output format name.
	targetPaths map[string]targetPathsHolder

	// The descriptor the paths above were derived from.
	targetPathDescriptor page.TargetPathDescriptor
}

// OutputFormats returns all output formats computed for this page.
func (l pagePaths) OutputFormats() page.OutputFormats {
	return l.outputFormats
}
// createTargetPathDescriptor builds the page.TargetPathDescriptor used to
// compute target file paths and links for p.
func createTargetPathDescriptor(s *Site, p page.Page, pm *pageMeta) (page.TargetPathDescriptor, error) {
	var (
		dir      string
		baseName string
	)

	d := s.Deps

	if p.File() != nil {
		dir = p.File().Dir()
		baseName = p.File().TranslationBaseName()
	}

	// The sitemap is the only kind that is always forced into a subdir here.
	alwaysInSubDir := p.Kind() == kindSitemap

	desc := page.TargetPathDescriptor{
		PathSpec:    d.PathSpec,
		Kind:        p.Kind(),
		Sections:    p.SectionsEntries(),
		UglyURLs:    s.Info.uglyURLs(p),
		ForcePrefix: s.h.IsMultihost() || alwaysInSubDir,
		Dir:         dir,
		URL:         pm.urlPaths.URL,
	}

	// A front matter slug takes precedence over the file's base name.
	if pm.Slug() != "" {
		desc.BaseName = pm.Slug()
	} else {
		desc.BaseName = baseName
	}

	desc.PrefixFilePath = s.getLanguageTargetPathLang(alwaysInSubDir)
	desc.PrefixLink = s.getLanguagePermalinkLang(alwaysInSubDir)

	// Expand only page.KindPage and page.KindTaxonomy; don't expand other Kinds of Pages
	// like page.KindSection or page.KindTaxonomyTerm because they are "shallower" and
	// the permalink configuration values are likely to be redundant, e.g.
	// naively expanding /category/:slug/ would give /category/categories/ for
	// the "categories" page.KindTaxonomyTerm.
	if p.Kind() == page.KindPage || p.Kind() == page.KindTaxonomy {
		opath, err := d.ResourceSpec.Permalinks.Expand(p.Section(), p)
		if err != nil {
			return desc, err
		}

		if opath != "" {
			opath, _ = url.QueryUnescape(opath)
			desc.ExpandedPermalink = opath
		}
	}

	return desc, nil
}

445
hugolib/page__per_output.go Normal file
View File

@ -0,0 +1,445 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"bytes"
"context"
"fmt"
"html/template"
"strings"
"sync"
"unicode/utf8"
"github.com/gohugoio/hugo/lazy"
bp "github.com/gohugoio/hugo/bufferpool"
"github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/resource"
)
var (
	// Zero-value target paths used by the nop providers below.
	nopTargetPath = targetPathsHolder{}

	// nopPagePerOutput backs output formats a page isn't rendered to;
	// every provider is the no-op page.NopPage.
	nopPagePerOutput = struct {
		resource.ResourceLinksProvider
		page.ContentProvider
		page.PageRenderProvider
		page.PaginatorProvider
		page.TableOfContentsProvider
		page.AlternativeOutputFormatsProvider
		targetPather
	}{
		page.NopPage,
		page.NopPage,
		page.NopPage,
		page.NopPage,
		page.NopPage,
		page.NopPage,
		nopTargetPath,
	}
)
// newPageContentOutput returns a constructor for the content object of p
// for a given output format. The heavy lifting (shortcode rendering, markup
// rendering, summary/ToC extraction) is deferred into lazy inits branched
// off the page's main init, so content is only built when a template asks
// for it.
func newPageContentOutput(p *pageState) func(f output.Format) (*pageContentOutput, error) {

	parent := p.init

	return func(f output.Format) (*pageContentOutput, error) {
		cp := &pageContentOutput{
			p: p,
			f: f,
		}

		initContent := func() error {
			var err error
			var hasVariants bool
			// Render all shortcodes for this output format. hasVariants is
			// set if any shortcode produced format-specific output.
			cp.contentPlaceholders, hasVariants, err = p.shortcodeState.renderShortcodesForPage(p, f)
			if err != nil {
				return err
			}

			if p.render && !hasVariants {
				// We can reuse this for the other output formats
				cp.enableReuse()
			}

			cp.workContent = p.contentToRender(cp.contentPlaceholders)

			isHTML := cp.p.m.markup == "html"

			if p.renderable {
				if !isHTML {
					// Run the markup renderer and pull out the ToC.
					cp.workContent = cp.renderContent(p, cp.workContent)

					tmpContent, tmpTableOfContents := helpers.ExtractTOC(cp.workContent)
					cp.tableOfContents = helpers.BytesToHTML(tmpTableOfContents)
					cp.workContent = tmpContent
				}

				if cp.placeholdersEnabled {
					// ToC was accessed via .Page.TableOfContents in the shortcode,
					// at a time when the ToC wasn't ready.
					cp.contentPlaceholders[tocShortcodePlaceholder] = string(cp.tableOfContents)
				}

				if p.cmap.hasNonMarkdownShortcode || cp.placeholdersEnabled {
					// There are one or more replacement tokens to be replaced.
					cp.workContent, err = replaceShortcodeTokens(cp.workContent, cp.contentPlaceholders)
					if err != nil {
						return err
					}
				}

				if cp.p.source.hasSummaryDivider {
					if isHTML {
						src := p.source.parsed.Input()

						// Use the summary sections as they are provided by the user.
						if p.source.posSummaryEnd != -1 {
							cp.summary = helpers.BytesToHTML(src[p.source.posMainContent:p.source.posSummaryEnd])
						}

						if cp.p.source.posBodyStart != -1 {
							cp.workContent = src[cp.p.source.posBodyStart:]
						}

					} else {
						summary, content, err := splitUserDefinedSummaryAndContent(cp.p.m.markup, cp.workContent)
						if err != nil {
							// A failed summary split is logged, not fatal.
							cp.p.s.Log.ERROR.Printf("Failed to set user defined summary for page %q: %s", cp.p.pathOrTitle(), err)
						} else {
							cp.workContent = content
							cp.summary = helpers.BytesToHTML(summary)
						}
					}
				}
			}

			cp.content = helpers.BytesToHTML(cp.workContent)

			if !p.renderable {
				// Raw HTML without front matter renders via its own template.
				return cp.addSelfTemplate()
			}

			return nil
		}

		// Recursive loops can only happen in content files with template code (shortcodes etc.)
		// Avoid creating new goroutines if we don't have to.
		needTimeout := !p.renderable || p.shortcodeState.hasShortcodes()

		if needTimeout {
			cp.initMain = parent.BranchdWithTimeout(p.s.siteCfg.timeout, func(ctx context.Context) (interface{}, error) {
				return nil, initContent()
			})
		} else {
			cp.initMain = parent.Branch(func() (interface{}, error) {
				return nil, initContent()
			})
		}

		// Plain-text derivatives (word counts, auto summary) build on top
		// of the rendered content.
		cp.initPlain = cp.initMain.Branch(func() (interface{}, error) {
			cp.plain = helpers.StripHTML(string(cp.content))
			cp.plainWords = strings.Fields(cp.plain)
			cp.setWordCounts(p.m.isCJKLanguage)

			if err := cp.setAutoSummary(); err != nil {
				// Bug fix: was `return err, nil`, which stored the error as
				// the init *value* with a nil error, silently swallowing it.
				return nil, err
			}

			return nil, nil
		})

		return cp, nil
	}
}
// pageContentOutput represents the Page content for a given output format.
type pageContentOutput struct {
	f output.Format

	// If we can safely reuse this for other output formats.
	reuse     bool
	reuseInit sync.Once

	// The owning page.
	p *pageState

	// Lazy load dependencies
	initMain  *lazy.Init
	initPlain *lazy.Init

	// Set (once) when a placeholder has been inserted into the rendered
	// content; see enablePlaceholders.
	placeholdersEnabled     bool
	placeholdersEnabledInit sync.Once

	// Content state

	workContent []byte

	// Temporary storage of placeholders mapped to their content.
	// These are shortcodes etc. Some of these will need to be replaced
	// after any markup is rendered, so they share a common prefix.
	contentPlaceholders map[string]string

	// Content sections
	content         template.HTML
	summary         template.HTML
	tableOfContents template.HTML

	// Set by setAutoSummary when the derived summary cut content off.
	truncated bool

	// Plain-text derivatives, populated by initPlain.
	plainWords     []string
	plain          string
	fuzzyWordCount int
	wordCount      int
	readingTime    int
}
// Content returns the rendered content for this output format.
func (p *pageContentOutput) Content() (interface{}, error) {
	p.p.s.initInit(p.initMain, p.p)
	return p.content, nil
}

// FuzzyWordCount returns the word count rounded to a multiple of 100
// (see setWordCounts).
func (p *pageContentOutput) FuzzyWordCount() int {
	p.p.s.initInit(p.initPlain, p.p)
	return p.fuzzyWordCount
}

// Len returns the length in bytes of the rendered content.
func (p *pageContentOutput) Len() int {
	p.p.s.initInit(p.initMain, p.p)
	return len(p.content)
}

// Plain returns the content with HTML stripped.
func (p *pageContentOutput) Plain() string {
	p.p.s.initInit(p.initPlain, p.p)
	return p.plain
}

// PlainWords returns Plain split into fields.
func (p *pageContentOutput) PlainWords() []string {
	p.p.s.initInit(p.initPlain, p.p)
	return p.plainWords
}

// ReadingTime returns the estimated reading time (see setWordCounts for
// the formula).
func (p *pageContentOutput) ReadingTime() int {
	p.p.s.initInit(p.initPlain, p.p)
	return p.readingTime
}

// Summary returns the page summary. The plain-text init is only needed
// when the summary is derived automatically (no summary divider).
func (p *pageContentOutput) Summary() template.HTML {
	p.p.s.initInit(p.initMain, p.p)
	if !p.p.source.hasSummaryDivider {
		p.p.s.initInit(p.initPlain, p.p)
	}
	return p.summary
}

// TableOfContents returns the rendered ToC for this output format.
func (p *pageContentOutput) TableOfContents() template.HTML {
	p.p.s.initInit(p.initMain, p.p)
	return p.tableOfContents
}

// Truncated reports whether the summary cuts content off, checking the
// page-level flag first to avoid the plain-text init when possible.
func (p *pageContentOutput) Truncated() bool {
	if p.p.truncated {
		return true
	}
	p.p.s.initInit(p.initPlain, p.p)
	return p.truncated
}

// WordCount returns the number of words in the plain content.
func (p *pageContentOutput) WordCount() int {
	p.p.s.initInit(p.initPlain, p.p)
	return p.wordCount
}
// setAutoSummary derives the summary from the plain text when the author
// did not provide an explicit summary divider. It must run after the plain
// text fields are populated (see initPlain).
func (p *pageContentOutput) setAutoSummary() error {
	if p.p.source.hasSummaryDivider {
		// The author provided an explicit summary; nothing to derive.
		return nil
	}

	var summary string
	var truncated bool

	// CJK text is truncated by rune count, other languages by whole sentences.
	if p.p.m.isCJKLanguage {
		summary, truncated = p.p.s.ContentSpec.TruncateWordsByRune(p.plainWords)
	} else {
		summary, truncated = p.p.s.ContentSpec.TruncateWordsToWholeSentence(p.plain)
	}
	p.summary = template.HTML(summary)

	p.truncated = truncated

	return nil
}
// renderContent runs content through the configured markup renderer for
// this page's markup format, with ToC rendering enabled.
func (cp *pageContentOutput) renderContent(p page.Page, content []byte) []byte {
	return cp.p.s.ContentSpec.RenderBytes(&helpers.RenderingContext{
		Content: content, RenderTOC: true, PageFmt: cp.p.m.markup,
		Cfg:        p.Language(),
		DocumentID: p.File().UniqueID(), DocumentName: p.File().Path(),
		Config:     cp.p.getRenderingConfig()})
}
// setWordCounts computes wordCount, fuzzyWordCount and readingTime from the
// plain-text fields. CJK words are counted per rune; a word that is pure
// ASCII (byte length equals rune length) still counts as one word.
func (p *pageContentOutput) setWordCounts(isCJKLanguage bool) {
	if isCJKLanguage {
		total := 0
		for _, word := range p.plainWords {
			if runes := utf8.RuneCountInString(word); runes == len(word) {
				total++
			} else {
				total += runes
			}
		}
		p.wordCount = total
	} else {
		p.wordCount = helpers.TotalWords(p.plain)
	}

	// TODO(bep) is set in a test. Fix that.
	if p.fuzzyWordCount == 0 {
		p.fuzzyWordCount = (p.wordCount + 100) / 100 * 100
	}

	// Reading speed differs for CJK languages.
	if isCJKLanguage {
		p.readingTime = (p.wordCount + 500) / 501
	} else {
		p.readingTime = (p.wordCount + 212) / 213
	}
}
// addSelfTemplate registers the page's own content as a late template so
// that non-renderable pages can execute themselves as their own layout.
func (p *pageContentOutput) addSelfTemplate() error {
	self := p.p.selfLayoutForOutput(p.f)
	// Return the handler's error directly; no need for the err != nil dance.
	return p.p.s.TemplateHandler().AddLateTemplate(self, string(p.content))
}
// A callback to signal that we have inserted a placeholder into the rendered
// content. This avoids doing extra replacement work.
func (p *pageContentOutput) enablePlaceholders() {
	p.placeholdersEnabledInit.Do(func() {
		p.placeholdersEnabled = true
	})
}

// enableReuse marks this content as safe to share across output formats
// (set when no shortcode produced format-specific variants).
func (p *pageContentOutput) enableReuse() {
	p.reuseInit.Do(func() {
		p.reuse = true
	})
}
// pagePerOutputProviders is the set of providers that vary per output
// format; these will be shifted out when rendering a given output format.
type pagePerOutputProviders interface {
	targetPather
	page.ContentProvider
	page.PaginatorProvider
	page.TableOfContentsProvider
	resource.ResourceLinksProvider
}

// targetPather exposes the computed target paths for one output format.
type targetPather interface {
	targetPaths() page.TargetPaths
}

// targetPathsHolder pairs the target paths with the OutputFormat used for
// permalinks.
type targetPathsHolder struct {
	paths page.TargetPaths
	page.OutputFormat
}

func (t targetPathsHolder) targetPaths() page.TargetPaths {
	return t.paths
}
// executeToString renders templ with the given data and returns the output
// as a string, using a pooled buffer to avoid per-call allocations.
func executeToString(templ tpl.Template, data interface{}) (string, error) {
	buf := bp.GetBuffer()
	defer bp.PutBuffer(buf)

	err := templ.Execute(buf, data)
	if err != nil {
		return "", err
	}

	return buf.String(), nil
}
// splitUserDefinedSummaryAndContent splits the rendered content c into the
// user defined summary (delimited by the internal summary divider token)
// and the full content with the divider removed.
//
// It walks outward from the divider to the enclosing start/end tags
// ("<p>…</p>", or "<div>…</div>" for asciidoc) so the divider's wrapping
// element is stripped cleanly. Any panic (e.g. from slicing unexpected
// input) is recovered and reported as an error.
func splitUserDefinedSummaryAndContent(markup string, c []byte) (summary []byte, content []byte, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("summary split failed: %s", r)
		}
	}()

	startDivider := bytes.Index(c, internalSummaryDividerBaseBytes)

	if startDivider == -1 {
		// No divider: no summary, no content change.
		return
	}

	startTag := "p"
	switch markup {
	case "asciidoc":
		startTag = "div"
	}

	// Walk back and forward to the surrounding tags.
	start := bytes.LastIndex(c[:startDivider], []byte("<"+startTag))
	end := bytes.Index(c[startDivider:], []byte("</"+startTag))

	if start == -1 {
		// No opening tag before the divider; cut at the divider itself.
		// (The original also had an else branch computing
		// startDivider-(startDivider-start), which is a no-op.)
		start = startDivider
	}

	if end == -1 {
		end = startDivider + len(internalSummaryDividerBase)
	} else {
		// Skip past "</" + tag + ">".
		end = startDivider + end + len(startTag) + 3
	}

	// rst output needs its wrapping div re-closed on the summary.
	addDiv := markup == "rst"

	// Build the divider-free content in a fresh buffer. The previous
	// append(c[:start], ...) wrote into c's backing array past start,
	// corrupting any other slice aliasing it.
	withoutDivider := make([]byte, 0, len(c))
	withoutDivider = append(withoutDivider, c[:start]...)
	withoutDivider = append(withoutDivider, bytes.Trim(c[end:], "\n")...)

	if len(withoutDivider) > 0 {
		summary = bytes.TrimSpace(withoutDivider[:start])
	}

	if addDiv {
		// For the rst
		summary = append(append([]byte(nil), summary...), []byte("</div>")...)
	}

	content = bytes.TrimSpace(withoutDivider)

	return
}

76
hugolib/page__position.go Normal file
View File

@ -0,0 +1,76 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"github.com/gohugoio/hugo/lazy"
"github.com/gohugoio/hugo/resources/page"
)
// newPagePosition creates the Next/Prev navigation for a page, backed by
// the lazily resolved nextPrev.
func newPagePosition(n *nextPrev) pagePosition {
	return pagePosition{nextPrev: n}
}

// newPagePositionInSection creates the NextInSection/PrevInSection
// navigation for a page.
func newPagePositionInSection(n *nextPrev) pagePositionInSection {
	return pagePositionInSection{nextPrev: n}
}

// nextPrev holds a page's neighbors; they are resolved on first access via
// init.
type nextPrev struct {
	init     *lazy.Init
	prevPage page.Page
	nextPage page.Page
}

func (n *nextPrev) next() page.Page {
	n.init.Do()
	return n.nextPage
}

func (n *nextPrev) prev() page.Page {
	n.init.Do()
	return n.prevPage
}

// pagePosition provides Next/Prev and their NextPage/PrevPage aliases.
type pagePosition struct {
	*nextPrev
}

func (p pagePosition) Next() page.Page {
	return p.next()
}

// NextPage is an alias for Next.
func (p pagePosition) NextPage() page.Page {
	return p.Next()
}

func (p pagePosition) Prev() page.Page {
	return p.prev()
}

// PrevPage is an alias for Prev.
func (p pagePosition) PrevPage() page.Page {
	return p.Prev()
}

// pagePositionInSection provides NextInSection/PrevInSection.
type pagePositionInSection struct {
	*nextPrev
}

func (p pagePositionInSection) NextInSection() page.Page {
	return p.next()
}

func (p pagePositionInSection) PrevInSection() page.Page {
	return p.prev()
}

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -22,24 +22,43 @@ import (
"github.com/pkg/errors" "github.com/pkg/errors"
) )
type refArgs struct { func newPageRef(p *pageState) pageRef {
Path string return pageRef{p: p}
Lang string
OutputFormat string
} }
func (p *Page) decodeRefArgs(args map[string]interface{}) (refArgs, *Site, error) { type pageRef struct {
p *pageState
}
func (p pageRef) Ref(argsm map[string]interface{}) (string, error) {
return p.ref(argsm, p.p)
}
func (p pageRef) RefFrom(argsm map[string]interface{}, source interface{}) (string, error) {
return p.ref(argsm, source)
}
func (p pageRef) RelRef(argsm map[string]interface{}) (string, error) {
return p.relRef(argsm, p.p)
}
func (p pageRef) RelRefFrom(argsm map[string]interface{}, source interface{}) (string, error) {
return p.relRef(argsm, source)
}
func (p pageRef) decodeRefArgs(args map[string]interface{}) (refArgs, *Site, error) {
var ra refArgs var ra refArgs
err := mapstructure.WeakDecode(args, &ra) err := mapstructure.WeakDecode(args, &ra)
if err != nil { if err != nil {
return ra, nil, nil return ra, nil, nil
} }
s := p.s
if ra.Lang != "" && ra.Lang != p.Lang() { s := p.p.s
if ra.Lang != "" && ra.Lang != p.p.s.Language().Lang {
// Find correct site // Find correct site
found := false found := false
for _, ss := range p.s.owner.Sites { for _, ss := range p.p.s.h.Sites {
if ss.Lang() == ra.Lang { if ss.Lang() == ra.Lang {
found = true found = true
s = ss s = ss
@ -47,7 +66,7 @@ func (p *Page) decodeRefArgs(args map[string]interface{}) (refArgs, *Site, error
} }
if !found { if !found {
p.s.siteRefLinker.logNotFound(ra.Path, fmt.Sprintf("no site found with lang %q", ra.Lang), p, text.Position{}) p.p.s.siteRefLinker.logNotFound(ra.Path, fmt.Sprintf("no site found with lang %q", ra.Lang), nil, text.Position{})
return ra, nil, nil return ra, nil, nil
} }
} }
@ -55,18 +74,14 @@ func (p *Page) decodeRefArgs(args map[string]interface{}) (refArgs, *Site, error
return ra, s, nil return ra, s, nil
} }
func (p *Page) Ref(argsm map[string]interface{}) (string, error) { func (p pageRef) ref(argsm map[string]interface{}, source interface{}) (string, error) {
return p.ref(argsm, p)
}
func (p *Page) ref(argsm map[string]interface{}, source interface{}) (string, error) {
args, s, err := p.decodeRefArgs(argsm) args, s, err := p.decodeRefArgs(argsm)
if err != nil { if err != nil {
return "", errors.Wrap(err, "invalid arguments to Ref") return "", errors.Wrap(err, "invalid arguments to Ref")
} }
if s == nil { if s == nil {
return p.s.siteRefLinker.notFoundURL, nil return p.p.s.siteRefLinker.notFoundURL, nil
} }
if args.Path == "" { if args.Path == "" {
@ -77,18 +92,14 @@ func (p *Page) ref(argsm map[string]interface{}, source interface{}) (string, er
} }
func (p *Page) RelRef(argsm map[string]interface{}) (string, error) { func (p pageRef) relRef(argsm map[string]interface{}, source interface{}) (string, error) {
return p.relRef(argsm, p)
}
func (p *Page) relRef(argsm map[string]interface{}, source interface{}) (string, error) {
args, s, err := p.decodeRefArgs(argsm) args, s, err := p.decodeRefArgs(argsm)
if err != nil { if err != nil {
return "", errors.Wrap(err, "invalid arguments to Ref") return "", errors.Wrap(err, "invalid arguments to Ref")
} }
if s == nil { if s == nil {
return p.s.siteRefLinker.notFoundURL, nil return p.p.s.siteRefLinker.notFoundURL, nil
} }
if args.Path == "" { if args.Path == "" {
@ -98,3 +109,9 @@ func (p *Page) relRef(argsm map[string]interface{}, source interface{}) (string,
return s.refLink(args.Path, source, true, args.OutputFormat) return s.refLink(args.Path, source, true, args.OutputFormat)
} }
type refArgs struct {
Path string
Lang string
OutputFormat string
}

113
hugolib/page__tree.go Normal file
View File

@ -0,0 +1,113 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"github.com/gohugoio/hugo/common/types"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/resources/page"
)
// pageTree implements tree navigation (ancestors, descendants, sections)
// for a page, based on its section path.
type pageTree struct {
	p *pageState
}

// IsAncestor reports whether this page is an ancestor of other, by
// comparing section path prefixes.
func (pt pageTree) IsAncestor(other interface{}) (bool, error) {
	if pt.p == nil {
		return false, nil
	}

	pp, err := unwrapPage(other)
	if err != nil || pp == nil {
		return false, err
	}

	if pt.p.Kind() == page.KindPage && len(pt.p.SectionsEntries()) == len(pp.SectionsEntries()) {
		// A regular page is never its section's ancestor.
		return false, nil
	}

	return helpers.HasStringsPrefix(pp.SectionsEntries(), pt.p.SectionsEntries()), nil
}

// CurrentSection returns the page itself for home and section pages,
// otherwise its parent.
func (pt pageTree) CurrentSection() page.Page {
	p := pt.p

	if p.IsHome() || p.IsSection() {
		return p
	}

	return p.Parent()
}

// IsDescendant is the mirror of IsAncestor.
func (pt pageTree) IsDescendant(other interface{}) (bool, error) {
	if pt.p == nil {
		return false, nil
	}
	pp, err := unwrapPage(other)
	if err != nil || pp == nil {
		return false, err
	}

	if pp.Kind() == page.KindPage && len(pt.p.SectionsEntries()) == len(pp.SectionsEntries()) {
		// A regular page is never its section's descendant.
		return false, nil
	}
	return helpers.HasStringsPrefix(pt.p.SectionsEntries(), pp.SectionsEntries()), nil
}

// FirstSection walks up the parent chain and returns the topmost page
// below home.
func (pt pageTree) FirstSection() page.Page {
	p := pt.p

	parent := p.Parent()

	if types.IsNil(parent) || parent.IsHome() {
		return p
	}

	for {
		current := parent
		parent = parent.Parent()
		if types.IsNil(parent) || parent.IsHome() {
			return current
		}
	}
}

// InSection reports whether this page and other share the same current
// section.
func (pt pageTree) InSection(other interface{}) (bool, error) {
	if pt.p == nil || types.IsNil(other) {
		return false, nil
	}

	pp, err := unwrapPage(other)
	if err != nil {
		return false, err
	}

	if pp == nil {
		return false, nil
	}

	return pp.CurrentSection().Eq(pt.p.CurrentSection()), nil
}

// Parent returns this page's parent in the tree, if any.
func (pt pageTree) Parent() page.Page {
	return pt.p.parent
}

// Sections returns this page's child sections.
func (pt pageTree) Sections() page.Pages {
	return pt.p.subSections
}

View File

@ -1,233 +0,0 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"bytes"
"io"
"github.com/gohugoio/hugo/helpers"
errors "github.com/pkg/errors"
bp "github.com/gohugoio/hugo/bufferpool"
"github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/common/text"
"github.com/gohugoio/hugo/parser/metadecoders"
"github.com/gohugoio/hugo/parser/pageparser"
)
var (
internalSummaryDividerBase = "HUGOMORE42"
internalSummaryDividerBaseBytes = []byte(internalSummaryDividerBase)
internalSummaryDividerPre = []byte("\n\n" + internalSummaryDividerBase + "\n\n")
)
// The content related items on a Page.
type pageContent struct {
renderable bool
// workContent is a copy of rawContent that may be mutated during site build.
workContent []byte
shortcodeState *shortcodeHandler
source rawPageContent
}
type rawPageContent struct {
hasSummaryDivider bool
// The AST of the parsed page. Contains information about:
// shortcodes, front matter, summary indicators.
parsed pageparser.Result
// Returns the position in bytes after any front matter.
posMainContent int
}
// TODO(bep) lazy consolidate
func (p *Page) mapContent() error {
p.shortcodeState = newShortcodeHandler(p)
s := p.shortcodeState
p.renderable = true
p.source.posMainContent = -1
result := bp.GetBuffer()
defer bp.PutBuffer(result)
iter := p.source.parsed.Iterator()
fail := func(err error, i pageparser.Item) error {
return p.parseError(err, iter.Input(), i.Pos)
}
// the parser is guaranteed to return items in proper order or fail, so …
// … it's safe to keep some "global" state
var currShortcode shortcode
var ordinal int
Loop:
for {
it := iter.Next()
switch {
case it.Type == pageparser.TypeIgnore:
case it.Type == pageparser.TypeHTMLStart:
// This is HTML without front matter. It can still have shortcodes.
p.renderable = false
result.Write(it.Val)
case it.IsFrontMatter():
f := metadecoders.FormatFromFrontMatterType(it.Type)
m, err := metadecoders.Default.UnmarshalToMap(it.Val, f)
if err != nil {
if fe, ok := err.(herrors.FileError); ok {
return herrors.ToFileErrorWithOffset(fe, iter.LineNumber()-1)
} else {
return err
}
}
if err := p.updateMetaData(m); err != nil {
return err
}
next := iter.Peek()
if !next.IsDone() {
p.source.posMainContent = next.Pos
}
if !p.shouldBuild() {
// Nothing more to do.
return nil
}
case it.Type == pageparser.TypeLeadSummaryDivider:
result.Write(internalSummaryDividerPre)
p.source.hasSummaryDivider = true
// Need to determine if the page is truncated.
f := func(item pageparser.Item) bool {
if item.IsNonWhitespace() {
p.truncated = true
// Done
return false
}
return true
}
iter.PeekWalk(f)
// Handle shortcode
case it.IsLeftShortcodeDelim():
// let extractShortcode handle left delim (will do so recursively)
iter.Backup()
currShortcode, err := s.extractShortcode(ordinal, iter, p)
if currShortcode.name != "" {
s.nameSet[currShortcode.name] = true
}
if err != nil {
return fail(errors.Wrap(err, "failed to extract shortcode"), it)
}
if currShortcode.params == nil {
currShortcode.params = make([]string, 0)
}
placeHolder := s.createShortcodePlaceholder()
result.WriteString(placeHolder)
ordinal++
s.shortcodes.Add(placeHolder, currShortcode)
case it.Type == pageparser.TypeEmoji:
if emoji := helpers.Emoji(it.ValStr()); emoji != nil {
result.Write(emoji)
} else {
result.Write(it.Val)
}
case it.IsEOF():
break Loop
case it.IsError():
err := fail(errors.WithStack(errors.New(it.ValStr())), it)
currShortcode.err = err
return err
default:
result.Write(it.Val)
}
}
resultBytes := make([]byte, result.Len())
copy(resultBytes, result.Bytes())
p.workContent = resultBytes
return nil
}
func (p *Page) parse(reader io.Reader) error {
parseResult, err := pageparser.Parse(
reader,
pageparser.Config{EnableEmoji: p.s.Cfg.GetBool("enableEmoji")},
)
if err != nil {
return err
}
p.source = rawPageContent{
parsed: parseResult,
}
p.lang = p.File.Lang()
if p.s != nil && p.s.owner != nil {
gi, enabled := p.s.owner.gitInfo.forPage(p)
if gi != nil {
p.GitInfo = gi
} else if enabled {
p.s.Log.INFO.Printf("Failed to find GitInfo for page %q", p.Path())
}
}
return nil
}
func (p *Page) parseError(err error, input []byte, offset int) error {
if herrors.UnwrapFileError(err) != nil {
// Use the most specific location.
return err
}
pos := p.posFromInput(input, offset)
return herrors.NewFileError("md", -1, pos.LineNumber, pos.ColumnNumber, err)
}
func (p *Page) posFromInput(input []byte, offset int) text.Position {
lf := []byte("\n")
input = input[:offset]
lineNumber := bytes.Count(input, lf) + 1
endOfLastLine := bytes.LastIndex(input, lf)
return text.Position{
Filename: p.pathOrTitle(),
LineNumber: lineNumber,
ColumnNumber: offset - endOfLastLine,
Offset: offset,
}
}
func (p *Page) posFromPage(offset int) text.Position {
return p.posFromInput(p.source.parsed.Input(), offset)
}

View File

@ -1,47 +0,0 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"github.com/gohugoio/hugo/common/herrors"
errors "github.com/pkg/errors"
)
// errorf creates an error message prefixed with the page's language and
// path (or title). A non-nil err is wrapped with the message; a nil err
// yields a new error. If err already carries file context, it is
// returned as-is.
func (p *Page) errorf(err error, format string, a ...interface{}) error {
	if herrors.UnwrapErrorWithFileContext(err) != nil {
		// More isn't always better.
		return err
	}
	args := append([]interface{}{p.Lang(), p.pathOrTitle()}, a...)
	format = "[%s] page %q: " + format
	if err == nil {
		// Fix: the previous version also called errors.Errorf here and
		// discarded its result — a dead call with no effect.
		return fmt.Errorf(format, args...)
	}
	return errors.Wrapf(err, format, args...)
}
// errWithFileContext decorates err with file context (filename plus a
// source-line match) for this page's source file.
func (p *Page) errWithFileContext(err error) error {
	name := p.Filename()
	decorated, _ := herrors.WithFileContextForFile(
		err,
		name,
		name,
		p.s.SourceSpec.Fs.Source,
		herrors.SimpleLineMatcher)
	return decorated
}

40
hugolib/page_kinds.go Normal file
View File

@ -0,0 +1,40 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"github.com/gohugoio/hugo/resources/page"
)
var (
	// This is all the kinds we can expect to find in .Site.Pages.
	allKindsInPages = []string{page.KindPage, page.KindHome, page.KindSection, page.KindTaxonomy, page.KindTaxonomyTerm}

	// allKinds additionally includes the render-only node kinds below.
	allKinds = append(allKindsInPages, []string{kindRSS, kindSitemap, kindRobotsTXT, kind404}...)
)

const (
	// Temporary state.
	kindUnknown = "unknown"

	// The following are (currently) temporary nodes,
	// i.e. nodes we create just to render in isolation.
	kindRSS       = "RSS"
	kindSitemap   = "sitemap"
	kindRobotsTXT = "robotsTXT"
	kind404       = "404"

	// Resource type used for pages.
	pageResourceType = "page"
)

View File

@ -1,320 +0,0 @@
// Copyright 2017 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"html/template"
"os"
"strings"
"sync"
bp "github.com/gohugoio/hugo/bufferpool"
"github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/resources/resource"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/output"
)
// PageOutput represents one of potentially many output formats of a given
// Page.
type PageOutput struct {
	*Page

	// Pagination; initialized lazily via paginatorInit.
	paginator     *Pager
	paginatorInit sync.Once

	// Page output specific resources; initialized lazily via resourcesInit.
	resources     resource.Resources
	resourcesInit sync.Once

	// Keep this to create URL/path variations, i.e. paginators.
	targetPathDescriptor targetPathDescriptor

	outputFormat output.Format
}

// targetPath returns the target file path for this output format, with
// any addends (e.g. paginator path segments) appended.
func (p *PageOutput) targetPath(addends ...string) (string, error) {
	tp, err := p.createTargetPath(p.outputFormat, false, addends...)
	if err != nil {
		return "", err
	}
	return tp, nil
}
// newPageOutput creates a PageOutput for p in output format f. When
// createCopy is set, p is copied first so per-format lazy state (word
// counts etc.) is evaluated independently; initContent controls whether
// the copy's content is initialized.
func newPageOutput(p *Page, createCopy, initContent bool, f output.Format) (*PageOutput, error) {
	// TODO(bep) This is only needed for tests and we should get rid of it.
	if p.targetPathDescriptorPrototype == nil {
		if err := p.initPaths(); err != nil {
			return nil, err
		}
	}

	if createCopy {
		p = p.copy(initContent)
	}

	td, err := p.createTargetPathDescriptor(f)
	if err != nil {
		return nil, err
	}

	return &PageOutput{
		Page:                 p,
		outputFormat:         f,
		targetPathDescriptor: td,
	}, nil
}
// copy creates a copy of this PageOutput with the lazy sync.Once vars reset
// so they will be evaluated again, for word count calculations etc.
func (p *PageOutput) copyWithFormat(f output.Format, initContent bool) (*PageOutput, error) {
	c, err := newPageOutput(p.Page, true, initContent, f)
	if err != nil {
		return nil, err
	}
	// The paginator pointer is carried over, so pagination state is
	// shared between the copies.
	c.paginator = p.paginator
	return c, nil
}

// copy clones this PageOutput, keeping its current output format.
func (p *PageOutput) copy() (*PageOutput, error) {
	return p.copyWithFormat(p.outputFormat, false)
}
// layouts resolves the candidate layout names for this PageOutput. An
// explicit selfLayout (if set and no override is given) wins; an
// explicit layouts argument overrides the page's layout descriptor.
func (p *PageOutput) layouts(layouts ...string) ([]string, error) {
	if len(layouts) == 0 && p.selfLayout != "" {
		return []string{p.selfLayout}, nil
	}

	layoutDescriptor := p.layoutDescriptor

	if len(layouts) > 0 {
		layoutDescriptor.Layout = layouts[0]
		layoutDescriptor.LayoutOverride = true
	}

	return p.s.layoutHandler.For(
		layoutDescriptor,
		p.outputFormat)
}
// Render executes the first matching template for the given layout
// candidates with this PageOutput as data and returns the result as
// template.HTML. Errors are logged (once per distinct message) and
// yield an empty string rather than failing the build.
func (p *PageOutput) Render(layout ...string) template.HTML {
	l, err := p.layouts(layout...)
	if err != nil {
		p.s.DistinctErrorLog.Printf("in .Render: Failed to resolve layout %q for page %q", layout, p.pathOrTitle())
		return ""
	}

	for _, layout := range l {
		templ, found := p.s.Tmpl.Lookup(layout)
		if !found {
			// This is legacy from when we had only one output format and
			// HTML templates only. Some have references to layouts without suffix.
			// We default to good old HTML.
			templ, found = p.s.Tmpl.Lookup(layout + ".html")
		}
		if templ != nil {
			res, err := executeToString(templ, p)
			if err != nil {
				p.s.DistinctErrorLog.Printf("in .Render: Failed to execute template %q: %s", layout, err)
				return template.HTML("")
			}
			return template.HTML(res)
		}
	}

	return ""
}
// executeToString renders templ with data into a pooled buffer and
// returns the result as a string.
func executeToString(templ tpl.Template, data interface{}) (string, error) {
	buf := bp.GetBuffer()
	defer bp.PutBuffer(buf)

	err := templ.Execute(buf, data)
	if err != nil {
		return "", err
	}
	return buf.String(), nil
}
// Render delegates to the main (first) PageOutput's Render. A missing
// mainPageOutput is a programming error and panics.
func (p *Page) Render(layout ...string) template.HTML {
	if p.mainPageOutput == nil {
		panic(fmt.Sprintf("programming error: no mainPageOutput for %q", p.Path()))
	}
	return p.mainPageOutput.Render(layout...)
}
// OutputFormats holds a list of the relevant output formats for a given resource.
type OutputFormats []*OutputFormat

// OutputFormat links to a representation of a resource.
type OutputFormat struct {
	// Rel constains a value that can be used to construct a rel link.
	// This is value is fetched from the output format definition.
	// Note that for pages with only one output format,
	// this method will always return "canonical".
	// As an example, the AMP output format will, by default, return "amphtml".
	//
	// See:
	// https://www.ampproject.org/docs/guides/deploy/discovery
	//
	// Most other output formats will have "alternate" as value for this.
	Rel string

	// It may be tempting to export this, but let us hold on to that horse for a while.
	f output.Format

	// The owning page.
	p *Page
}
// Name returns this OutputFormat's name, i.e. HTML, AMP, JSON etc.
func (o OutputFormat) Name() string {
	return o.f.Name
}

// MediaType returns this OutputFormat's MediaType (MIME type).
func (o OutputFormat) MediaType() media.Type {
	return o.f.MediaType
}
// OutputFormats gives the output formats for this Page.
func (p *Page) OutputFormats() OutputFormats {
	var o OutputFormats
	for _, f := range p.outputFormats {
		o = append(o, newOutputFormat(p, f))
	}
	return o
}

// newOutputFormat wraps output.Format f for page p. A page with exactly
// one output format gets rel "canonical" instead of the format's own rel.
func newOutputFormat(p *Page, f output.Format) *OutputFormat {
	rel := f.Rel
	isCanonical := len(p.outputFormats) == 1
	if isCanonical {
		rel = "canonical"
	}
	return &OutputFormat{Rel: rel, f: f, p: p}
}
// AlternativeOutputFormats gives the alternative output formats for this PageOutput.
// Note that we use the term "alternative" and not "alternate" here, as it
// does not necessarily replace the other format, it is an alternative representation.
func (p *PageOutput) AlternativeOutputFormats() (OutputFormats, error) {
	var alternatives OutputFormats
	for _, format := range p.OutputFormats() {
		// Skip formats flagged as non-alternative and the current format itself.
		isSelf := format.f.Name == p.outputFormat.Name
		if format.f.NotAlternative || isSelf {
			continue
		}
		alternatives = append(alternatives, format)
	}
	return alternatives, nil
}
// deleteResource removes the resource from this PageOutput and the Page. They will
// always be of the same length, but may contain different elements.
func (p *PageOutput) deleteResource(i int) {
	p.resources = append(p.resources[:i], p.resources[i+1:]...)
	p.Page.Resources = append(p.Page.Resources[:i], p.Page.Resources[i+1:]...)
}

// Resources returns this output format's resource set, computed lazily
// exactly once per PageOutput.
func (p *PageOutput) Resources() resource.Resources {
	p.resourcesInit.Do(func() {
		// If the current out shares the same path as the main page output, we reuse
		// the resource set. For the "amp" use case, we need to clone them with new
		// base folder.
		ff := p.outputFormats[0]
		if p.outputFormat.Path == ff.Path {
			p.resources = p.Page.Resources
			return
		}

		// Clone it with new base.
		resources := make(resource.Resources, len(p.Page.Resources))

		for i, r := range p.Page.Resources {
			if c, ok := r.(resource.Cloner); ok {
				// Clone the same resource with a new target.
				resources[i] = c.WithNewBase(p.outputFormat.Path)
			} else {
				// Not clonable; share the original resource.
				resources[i] = r
			}
		}

		p.resources = resources
	})

	return p.resources
}
// renderResources publishes all source-backed resources of this
// PageOutput to their target locations. Resources whose source files
// have disappeared (rare; live-reload races) are dropped instead of
// reported as errors.
func (p *PageOutput) renderResources() error {
	for i, r := range p.Resources() {
		src, ok := r.(resource.Source)
		if !ok {
			// Pages gets rendered with the owning page.
			continue
		}

		if err := src.Publish(); err != nil {
			if os.IsNotExist(err) {
				// The resource has been deleted from the file system.
				// This should be extremely rare, but can happen on live reload in server
				// mode when the same resource is member of different page bundles.
				// NOTE(review): deleteResource shifts the indices of the
				// slice being ranged over; with multiple deletions in one
				// pass the wrong element could be removed — confirm.
				p.deleteResource(i)
			} else {
				p.s.Log.ERROR.Printf("Failed to publish Resource for page %q: %s", p.pathOrTitle(), err)
			}
		} else {
			p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Files)
		}
	}
	return nil
}
// AlternativeOutputFormats is only available on the top level rendering
// entry point, and not inside range loops on the Page collections.
// This method is just here to inform users of that restriction.
func (p *Page) AlternativeOutputFormats() (OutputFormats, error) {
	return nil, fmt.Errorf("AlternativeOutputFormats only available from the top level template context for page %q", p.Path())
}
// Get gets a OutputFormat given its name, i.e. json, html etc.
// It returns nil if not found.
func (o OutputFormats) Get(name string) *OutputFormat {
	for i := range o {
		// Format names are matched case-insensitively.
		if strings.EqualFold(o[i].f.Name, name) {
			return o[i]
		}
	}
	return nil
}
// Permalink returns the absolute permalink to this output format.
func (o *OutputFormat) Permalink() string {
	rel := o.p.createRelativePermalinkForOutputFormat(o.f)
	// Any error turning rel into an absolute URL is deliberately ignored
	// here; an empty permalink is returned instead.
	perm, _ := o.p.s.permalinkForOutputFormat(rel, o.f)
	return perm
}

// RelPermalink returns the relative permalink to this output format.
func (o *OutputFormat) RelPermalink() string {
	rel := o.p.createRelativePermalinkForOutputFormat(o.f)
	return o.p.s.PathSpec.PrependBasePath(rel, false)
}

View File

@ -1,312 +0,0 @@
// Copyright 2017 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"path/filepath"
"net/url"
"strings"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/output"
)
// targetPathDescriptor describes how a file path for a given resource
// should look like on the file system. The same descriptor is then later used to
// create both the permalinks and the relative links, paginator URLs etc.
//
// The big motivating behind this is to have only one source of truth for URLs,
// and by that also get rid of most of the fragile string parsing/encoding etc.
//
// Page.createTargetPathDescriptor is the Page adapter.
//
type targetPathDescriptor struct {
	PathSpec *helpers.PathSpec

	// The output format this path is created for.
	Type output.Format
	// The page kind, e.g. page.KindPage, KindHome.
	Kind string
	// The section path segments.
	Sections []string

	// For regular content pages this is either
	// 1) the Slug, if set,
	// 2) the file base name (TranslationBaseName).
	BaseName string

	// Source directory.
	Dir string

	// Language prefix, set if multilingual and if page should be placed in its
	// language subdir.
	LangPrefix string

	// Whether this is a multihost multilingual setup.
	IsMultihost bool

	// URL from front matter if set. Will override any Slug etc.
	URL string

	// Used to create paginator links.
	Addends string

	// The expanded permalink if defined for the section, ready to use.
	ExpandedPermalink string

	// Some types cannot have uglyURLs, even if globally enabled, RSS being one example.
	UglyURLs bool
}
// createTargetPathDescriptor adapts a Page and the given output.Format into
// a targetPathDescriptor. This descriptor can then be used to create paths
// and URLs for this Page.
func (p *Page) createTargetPathDescriptor(t output.Format) (targetPathDescriptor, error) {
	if p.targetPathDescriptorPrototype == nil {
		// The prototype is built once by initTargetPathDescriptor; calling
		// this before that has happened is a programming error.
		panic(fmt.Sprintf("Must run initTargetPathDescriptor() for page %q, kind %q", p.title, p.Kind))
	}
	// Copy the prototype and specialize it for this output format.
	d := *p.targetPathDescriptorPrototype
	d.Type = t
	return d, nil
}
// initTargetPathDescriptor builds the format-independent prototype
// descriptor for this page (kind, sections, base name, language prefix,
// expanded permalink etc.) and stores it on the page. It must run
// before createTargetPathDescriptor.
func (p *Page) initTargetPathDescriptor() error {
	d := &targetPathDescriptor{
		PathSpec:    p.s.PathSpec,
		Kind:        p.Kind,
		Sections:    p.sections,
		UglyURLs:    p.s.Info.uglyURLs(p),
		Dir:         filepath.ToSlash(p.Dir()),
		URL:         p.frontMatterURL,
		IsMultihost: p.s.owner.IsMultihost(),
	}

	// Slug from front matter wins over the file's base name.
	if p.Slug != "" {
		d.BaseName = p.Slug
	} else {
		d.BaseName = p.TranslationBaseName()
	}

	if p.shouldAddLanguagePrefix() {
		d.LangPrefix = p.Lang()
	}

	// Expand only KindPage and KindTaxonomy; don't expand other Kinds of Pages
	// like KindSection or KindTaxonomyTerm because they are "shallower" and
	// the permalink configuration values are likely to be redundant, e.g.
	// naively expanding /category/:slug/ would give /category/categories/ for
	// the "categories" KindTaxonomyTerm.
	if p.Kind == KindPage || p.Kind == KindTaxonomy {
		if override, ok := p.Site.Permalinks[p.Section()]; ok {
			opath, err := override.Expand(p)
			if err != nil {
				return err
			}

			opath, _ = url.QueryUnescape(opath)
			opath = filepath.FromSlash(opath)
			d.ExpandedPermalink = opath
		}
	}

	p.targetPathDescriptorPrototype = d

	return nil
}
// initURLs derives the page's permalink, relative permalink, relative
// target path base and layout descriptor from its main (first) output
// format. Output formats default to the site's per-kind configuration.
func (p *Page) initURLs() error {
	if len(p.outputFormats) == 0 {
		p.outputFormats = p.s.outputFormats[p.Kind]
	}
	target := filepath.ToSlash(p.createRelativeTargetPath())
	rel := p.s.PathSpec.URLizeFilename(target)

	var err error
	f := p.outputFormats[0]
	p.permalink, err = p.s.permalinkForOutputFormat(rel, f)
	if err != nil {
		return err
	}

	// Base form of the target path: no media suffix, no leading slash.
	p.relTargetPathBase = strings.TrimPrefix(strings.TrimSuffix(target, f.MediaType.FullSuffix()), "/")
	if prefix := p.s.GetLanguagePrefix(); prefix != "" {
		// Any language code in the path will be added later.
		p.relTargetPathBase = strings.TrimPrefix(p.relTargetPathBase, prefix+"/")
	}
	p.relPermalink = p.s.PathSpec.PrependBasePath(rel, false)
	p.layoutDescriptor = p.createLayoutDescriptor()
	return nil
}
// initPaths initializes this page's target path descriptor and then the
// URLs/permalinks derived from it.
func (p *Page) initPaths() error {
	if err := p.initTargetPathDescriptor(); err != nil {
		return err
	}
	return p.initURLs()
}
// createTargetPath creates the target filename for this Page for the given
// output.Format. Some additional URL parts can also be provided, the typical
// use case being pagination.
func (p *Page) createTargetPath(t output.Format, noLangPrefix bool, addends ...string) (string, error) {
	d, err := p.createTargetPathDescriptor(t)
	if err != nil {
		// Fix: this previously returned ("", nil), silently swallowing
		// the error and handing callers an empty path.
		return "", err
	}

	if noLangPrefix {
		d.LangPrefix = ""
	}

	if len(addends) > 0 {
		d.Addends = filepath.Join(addends...)
	}

	return createTargetPath(d), nil
}
// createTargetPath turns a targetPathDescriptor into the final,
// sanitized file-system path for the resource. The branches below
// handle, in order: list-type pages with sections, front matter URLs,
// regular pages, and finally node-type pages (home, RSS etc.).
func createTargetPath(d targetPathDescriptor) string {
	pagePath := helpers.FilePathSeparator

	// The top level index files, i.e. the home page etc., needs
	// the index base even when uglyURLs is enabled.
	needsBase := true

	isUgly := d.UglyURLs && !d.Type.NoUgly

	// If the base name equals the output format's base name (e.g.
	// "_redirects"), the file is effectively ugly regardless of config.
	if d.ExpandedPermalink == "" && d.BaseName != "" && d.BaseName == d.Type.BaseName {
		isUgly = true
	}

	if d.Kind != KindPage && d.URL == "" && len(d.Sections) > 0 {
		if d.ExpandedPermalink != "" {
			pagePath = filepath.Join(pagePath, d.ExpandedPermalink)
		} else {
			pagePath = filepath.Join(d.Sections...)
		}
		needsBase = false
	}

	if d.Type.Path != "" {
		pagePath = filepath.Join(pagePath, d.Type.Path)
	}

	if d.Kind != KindHome && d.URL != "" {
		// Front matter URL set: it overrides slug/base name.
		if d.IsMultihost && d.LangPrefix != "" && !strings.HasPrefix(d.URL, "/"+d.LangPrefix) {
			pagePath = filepath.Join(d.LangPrefix, pagePath, d.URL)
		} else {
			pagePath = filepath.Join(pagePath, d.URL)
		}

		if d.Addends != "" {
			pagePath = filepath.Join(pagePath, d.Addends)
		}

		// URLs ending in "/" or without a file extension get an index file.
		if strings.HasSuffix(d.URL, "/") || !strings.Contains(d.URL, ".") {
			pagePath = filepath.Join(pagePath, d.Type.BaseName+d.Type.MediaType.FullSuffix())
		}

	} else if d.Kind == KindPage {
		if d.ExpandedPermalink != "" {
			pagePath = filepath.Join(pagePath, d.ExpandedPermalink)
		} else {
			if d.Dir != "" {
				pagePath = filepath.Join(pagePath, d.Dir)
			}
			if d.BaseName != "" {
				pagePath = filepath.Join(pagePath, d.BaseName)
			}
		}

		if d.Addends != "" {
			pagePath = filepath.Join(pagePath, d.Addends)
		}

		if isUgly {
			pagePath += d.Type.MediaType.FullSuffix()
		} else {
			pagePath = filepath.Join(pagePath, d.Type.BaseName+d.Type.MediaType.FullSuffix())
		}

		if d.LangPrefix != "" {
			pagePath = filepath.Join(d.LangPrefix, pagePath)
		}
	} else {
		if d.Addends != "" {
			pagePath = filepath.Join(pagePath, d.Addends)
		}

		needsBase = needsBase && d.Addends == ""

		// No permalink expansion etc. for node type pages (for now)
		base := ""

		if needsBase || !isUgly {
			base = helpers.FilePathSeparator + d.Type.BaseName
		}

		pagePath += base + d.Type.MediaType.FullSuffix()

		if d.LangPrefix != "" {
			pagePath = filepath.Join(d.LangPrefix, pagePath)
		}
	}

	pagePath = filepath.Join(helpers.FilePathSeparator, pagePath)

	// Note: MakePathSanitized will lower case the path if
	// disablePathToLower isn't set.
	return d.PathSpec.MakePathSanitized(pagePath)
}
// createRelativeTargetPath creates the relative target path for this
// page using its main output format. Pages with no output formats at
// this point indicate a programming error and panic.
func (p *Page) createRelativeTargetPath() string {
	if len(p.outputFormats) == 0 {
		if p.Kind == kindUnknown {
			panic(fmt.Sprintf("Page %q has unknown kind", p.title))
		}
		panic(fmt.Sprintf("Page %q missing output format(s)", p.title))
	}

	// Choose the main output format. In most cases, this will be HTML.
	f := p.outputFormats[0]

	return p.createRelativeTargetPathForOutputFormat(f)
}
// createRelativePermalinkForOutputFormat URLizes the relative target
// path for f into a relative permalink.
func (p *Page) createRelativePermalinkForOutputFormat(f output.Format) string {
	return p.s.PathSpec.URLizeFilename(p.createRelativeTargetPathForOutputFormat(f))
}

// createRelativeTargetPathForOutputFormat creates the relative target
// path for this page in output format f. Errors are logged and yield an
// empty path.
func (p *Page) createRelativeTargetPathForOutputFormat(f output.Format) string {
	tp, err := p.createTargetPath(f, p.s.owner.IsMultihost())

	if err != nil {
		p.s.Log.ERROR.Printf("Failed to create permalink for page %q: %s", p.FullFilePath(), err)
		return ""
	}

	// For /index.json etc. we must use the full path.
	if f.MediaType.FullSuffix() == ".html" && filepath.Base(tp) == "index.html" {
		tp = strings.TrimSuffix(tp, f.BaseFilename())
	}

	return tp
}

View File

@ -1,194 +0,0 @@
// Copyright 2017 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"path/filepath"
"strings"
"testing"
"github.com/gohugoio/hugo/media"
"fmt"
"github.com/gohugoio/hugo/output"
)
// TestPageTargetPath exercises createTargetPath over the cross product
// of multihost x language prefix x ugly URLs for a table of page kinds,
// sections, base names, front matter URLs and paginator addends.
func TestPageTargetPath(t *testing.T) {

	pathSpec := newTestDefaultPathSpec(t)

	// A media type with no suffix and no delimiter, so target paths get
	// no file extension at all.
	noExtNoDelimMediaType := media.TextType
	noExtNoDelimMediaType.Suffixes = []string{}
	noExtNoDelimMediaType.Delimiter = ""

	// Netlify style _redirects
	noExtDelimFormat := output.Format{
		Name:      "NER",
		MediaType: noExtNoDelimMediaType,
		BaseName:  "_redirects",
	}

	for _, multiHost := range []bool{false, true} {
		for _, langPrefix := range []string{"", "no"} {
			for _, uglyURLs := range []bool{false, true} {
				t.Run(fmt.Sprintf("multihost=%t,langPrefix=%q,uglyURLs=%t", multiHost, langPrefix, uglyURLs),
					func(t *testing.T) {

						tests := []struct {
							name     string
							d        targetPathDescriptor
							expected string
						}{
							{"JSON home", targetPathDescriptor{Kind: KindHome, Type: output.JSONFormat}, "/index.json"},
							{"AMP home", targetPathDescriptor{Kind: KindHome, Type: output.AMPFormat}, "/amp/index.html"},
							{"HTML home", targetPathDescriptor{Kind: KindHome, BaseName: "_index", Type: output.HTMLFormat}, "/index.html"},
							{"Netlify redirects", targetPathDescriptor{Kind: KindHome, BaseName: "_index", Type: noExtDelimFormat}, "/_redirects"},
							{"HTML section list", targetPathDescriptor{
								Kind:     KindSection,
								Sections: []string{"sect1"},
								BaseName: "_index",
								Type:     output.HTMLFormat}, "/sect1/index.html"},
							{"HTML taxonomy list", targetPathDescriptor{
								Kind:     KindTaxonomy,
								Sections: []string{"tags", "hugo"},
								BaseName: "_index",
								Type:     output.HTMLFormat}, "/tags/hugo/index.html"},
							{"HTML taxonomy term", targetPathDescriptor{
								Kind:     KindTaxonomy,
								Sections: []string{"tags"},
								BaseName: "_index",
								Type:     output.HTMLFormat}, "/tags/index.html"},
							{
								"HTML page", targetPathDescriptor{
									Kind:     KindPage,
									Dir:      "/a/b",
									BaseName: "mypage",
									Sections: []string{"a"},
									Type:     output.HTMLFormat}, "/a/b/mypage/index.html"},
							{
								"HTML page with index as base", targetPathDescriptor{
									Kind:     KindPage,
									Dir:      "/a/b",
									BaseName: "index",
									Sections: []string{"a"},
									Type:     output.HTMLFormat}, "/a/b/index.html"},
							{
								"HTML page with special chars", targetPathDescriptor{
									Kind:     KindPage,
									Dir:      "/a/b",
									BaseName: "My Page!",
									Type:     output.HTMLFormat}, "/a/b/My-Page/index.html"},
							{"RSS home", targetPathDescriptor{Kind: kindRSS, Type: output.RSSFormat}, "/index.xml"},
							{"RSS section list", targetPathDescriptor{
								Kind:     kindRSS,
								Sections: []string{"sect1"},
								Type:     output.RSSFormat}, "/sect1/index.xml"},
							{
								"AMP page", targetPathDescriptor{
									Kind:     KindPage,
									Dir:      "/a/b/c",
									BaseName: "myamp",
									Type:     output.AMPFormat}, "/amp/a/b/c/myamp/index.html"},
							{
								"AMP page with URL with suffix", targetPathDescriptor{
									Kind:     KindPage,
									Dir:      "/sect/",
									BaseName: "mypage",
									URL:      "/some/other/url.xhtml",
									Type:     output.HTMLFormat}, "/some/other/url.xhtml"},
							{
								"JSON page with URL without suffix", targetPathDescriptor{
									Kind:     KindPage,
									Dir:      "/sect/",
									BaseName: "mypage",
									URL:      "/some/other/path/",
									Type:     output.JSONFormat}, "/some/other/path/index.json"},
							{
								"JSON page with URL without suffix and no trailing slash", targetPathDescriptor{
									Kind:     KindPage,
									Dir:      "/sect/",
									BaseName: "mypage",
									URL:      "/some/other/path",
									Type:     output.JSONFormat}, "/some/other/path/index.json"},
							{
								"HTML page with expanded permalink", targetPathDescriptor{
									Kind:              KindPage,
									Dir:               "/a/b",
									BaseName:          "mypage",
									ExpandedPermalink: "/2017/10/my-title",
									Type:              output.HTMLFormat}, "/2017/10/my-title/index.html"},
							{
								"Paginated HTML home", targetPathDescriptor{
									Kind:     KindHome,
									BaseName: "_index",
									Type:     output.HTMLFormat,
									Addends:  "page/3"}, "/page/3/index.html"},
							{
								"Paginated Taxonomy list", targetPathDescriptor{
									Kind:     KindTaxonomy,
									BaseName: "_index",
									Sections: []string{"tags", "hugo"},
									Type:     output.HTMLFormat,
									Addends:  "page/3"}, "/tags/hugo/page/3/index.html"},
							{
								"Regular page with addend", targetPathDescriptor{
									Kind:     KindPage,
									Dir:      "/a/b",
									BaseName: "mypage",
									Addends:  "c/d/e",
									Type:     output.HTMLFormat}, "/a/b/mypage/c/d/e/index.html"},
						}

						for i, test := range tests {
							// Specialize the descriptor for the current
							// multihost/langPrefix/uglyURLs combination.
							test.d.PathSpec = pathSpec
							test.d.UglyURLs = uglyURLs
							test.d.LangPrefix = langPrefix
							test.d.IsMultihost = multiHost
							test.d.Dir = filepath.FromSlash(test.d.Dir)
							isUgly := uglyURLs && !test.d.Type.NoUgly

							// Adjust the table's expected value for ugly
							// URLs and language prefixes.
							expected := test.expected

							// TODO(bep) simplify
							if test.d.Kind == KindPage && test.d.BaseName == test.d.Type.BaseName {
								// Expected value already matches; no adjustment.
							} else if test.d.Kind == KindHome && test.d.Type.Path != "" {
								// Format-specific path (e.g. AMP); no adjustment.
							} else if (!strings.HasPrefix(expected, "/index") || test.d.Addends != "") && test.d.URL == "" && isUgly {
								expected = strings.Replace(expected,
									"/"+test.d.Type.BaseName+"."+test.d.Type.MediaType.Suffix(),
									"."+test.d.Type.MediaType.Suffix(), -1)
							}

							if test.d.LangPrefix != "" && !(test.d.Kind == KindPage && test.d.URL != "") {
								expected = "/" + test.d.LangPrefix + expected
							} else if multiHost && test.d.LangPrefix != "" && test.d.URL != "" {
								expected = "/" + test.d.LangPrefix + expected
							}

							expected = filepath.FromSlash(expected)

							pagePath := createTargetPath(test.d)

							if pagePath != expected {
								t.Fatalf("[%d] [%s] targetPath expected %q, got: %q", i, test.name, expected, pagePath)
							}
						}
					})
			}
		}
	}
}

View File

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -63,40 +63,44 @@ func TestPermalink(t *testing.T) {
} }
for i, test := range tests { for i, test := range tests {
t.Run(fmt.Sprintf("%s-%d", test.file, i), func(t *testing.T) {
cfg, fs := newTestCfg() cfg, fs := newTestCfg()
cfg.Set("uglyURLs", test.uglyURLs) cfg.Set("uglyURLs", test.uglyURLs)
cfg.Set("canonifyURLs", test.canonifyURLs) cfg.Set("canonifyURLs", test.canonifyURLs)
cfg.Set("baseURL", test.base) cfg.Set("baseURL", test.base)
pageContent := fmt.Sprintf(`--- pageContent := fmt.Sprintf(`---
title: Page title: Page
slug: %q slug: %q
url: %q url: %q
output: ["HTML"]
--- ---
Content Content
`, test.slug, test.url) `, test.slug, test.url)
writeSource(t, fs, filepath.Join("content", filepath.FromSlash(test.file)), pageContent) writeSource(t, fs, filepath.Join("content", filepath.FromSlash(test.file)), pageContent)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
require.Len(t, s.RegularPages, 1) require.Len(t, s.RegularPages(), 1)
p := s.RegularPages[0] p := s.RegularPages()[0]
u := p.Permalink() u := p.Permalink()
expected := test.expectedAbs expected := test.expectedAbs
if u != expected { if u != expected {
t.Fatalf("[%d] Expected abs url: %s, got: %s", i, expected, u) t.Fatalf("[%d] Expected abs url: %s, got: %s", i, expected, u)
} }
u = p.RelPermalink() u = p.RelPermalink()
expected = test.expectedRel expected = test.expectedRel
if u != expected { if u != expected {
t.Errorf("[%d] Expected rel url: %s, got: %s", i, expected, u) t.Errorf("[%d] Expected rel url: %s, got: %s", i, expected, u)
} }
})
} }
} }

View File

@ -1,96 +0,0 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"reflect"
"strings"
"testing"
)
// Taxonomy front matter fixtures in the various supported formats
// (YAML inline list, YAML block list, YAML scalar, JSON and TOML).
var pageYamlWithTaxonomiesA = `---
tags: ['a', 'B', 'c']
categories: 'd'
---
YAML frontmatter with tags and categories taxonomy.`

var pageYamlWithTaxonomiesB = `---
tags:
 - "a"
 - "B"
 - "c"
categories: 'd'
---
YAML frontmatter with tags and categories taxonomy.`

var pageYamlWithTaxonomiesC = `---
tags: 'E'
categories: 'd'
---
YAML frontmatter with tags and categories taxonomy.`

var pageJSONWithTaxonomies = `{
  "categories": "D",
  "tags": [
    "a",
    "b",
    "c"
  ]
}
JSON Front Matter with tags and categories`

var pageTomlWithTaxonomies = `+++
tags = [ "a", "B", "c" ]
categories = "d"
+++
TOML Front Matter with tags and categories`
// TestParseTaxonomies verifies that tags and categories parsed from the
// fixture front matter formats are normalized to lower case by
// getParamToLower, for both list and scalar values.
func TestParseTaxonomies(t *testing.T) {
	t.Parallel()
	for _, test := range []string{pageTomlWithTaxonomies,
		pageJSONWithTaxonomies,
		pageYamlWithTaxonomiesA,
		pageYamlWithTaxonomiesB,
		pageYamlWithTaxonomiesC,
	} {

		s := newTestSite(t)
		p, _ := s.NewPage("page/with/taxonomy")
		_, err := p.ReadFrom(strings.NewReader(test))
		if err != nil {
			t.Fatalf("Failed parsing %q: %s", test, err)
		}

		param := p.getParamToLower("tags")

		if params, ok := param.([]string); ok {
			expected := []string{"a", "b", "c"}
			if !reflect.DeepEqual(params, expected) {
				t.Errorf("Expected %s: got: %s", expected, params)
			}
		} else if params, ok := param.(string); ok {
			// The scalar fixture ("tags: 'E'") lowercases to "e".
			expected := "e"
			if params != expected {
				t.Errorf("Expected %s: got: %s", expected, params)
			}
		}

		param = p.getParamToLower("categories")
		singleparam := param.(string)

		if singleparam != "d" {
			t.Fatalf("Expected: d, got: %s", singleparam)
		}
	}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,183 +0,0 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"os"
"strings"
"sync"
"testing"
"time"
"github.com/spf13/cast"
)
// Front matter fixtures covering the date formats Hugo accepts (and one
// deliberately invalid one).
const (
	pageWithInvalidDate = `---
date: 2010-05-02_15:29:31+08:00
---
Page With Invalid Date (replace T with _ for RFC 3339)`

	pageWithDateRFC3339 = `---
date: 2010-05-02T15:29:31+08:00
---
Page With Date RFC3339`

	pageWithDateRFC3339NoT = `---
date: 2010-05-02 15:29:31+08:00
---
Page With Date RFC3339_NO_T`

	pageWithRFC1123 = `---
date: Sun, 02 May 2010 15:29:31 PST
---
Page With Date RFC1123`

	pageWithDateRFC1123Z = `---
date: Sun, 02 May 2010 15:29:31 +0800
---
Page With Date RFC1123Z`

	pageWithDateRFC822 = `---
date: 02 May 10 15:29 PST
---
Page With Date RFC822`

	pageWithDateRFC822Z = `---
date: 02 May 10 15:29 +0800
---
Page With Date RFC822Z`

	pageWithDateANSIC = `---
date: Sun May 2 15:29:31 2010
---
Page With Date ANSIC`

	pageWithDateUnixDate = `---
date: Sun May 2 15:29:31 PST 2010
---
Page With Date UnixDate`

	pageWithDateRubyDate = `---
date: Sun May 02 15:29:31 +0800 2010
---
Page With Date RubyDate`

	pageWithDateHugoYearNumeric = `---
date: 2010-05-02
---
Page With Date HugoYearNumeric`

	pageWithDateHugoYear = `---
date: 02 May 2010
---
Page With Date HugoYear`

	pageWithDateHugoLong = `---
date: 02 May 2010 15:29 PST
---
Page With Date HugoLong`
)
// TestDegenerateDateFrontMatter verifies that an unparseable date in
// front matter leaves the page's Date at the time.Time zero value.
func TestDegenerateDateFrontMatter(t *testing.T) {
	t.Parallel()
	s := newTestSite(t)
	p, _ := s.newPageFrom(strings.NewReader(pageWithInvalidDate), "page/with/invalid/date")
	if p.Date != *new(time.Time) {
		t.Fatalf("Date should be set to time.Time zero value.  Got: %s", p.Date)
	}
}
// TestParsingDateInFrontMatter feeds pages with dates in each supported
// front matter format through the page parser and checks that the parsed
// page date matches the expected instant (compared with time.Time.Equal).
func TestParsingDateInFrontMatter(t *testing.T) {
	t.Parallel()
	s := newTestSite(t)
	tests := []struct {
		buf string
		dt  string
	}{
		{pageWithDateRFC3339, "2010-05-02T15:29:31+08:00"},
		{pageWithDateRFC3339NoT, "2010-05-02T15:29:31+08:00"},
		{pageWithDateRFC1123Z, "2010-05-02T15:29:31+08:00"},
		{pageWithDateRFC822Z, "2010-05-02T15:29:00+08:00"},
		{pageWithDateANSIC, "2010-05-02T15:29:31Z"},
		{pageWithDateRubyDate, "2010-05-02T15:29:31+08:00"},
		{pageWithDateHugoYearNumeric, "2010-05-02T00:00:00Z"},
		{pageWithDateHugoYear, "2010-05-02T00:00:00Z"},
	}

	// These fixtures use the short timezone name "PST" and are only run
	// when time.LoadLocation("PST") succeeds on the host.
	tzShortCodeTests := []struct {
		buf string
		dt  string
	}{
		{pageWithRFC1123, "2010-05-02T15:29:31-08:00"},
		// Was "2010-05-02T15:29:00-08:00Z": RFC 3339 does not allow a
		// numeric offset combined with the "Z" suffix, so the
		// time.Parse(time.RFC3339, ...) below could never parse it.
		{pageWithDateRFC822, "2010-05-02T15:29:00-08:00"},
		{pageWithDateUnixDate, "2010-05-02T15:29:31-08:00"},
		// NOTE(review): 15:21+08:00 looks inconsistent with the fixture's
		// "15:29 PST" — confirm the expected instant if this path is ever
		// exercised on a host where "PST" resolves.
		{pageWithDateHugoLong, "2010-05-02T15:21:00+08:00"},
	}

	if _, err := time.LoadLocation("PST"); err == nil {
		tests = append(tests, tzShortCodeTests...)
	} else {
		fmt.Fprintln(os.Stderr, "Skipping shortname timezone tests.")
	}

	for _, test := range tests {
		dt, e := time.Parse(time.RFC3339, test.dt)
		if e != nil {
			t.Fatalf("Unable to parse date time (RFC3339) for running the test: %s", e)
		}
		p, err := s.newPageFrom(strings.NewReader(test.buf), "page/with/date")
		if err != nil {
			// Fatal, not Fatalf: the message has no format verbs.
			t.Fatal("Expected to be able to parse page.")
		}
		if !dt.Equal(p.Date) {
			t.Errorf("Date does not equal frontmatter:\n%s\nExpecting: %s\n Got: %s. Diff: %s\n internal: %#v\n %#v", test.buf, dt, p.Date, dt.Sub(p.Date), dt, p.Date)
		}
	}
}
// Temp test https://github.com/gohugoio/hugo/issues/3059
//
// TestParsingDateParallel stress-tests date parsing from many goroutines to
// catch data races in time/cast parsing (run with -race).
func TestParsingDateParallel(t *testing.T) {
	t.Parallel()

	var wg sync.WaitGroup

	// Rename the outer counter to i: the original used j here and shadowed
	// it with the inner loop's j.
	for i := 0; i < 100; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for j := 0; j < 100; j++ {
				dateStr := "2010-05-02 15:29:31 +08:00"

				dt, err := time.Parse("2006-01-02 15:04:05 -07:00", dateStr)
				// t.Fatal must only be called from the goroutine running
				// the test function (testing package docs); in a spawned
				// goroutine use t.Error and return instead.
				if err != nil {
					t.Error(err)
					return
				}
				if dt.Year() != 2010 {
					t.Error("time.Parse: Invalid date:", dt)
					return
				}

				dt2 := cast.ToTime(dateStr)
				if dt2.Year() != 2010 {
					t.Error("cast.ToTime: Invalid date:", dt2.Year())
					return
				}
			}
		}()
	}
	wg.Wait()
}

50
hugolib/page_unwrap.go Normal file
View File

@ -0,0 +1,50 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"github.com/pkg/errors"
"github.com/gohugoio/hugo/resources/page"
)
// pageWrapper is implemented by types that wrap a page.Page (e.g. the
// wrapper created by newPageForShortcode); unwrapPage uses it to reach the
// underlying page.
type pageWrapper interface {
	// page returns the wrapped page.
	page() page.Page
}
// unwrapPage resolves in to its underlying page.Page. It is used in
// equality checks and similar. A nil input yields (nil, nil); any other
// unsupported type is an error.
func unwrapPage(in interface{}) (page.Page, error) {
	// An untyped nil matches none of the assertions below, so handle it
	// up front. The checks mirror the original type switch order:
	// *pageState before pageWrapper before the general page.Page.
	if in == nil {
		return nil, nil
	}
	if ps, ok := in.(*pageState); ok {
		return ps, nil
	}
	if w, ok := in.(pageWrapper); ok {
		return w.page(), nil
	}
	if p, ok := in.(page.Page); ok {
		return p, nil
	}
	return nil, errors.Errorf("unwrapPage: %T not supported", in)
}
// mustUnwrapPage is the panicking variant of unwrapPage, for call sites
// where failing to unwrap is a programmer error.
func mustUnwrapPage(in interface{}) page.Page {
	unwrapped, err := unwrapPage(in)
	if err == nil {
		return unwrapped
	}
	panic(err)
}

View File

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -14,25 +14,24 @@
package hugolib package hugolib
import ( import (
"path/filepath"
"strings"
"testing" "testing"
"github.com/gohugoio/hugo/resources/page"
"github.com/stretchr/testify/require"
) )
var simplePageYAML = `--- func TestUnwrapPage(t *testing.T) {
contenttype: "" assert := require.New(t)
---
Sample Text
`
func TestDegenerateMissingFolderInPageFilename(t *testing.T) { p := &pageState{}
t.Parallel()
s := newTestSite(t) assert.Equal(p, mustUnwrap(newPageForShortcode(p)))
p, err := s.newPageFrom(strings.NewReader(simplePageYAML), filepath.Join("foobar")) }
if err != nil {
t.Fatalf("Error in NewPageFrom") func mustUnwrap(v interface{}) page.Page {
} p, err := unwrapPage(v)
if p.Section() != "" { if err != nil {
t.Fatalf("No section should be set for a file path: foobar") panic(err)
} }
return p
} }

View File

@ -1,67 +0,0 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"html/template"
)
// PageWithoutContent is sent to the shortcodes. They cannot access the content
// they're a part of. It would cause an infinite regress.
//
// Go doesn't support virtual methods, so this careful dance is currently (I think)
// the best we can do.
type PageWithoutContent struct {
*Page
}
// Content returns an empty string.
func (p *PageWithoutContent) Content() (interface{}, error) {
return "", nil
}
// Truncated always returns false.
func (p *PageWithoutContent) Truncated() bool {
return false
}
// Summary returns an empty string.
func (p *PageWithoutContent) Summary() template.HTML {
return ""
}
// WordCount always returns 0.
func (p *PageWithoutContent) WordCount() int {
return 0
}
// ReadingTime always returns 0.
func (p *PageWithoutContent) ReadingTime() int {
return 0
}
// FuzzyWordCount always returns 0.
func (p *PageWithoutContent) FuzzyWordCount() int {
return 0
}
// Plain returns an empty string.
func (p *PageWithoutContent) Plain() string {
return ""
}
// PlainWords returns an empty string slice.
func (p *PageWithoutContent) PlainWords() []string {
return []string{}
}

View File

@ -1,4 +1,4 @@
// Copyright 2017-present The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -17,6 +17,7 @@ import (
"context" "context"
"fmt" "fmt"
"math" "math"
"path/filepath"
"runtime" "runtime"
_errors "github.com/pkg/errors" _errors "github.com/pkg/errors"
@ -38,12 +39,12 @@ type siteContentProcessor struct {
fileSinglesChan chan *fileInfo fileSinglesChan chan *fileInfo
// These assets should be just copied to destination. // These assets should be just copied to destination.
fileAssetsChan chan []pathLangFile fileAssetsChan chan pathLangFile
numWorkers int numWorkers int
// The output Pages // The output Pages
pagesChan chan *Page pagesChan chan *pageState
// Used for partial rebuilds (aka. live reload) // Used for partial rebuilds (aka. live reload)
// Will signal replacement of pages in the site collection. // Will signal replacement of pages in the site collection.
@ -64,9 +65,9 @@ func (s *siteContentProcessor) processSingle(fi *fileInfo) {
} }
} }
func (s *siteContentProcessor) processAssets(assets []pathLangFile) { func (s *siteContentProcessor) processAsset(asset pathLangFile) {
select { select {
case s.fileAssetsChan <- assets: case s.fileAssetsChan <- asset:
case <-s.ctx.Done(): case <-s.ctx.Done():
} }
} }
@ -77,7 +78,7 @@ func newSiteContentProcessor(ctx context.Context, partialBuild bool, s *Site) *s
numWorkers = n numWorkers = n
} }
numWorkers = int(math.Ceil(float64(numWorkers) / float64(len(s.owner.Sites)))) numWorkers = int(math.Ceil(float64(numWorkers) / float64(len(s.h.Sites))))
return &siteContentProcessor{ return &siteContentProcessor{
ctx: ctx, ctx: ctx,
@ -86,9 +87,9 @@ func newSiteContentProcessor(ctx context.Context, partialBuild bool, s *Site) *s
handleContent: newHandlerChain(s), handleContent: newHandlerChain(s),
fileBundlesChan: make(chan *bundleDir, numWorkers), fileBundlesChan: make(chan *bundleDir, numWorkers),
fileSinglesChan: make(chan *fileInfo, numWorkers), fileSinglesChan: make(chan *fileInfo, numWorkers),
fileAssetsChan: make(chan []pathLangFile, numWorkers), fileAssetsChan: make(chan pathLangFile, numWorkers),
numWorkers: numWorkers, numWorkers: numWorkers,
pagesChan: make(chan *Page, numWorkers), pagesChan: make(chan *pageState, numWorkers),
} }
} }
@ -127,6 +128,7 @@ func (s *siteContentProcessor) process(ctx context.Context) error {
if !ok { if !ok {
return nil return nil
} }
err := s.readAndConvertContentFile(f) err := s.readAndConvertContentFile(f)
if err != nil { if err != nil {
return err return err
@ -140,22 +142,20 @@ func (s *siteContentProcessor) process(ctx context.Context) error {
g2.Go(func() error { g2.Go(func() error {
for { for {
select { select {
case files, ok := <-s.fileAssetsChan: case file, ok := <-s.fileAssetsChan:
if !ok { if !ok {
return nil return nil
} }
for _, file := range files { f, err := s.site.BaseFs.Content.Fs.Open(file.Filename())
f, err := s.site.BaseFs.Content.Fs.Open(file.Filename()) if err != nil {
if err != nil { return _errors.Wrap(err, "failed to open assets file")
return _errors.Wrap(err, "failed to open assets file") }
} filename := filepath.Join(s.site.GetTargetLanguageBasePath(), file.Path())
err = s.site.publish(&s.site.PathSpec.ProcessingStats.Files, file.Path(), f) err = s.site.publish(&s.site.PathSpec.ProcessingStats.Files, filename, f)
f.Close() f.Close()
if err != nil { if err != nil {
return err return err
}
} }
case <-ctx.Done(): case <-ctx.Done():
return ctx.Err() return ctx.Err()
} }
@ -192,8 +192,6 @@ func (s *siteContentProcessor) process(ctx context.Context) error {
return err return err
} }
s.site.rawAllPages.sort()
return nil return nil
} }

View File

@ -116,7 +116,7 @@ func newCapturer(
// these channels. // these channels.
type captureResultHandler interface { type captureResultHandler interface {
handleSingles(fis ...*fileInfo) handleSingles(fis ...*fileInfo)
handleCopyFiles(fis ...pathLangFile) handleCopyFile(fi pathLangFile)
captureBundlesHandler captureBundlesHandler
} }
@ -141,10 +141,10 @@ func (c *captureResultHandlerChain) handleBundles(b *bundleDirs) {
} }
} }
func (c *captureResultHandlerChain) handleCopyFiles(files ...pathLangFile) { func (c *captureResultHandlerChain) handleCopyFile(file pathLangFile) {
for _, h := range c.handlers { for _, h := range c.handlers {
if hh, ok := h.(captureResultHandler); ok { if hh, ok := h.(captureResultHandler); ok {
hh.handleCopyFiles(files...) hh.handleCopyFile(file)
} }
} }
} }
@ -444,7 +444,7 @@ func (c *capturer) handleNonBundle(
} }
c.handler.handleSingles(f) c.handler.handleSingles(f)
} else { } else {
c.handler.handleCopyFiles(fi) c.handler.handleCopyFile(fi)
} }
} }
} }
@ -457,7 +457,7 @@ func (c *capturer) copyOrHandleSingle(fi *fileInfo) {
c.handler.handleSingles(fi) c.handler.handleSingles(fi)
} else { } else {
// These do not currently need any further processing. // These do not currently need any further processing.
c.handler.handleCopyFiles(fi) c.handler.handleCopyFile(fi)
} }
} }

View File

@ -64,12 +64,10 @@ func (s *storeFilenames) handleBundles(d *bundleDirs) {
s.dirKeys = append(s.dirKeys, keys...) s.dirKeys = append(s.dirKeys, keys...)
} }
func (s *storeFilenames) handleCopyFiles(files ...pathLangFile) { func (s *storeFilenames) handleCopyFile(file pathLangFile) {
s.Lock() s.Lock()
defer s.Unlock() defer s.Unlock()
for _, file := range files { s.copyNames = append(s.copyNames, filepath.ToSlash(file.Filename()))
s.copyNames = append(s.copyNames, filepath.ToSlash(file.Filename()))
}
} }
func (s *storeFilenames) sortedStr() string { func (s *storeFilenames) sortedStr() string {
@ -224,9 +222,9 @@ C:
type noOpFileStore int type noOpFileStore int
func (noOpFileStore) handleSingles(fis ...*fileInfo) {} func (noOpFileStore) handleSingles(fis ...*fileInfo) {}
func (noOpFileStore) handleBundles(b *bundleDirs) {} func (noOpFileStore) handleBundles(b *bundleDirs) {}
func (noOpFileStore) handleCopyFiles(files ...pathLangFile) {} func (noOpFileStore) handleCopyFile(file pathLangFile) {}
func BenchmarkPageBundlerCapture(b *testing.B) { func BenchmarkPageBundlerCapture(b *testing.B) {
capturers := make([]*capturer, b.N) capturers := make([]*capturer, b.N)

View File

@ -1,4 +1,4 @@
// Copyright 2017-present The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -17,11 +17,11 @@ import (
"errors" "errors"
"fmt" "fmt"
"path/filepath" "path/filepath"
"sort"
"github.com/gohugoio/hugo/common/hugio"
"strings" "strings"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/resources"
"github.com/gohugoio/hugo/resources/resource" "github.com/gohugoio/hugo/resources/resource"
) )
@ -50,13 +50,9 @@ func init() {
func newHandlerChain(s *Site) contentHandler { func newHandlerChain(s *Site) contentHandler {
c := &contentHandlers{s: s} c := &contentHandlers{s: s}
contentFlow := c.parsePage(c.processFirstMatch( contentFlow := c.parsePage(
// Handles all files with a content file extension. See above.
c.handlePageContent(), c.handlePageContent(),
)
// Every HTML file without front matter will be passed on to this handler.
c.handleHTMLContent(),
))
c.rootHandler = c.processFirstMatch( c.rootHandler = c.processFirstMatch(
contentFlow, contentFlow,
@ -93,12 +89,12 @@ func (c *contentHandlers) processFirstMatch(handlers ...contentHandler) func(ctx
type handlerContext struct { type handlerContext struct {
// These are the pages stored in Site. // These are the pages stored in Site.
pages chan<- *Page pages chan<- *pageState
doNotAddToSiteCollections bool doNotAddToSiteCollections bool
currentPage *Page currentPage *pageState
parentPage *Page parentPage *pageState
bundle *bundleDir bundle *bundleDir
@ -110,10 +106,7 @@ type handlerContext struct {
func (c *handlerContext) ext() string { func (c *handlerContext) ext() string {
if c.currentPage != nil { if c.currentPage != nil {
if c.currentPage.Markup != "" { return c.currentPage.contentMarkupType()
return c.currentPage.Markup
}
return c.currentPage.Ext()
} }
if c.bundle != nil { if c.bundle != nil {
@ -175,9 +168,9 @@ func (c *handlerContext) isContentFile() bool {
type ( type (
handlerResult struct { handlerResult struct {
err error err error
handled bool handled bool
resource resource.Resource result interface{}
} }
contentHandler func(ctx *handlerContext) handlerResult contentHandler func(ctx *handlerContext) handlerResult
@ -196,27 +189,27 @@ func (c *contentHandlers) parsePage(h contentHandler) contentHandler {
result := handlerResult{handled: true} result := handlerResult{handled: true}
fi := ctx.file() fi := ctx.file()
f, err := fi.Open() content := func() (hugio.ReadSeekCloser, error) {
if err != nil { f, err := fi.Open()
return handlerResult{err: fmt.Errorf("(%s) failed to open content file: %s", fi.Filename(), err)} if err != nil {
return nil, fmt.Errorf("failed to open content file %q: %s", fi.Filename(), err)
}
return f, nil
} }
defer f.Close()
p := c.s.newPageFromFile(fi) ps, err := newPageWithContent(fi, c.s, content)
_, err = p.ReadFrom(f)
if err != nil { if err != nil {
return handlerResult{err: err} return handlerResult{err: err}
} }
if !p.shouldBuild() { if !c.s.shouldBuild(ps) {
if !ctx.doNotAddToSiteCollections { if !ctx.doNotAddToSiteCollections {
ctx.pages <- p ctx.pages <- ps
} }
return result return result
} }
ctx.currentPage = p ctx.currentPage = ps
if ctx.bundle != nil { if ctx.bundle != nil {
// Add the bundled files // Add the bundled files
@ -226,39 +219,20 @@ func (c *contentHandlers) parsePage(h contentHandler) contentHandler {
if res.err != nil { if res.err != nil {
return res return res
} }
if res.resource != nil { if res.result != nil {
if pageResource, ok := res.resource.(*Page); ok { switch resv := res.result.(type) {
pageResource.resourcePath = filepath.ToSlash(childCtx.target) case *pageState:
pageResource.parent = p resv.m.resourcePath = filepath.ToSlash(childCtx.target)
resv.parent = ps
ps.addResources(resv)
case resource.Resource:
ps.addResources(resv)
default:
panic("Unknown type")
} }
p.Resources = append(p.Resources, res.resource)
} }
} }
sort.SliceStable(p.Resources, func(i, j int) bool {
if p.Resources[i].ResourceType() < p.Resources[j].ResourceType() {
return true
}
p1, ok1 := p.Resources[i].(*Page)
p2, ok2 := p.Resources[j].(*Page)
if ok1 != ok2 {
return ok2
}
if ok1 {
return defaultPageSort(p1, p2)
}
return p.Resources[i].RelPermalink() < p.Resources[j].RelPermalink()
})
// Assign metadata from front matter if set
if len(p.resourcesMetadata) > 0 {
resources.AssignMetadata(p.resourcesMetadata, p.Resources...)
}
} }
return h(ctx) return h(ctx)
@ -267,39 +241,13 @@ func (c *contentHandlers) parsePage(h contentHandler) contentHandler {
func (c *contentHandlers) handlePageContent() contentHandler { func (c *contentHandlers) handlePageContent() contentHandler {
return func(ctx *handlerContext) handlerResult { return func(ctx *handlerContext) handlerResult {
if ctx.supports("html", "htm") {
return notHandled
}
p := ctx.currentPage
p.workContent = p.renderContent(p.workContent)
tmpContent, tmpTableOfContents := helpers.ExtractTOC(p.workContent)
p.TableOfContents = helpers.BytesToHTML(tmpTableOfContents)
p.workContent = tmpContent
if !ctx.doNotAddToSiteCollections {
ctx.pages <- p
}
return handlerResult{handled: true, resource: p}
}
}
func (c *contentHandlers) handleHTMLContent() contentHandler {
return func(ctx *handlerContext) handlerResult {
if !ctx.supports("html", "htm") {
return notHandled
}
p := ctx.currentPage p := ctx.currentPage
if !ctx.doNotAddToSiteCollections { if !ctx.doNotAddToSiteCollections {
ctx.pages <- p ctx.pages <- p
} }
return handlerResult{handled: true, resource: p} return handlerResult{handled: true, result: p}
} }
} }
@ -309,16 +257,31 @@ func (c *contentHandlers) createResource() contentHandler {
return notHandled return notHandled
} }
// TODO(bep) consolidate with multihost logic + clean up
outputFormats := ctx.parentPage.m.outputFormats()
seen := make(map[string]bool)
var targetBasePaths []string
// Make sure bundled resources are published to all of the ouptput formats'
// sub paths.
for _, f := range outputFormats {
p := f.Path
if seen[p] {
continue
}
seen[p] = true
targetBasePaths = append(targetBasePaths, p)
}
resource, err := c.s.ResourceSpec.New( resource, err := c.s.ResourceSpec.New(
resources.ResourceSourceDescriptor{ resources.ResourceSourceDescriptor{
TargetPathBuilder: ctx.parentPage.subResourceTargetPathFactory, TargetPaths: ctx.parentPage.getTargetPaths,
SourceFile: ctx.source, SourceFile: ctx.source,
RelTargetFilename: ctx.target, RelTargetFilename: ctx.target,
URLBase: c.s.GetURLLanguageBasePath(), TargetBasePaths: targetBasePaths,
TargetBasePaths: []string{c.s.GetTargetLanguageBasePath()},
}) })
return handlerResult{err: err, handled: true, resource: resource} return handlerResult{err: err, handled: true, result: resource}
} }
} }

View File

@ -1,4 +1,4 @@
// Copyright 2017-present The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -14,12 +14,15 @@
package hugolib package hugolib
import ( import (
"github.com/gohugoio/hugo/common/loggers"
"os" "os"
"path"
"runtime" "runtime"
"strings"
"testing" "testing"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
"io" "io"
@ -47,7 +50,11 @@ func TestPageBundlerSiteRegular(t *testing.T) {
for _, baseURLPath := range []string{"", "/hugo"} { for _, baseURLPath := range []string{"", "/hugo"} {
for _, canonify := range []bool{false, true} { for _, canonify := range []bool{false, true} {
for _, ugly := range []bool{false, true} { for _, ugly := range []bool{false, true} {
t.Run(fmt.Sprintf("ugly=%t,canonify=%t,path=%s", ugly, canonify, baseURLPath), baseURLPathId := baseURLPath
if baseURLPathId == "" {
baseURLPathId = "NONE"
}
t.Run(fmt.Sprintf("ugly=%t,canonify=%t,path=%s", ugly, canonify, baseURLPathId),
func(t *testing.T) { func(t *testing.T) {
baseURL := baseBaseURL + baseURLPath baseURL := baseBaseURL + baseURLPath
relURLBase := baseURLPath relURLBase := baseURLPath
@ -70,9 +77,10 @@ func TestPageBundlerSiteRegular(t *testing.T) {
cfg.Set("outputFormats", map[string]interface{}{ cfg.Set("outputFormats", map[string]interface{}{
"CUSTOMO": map[string]interface{}{ "CUSTOMO": map[string]interface{}{
"mediaType": media.HTMLType, "mediaType": media.HTMLType,
"baseName": "cindex", "baseName": "cindex",
"path": "cpath", "path": "cpath",
"permalinkable": true,
}, },
}) })
@ -84,70 +92,92 @@ func TestPageBundlerSiteRegular(t *testing.T) {
cfg.Set("uglyURLs", ugly) cfg.Set("uglyURLs", ugly)
s := buildSingleSite(t, deps.DepsCfg{Logger: loggers.NewWarningLogger(), Fs: fs, Cfg: cfg}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{Logger: loggers.NewErrorLogger(), Fs: fs, Cfg: cfg}, BuildCfg{})
th := testHelper{s.Cfg, s.Fs, t} th := testHelper{s.Cfg, s.Fs, t}
assert.Len(s.RegularPages, 8) assert.Len(s.RegularPages(), 8)
singlePage := s.getPage(KindPage, "a/1.md") singlePage := s.getPage(page.KindPage, "a/1.md")
assert.Equal("", singlePage.BundleType()) assert.Equal("", singlePage.BundleType())
assert.NotNil(singlePage) assert.NotNil(singlePage)
assert.Equal(singlePage, s.getPage("page", "a/1")) assert.Equal(singlePage, s.getPage("page", "a/1"))
assert.Equal(singlePage, s.getPage("page", "1")) assert.Equal(singlePage, s.getPage("page", "1"))
assert.Contains(singlePage.content(), "TheContent") assert.Contains(content(singlePage), "TheContent")
if ugly { relFilename := func(basePath, outBase string) (string, string) {
assert.Equal(relURLBase+"/a/1.html", singlePage.RelPermalink()) rel := basePath
th.assertFileContent(filepath.FromSlash("/work/public/a/1.html"), "TheContent") if ugly {
rel = strings.TrimSuffix(basePath, "/") + ".html"
}
} else { var filename string
assert.Equal(relURLBase+"/a/1/", singlePage.RelPermalink()) if !ugly {
th.assertFileContent(filepath.FromSlash("/work/public/a/1/index.html"), "TheContent") filename = path.Join(basePath, outBase)
} else {
filename = rel
}
rel = fmt.Sprintf("%s%s", relURLBase, rel)
return rel, filename
} }
// Check both output formats
rel, filename := relFilename("/a/1/", "index.html")
th.assertFileContent(filepath.Join("/work/public", filename),
"TheContent",
"Single RelPermalink: "+rel,
)
rel, filename = relFilename("/cpath/a/1/", "cindex.html")
th.assertFileContent(filepath.Join("/work/public", filename),
"TheContent",
"Single RelPermalink: "+rel,
)
th.assertFileContent(filepath.FromSlash("/work/public/images/hugo-logo.png"), "content") th.assertFileContent(filepath.FromSlash("/work/public/images/hugo-logo.png"), "content")
// This should be just copied to destination. // This should be just copied to destination.
th.assertFileContent(filepath.FromSlash("/work/public/assets/pic1.png"), "content") th.assertFileContent(filepath.FromSlash("/work/public/assets/pic1.png"), "content")
leafBundle1 := s.getPage(KindPage, "b/my-bundle/index.md") leafBundle1 := s.getPage(page.KindPage, "b/my-bundle/index.md")
assert.NotNil(leafBundle1) assert.NotNil(leafBundle1)
assert.Equal("leaf", leafBundle1.BundleType()) assert.Equal("leaf", leafBundle1.BundleType())
assert.Equal("b", leafBundle1.Section()) assert.Equal("b", leafBundle1.Section())
sectionB := s.getPage(KindSection, "b") sectionB := s.getPage(page.KindSection, "b")
assert.NotNil(sectionB) assert.NotNil(sectionB)
home, _ := s.Info.Home() home, _ := s.Info.Home()
assert.Equal("branch", home.BundleType()) assert.Equal("branch", home.BundleType())
// This is a root bundle and should live in the "home section" // This is a root bundle and should live in the "home section"
// See https://github.com/gohugoio/hugo/issues/4332 // See https://github.com/gohugoio/hugo/issues/4332
rootBundle := s.getPage(KindPage, "root") rootBundle := s.getPage(page.KindPage, "root")
assert.NotNil(rootBundle) assert.NotNil(rootBundle)
assert.True(rootBundle.Parent().IsHome()) assert.True(rootBundle.Parent().IsHome())
if ugly { if !ugly {
assert.Equal(relURLBase+"/root.html", rootBundle.RelPermalink()) th.assertFileContent(filepath.FromSlash("/work/public/root/index.html"), "Single RelPermalink: "+relURLBase+"/root/")
} else { th.assertFileContent(filepath.FromSlash("/work/public/cpath/root/cindex.html"), "Single RelPermalink: "+relURLBase+"/cpath/root/")
assert.Equal(relURLBase+"/root/", rootBundle.RelPermalink())
} }
leafBundle2 := s.getPage(KindPage, "a/b/index.md") leafBundle2 := s.getPage(page.KindPage, "a/b/index.md")
assert.NotNil(leafBundle2) assert.NotNil(leafBundle2)
unicodeBundle := s.getPage(KindPage, "c/bundle/index.md") unicodeBundle := s.getPage(page.KindPage, "c/bundle/index.md")
assert.NotNil(unicodeBundle) assert.NotNil(unicodeBundle)
pageResources := leafBundle1.Resources.ByType(pageResourceType) pageResources := leafBundle1.Resources().ByType(pageResourceType)
assert.Len(pageResources, 2) assert.Len(pageResources, 2)
firstPage := pageResources[0].(*Page) firstPage := pageResources[0].(page.Page)
secondPage := pageResources[1].(*Page) secondPage := pageResources[1].(page.Page)
assert.Equal(filepath.FromSlash("/work/base/b/my-bundle/1.md"), firstPage.pathOrTitle(), secondPage.pathOrTitle()) assert.Equal(filepath.FromSlash("/work/base/b/my-bundle/1.md"), firstPage.File().Filename(), secondPage.File().Filename())
assert.Contains(firstPage.content(), "TheContent") assert.Contains(content(firstPage), "TheContent")
assert.Equal(6, len(leafBundle1.Resources)) assert.Equal(6, len(leafBundle1.Resources()))
// Verify shortcode in bundled page // Verify shortcode in bundled page
assert.Contains(secondPage.content(), filepath.FromSlash("MyShort in b/my-bundle/2.md")) assert.Contains(content(secondPage), filepath.FromSlash("MyShort in b/my-bundle/2.md"))
// https://github.com/gohugoio/hugo/issues/4582 // https://github.com/gohugoio/hugo/issues/4582
assert.Equal(leafBundle1, firstPage.Parent()) assert.Equal(leafBundle1, firstPage.Parent())
@ -157,20 +187,10 @@ func TestPageBundlerSiteRegular(t *testing.T) {
assert.Equal(secondPage, pageResources.GetMatch("2*")) assert.Equal(secondPage, pageResources.GetMatch("2*"))
assert.Nil(pageResources.GetMatch("doesnotexist*")) assert.Nil(pageResources.GetMatch("doesnotexist*"))
imageResources := leafBundle1.Resources.ByType("image") imageResources := leafBundle1.Resources().ByType("image")
assert.Equal(3, len(imageResources)) assert.Equal(3, len(imageResources))
image := imageResources[0]
altFormat := leafBundle1.OutputFormats().Get("CUSTOMO") assert.NotNil(leafBundle1.OutputFormats().Get("CUSTOMO"))
assert.NotNil(altFormat)
assert.Equal(baseURL+"/2017/pageslug/c/logo.png", image.Permalink())
th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/c/logo.png"), "content")
th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/c/logo.png"), "content")
// Custom media type defined in site config.
assert.Len(leafBundle1.Resources.ByType("bepsays"), 1)
relPermalinker := func(s string) string { relPermalinker := func(s string) string {
return fmt.Sprintf(s, relURLBase) return fmt.Sprintf(s, relURLBase)
@ -180,12 +200,33 @@ func TestPageBundlerSiteRegular(t *testing.T) {
return fmt.Sprintf(s, baseURL) return fmt.Sprintf(s, baseURL)
} }
if permalinker == nil { if ugly {
th.assertFileContent("/work/public/2017/pageslug.html",
relPermalinker("Single RelPermalink: %s/2017/pageslug.html"),
permalinker("Single Permalink: %s/2017/pageslug.html"),
relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"),
permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg"))
} else {
th.assertFileContent("/work/public/2017/pageslug/index.html",
relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"),
permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg"))
th.assertFileContent("/work/public/cpath/2017/pageslug/cindex.html",
relPermalinker("Single RelPermalink: %s/cpath/2017/pageslug/"),
relPermalinker("Short Sunset RelPermalink: %s/cpath/2017/pageslug/sunset2.jpg"),
relPermalinker("Sunset RelPermalink: %s/cpath/2017/pageslug/sunset1.jpg"),
permalinker("Sunset Permalink: %s/cpath/2017/pageslug/sunset1.jpg"),
)
} }
th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/c/logo.png"), "content")
th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/c/logo.png"), "content")
th.assertFileNotExist("/work/public/cpath/cpath/2017/pageslug/c/logo.png")
// Custom media type defined in site config.
assert.Len(leafBundle1.Resources().ByType("bepsays"), 1)
if ugly { if ugly {
assert.Equal(relURLBase+"/2017/pageslug.html", leafBundle1.RelPermalink())
assert.Equal(baseURL+"/2017/pageslug.html", leafBundle1.Permalink())
th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug.html"), th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug.html"),
"TheContent", "TheContent",
relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"), relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"),
@ -202,23 +243,15 @@ func TestPageBundlerSiteRegular(t *testing.T) {
) )
th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug.html"), "TheContent") th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug.html"), "TheContent")
assert.Equal(relURLBase+"/a/b.html", leafBundle2.RelPermalink())
// 은행 // 은행
assert.Equal(relURLBase+"/c/%EC%9D%80%ED%96%89.html", unicodeBundle.RelPermalink())
th.assertFileContent(filepath.FromSlash("/work/public/c/은행.html"), "Content for 은행")
th.assertFileContent(filepath.FromSlash("/work/public/c/은행/logo-은행.png"), "은행 PNG") th.assertFileContent(filepath.FromSlash("/work/public/c/은행/logo-은행.png"), "은행 PNG")
} else { } else {
assert.Equal(relURLBase+"/2017/pageslug/", leafBundle1.RelPermalink())
assert.Equal(baseURL+"/2017/pageslug/", leafBundle1.Permalink())
th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/index.html"), "TheContent") th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/index.html"), "TheContent")
th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/cindex.html"), "TheContent") th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/cindex.html"), "TheContent")
th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/index.html"), "Single Title") th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/index.html"), "Single Title")
th.assertFileContent(filepath.FromSlash("/work/public/root/index.html"), "Single Title") th.assertFileContent(filepath.FromSlash("/work/public/root/index.html"), "Single Title")
assert.Equal(relURLBase+"/a/b/", leafBundle2.RelPermalink())
} }
}) })
@ -249,11 +282,11 @@ func TestPageBundlerSiteMultilingual(t *testing.T) {
s := sites.Sites[0] s := sites.Sites[0]
assert.Equal(8, len(s.RegularPages)) assert.Equal(8, len(s.RegularPages()))
assert.Equal(16, len(s.Pages)) assert.Equal(16, len(s.Pages()))
assert.Equal(31, len(s.AllPages)) assert.Equal(31, len(s.AllPages()))
bundleWithSubPath := s.getPage(KindPage, "lb/index") bundleWithSubPath := s.getPage(page.KindPage, "lb/index")
assert.NotNil(bundleWithSubPath) assert.NotNil(bundleWithSubPath)
// See https://github.com/gohugoio/hugo/issues/4312 // See https://github.com/gohugoio/hugo/issues/4312
@ -267,30 +300,30 @@ func TestPageBundlerSiteMultilingual(t *testing.T) {
// and probably also just b (aka "my-bundle") // and probably also just b (aka "my-bundle")
// These may also be translated, so we also need to test that. // These may also be translated, so we also need to test that.
// "bf", "my-bf-bundle", "index.md + nn // "bf", "my-bf-bundle", "index.md + nn
bfBundle := s.getPage(KindPage, "bf/my-bf-bundle/index") bfBundle := s.getPage(page.KindPage, "bf/my-bf-bundle/index")
assert.NotNil(bfBundle) assert.NotNil(bfBundle)
assert.Equal("en", bfBundle.Lang()) assert.Equal("en", bfBundle.Language().Lang)
assert.Equal(bfBundle, s.getPage(KindPage, "bf/my-bf-bundle/index.md")) assert.Equal(bfBundle, s.getPage(page.KindPage, "bf/my-bf-bundle/index.md"))
assert.Equal(bfBundle, s.getPage(KindPage, "bf/my-bf-bundle")) assert.Equal(bfBundle, s.getPage(page.KindPage, "bf/my-bf-bundle"))
assert.Equal(bfBundle, s.getPage(KindPage, "my-bf-bundle")) assert.Equal(bfBundle, s.getPage(page.KindPage, "my-bf-bundle"))
nnSite := sites.Sites[1] nnSite := sites.Sites[1]
assert.Equal(7, len(nnSite.RegularPages)) assert.Equal(7, len(nnSite.RegularPages()))
bfBundleNN := nnSite.getPage(KindPage, "bf/my-bf-bundle/index") bfBundleNN := nnSite.getPage(page.KindPage, "bf/my-bf-bundle/index")
assert.NotNil(bfBundleNN) assert.NotNil(bfBundleNN)
assert.Equal("nn", bfBundleNN.Lang()) assert.Equal("nn", bfBundleNN.Language().Lang)
assert.Equal(bfBundleNN, nnSite.getPage(KindPage, "bf/my-bf-bundle/index.nn.md")) assert.Equal(bfBundleNN, nnSite.getPage(page.KindPage, "bf/my-bf-bundle/index.nn.md"))
assert.Equal(bfBundleNN, nnSite.getPage(KindPage, "bf/my-bf-bundle")) assert.Equal(bfBundleNN, nnSite.getPage(page.KindPage, "bf/my-bf-bundle"))
assert.Equal(bfBundleNN, nnSite.getPage(KindPage, "my-bf-bundle")) assert.Equal(bfBundleNN, nnSite.getPage(page.KindPage, "my-bf-bundle"))
// See https://github.com/gohugoio/hugo/issues/4295 // See https://github.com/gohugoio/hugo/issues/4295
// Every resource should have its Name prefixed with its base folder. // Every resource should have its Name prefixed with its base folder.
cBundleResources := bundleWithSubPath.Resources.Match("c/**") cBundleResources := bundleWithSubPath.Resources().Match("c/**")
assert.Equal(4, len(cBundleResources)) assert.Equal(4, len(cBundleResources))
bundlePage := bundleWithSubPath.Resources.GetMatch("c/page*") bundlePage := bundleWithSubPath.Resources().GetMatch("c/page*")
assert.NotNil(bundlePage) assert.NotNil(bundlePage)
assert.IsType(&Page{}, bundlePage) assert.IsType(&pageState{}, bundlePage)
}) })
} }
@ -329,15 +362,15 @@ func TestMultilingualDisableLanguage(t *testing.T) {
s := sites.Sites[0] s := sites.Sites[0]
assert.Equal(8, len(s.RegularPages)) assert.Equal(8, len(s.RegularPages()))
assert.Equal(16, len(s.Pages)) assert.Equal(16, len(s.Pages()))
// No nn pages // No nn pages
assert.Equal(16, len(s.AllPages)) assert.Equal(16, len(s.AllPages()))
for _, p := range s.rawAllPages { for _, p := range s.rawAllPages {
assert.True(p.Lang() != "nn") assert.True(p.Language().Lang != "nn")
} }
for _, p := range s.AllPages { for _, p := range s.AllPages() {
assert.True(p.Lang() != "nn") assert.True(p.Language().Lang != "nn")
} }
} }
@ -358,11 +391,11 @@ func TestPageBundlerSiteWitSymbolicLinksInContent(t *testing.T) {
th := testHelper{s.Cfg, s.Fs, t} th := testHelper{s.Cfg, s.Fs, t}
assert.Equal(7, len(s.RegularPages)) assert.Equal(7, len(s.RegularPages()))
a1Bundle := s.getPage(KindPage, "symbolic2/a1/index.md") a1Bundle := s.getPage(page.KindPage, "symbolic2/a1/index.md")
assert.NotNil(a1Bundle) assert.NotNil(a1Bundle)
assert.Equal(2, len(a1Bundle.Resources)) assert.Equal(2, len(a1Bundle.Resources()))
assert.Equal(1, len(a1Bundle.Resources.ByType(pageResourceType))) assert.Equal(1, len(a1Bundle.Resources().ByType(pageResourceType)))
th.assertFileContent(filepath.FromSlash(workDir+"/public/a/page/index.html"), "TheContent") th.assertFileContent(filepath.FromSlash(workDir+"/public/a/page/index.html"), "TheContent")
th.assertFileContent(filepath.FromSlash(workDir+"/public/symbolic1/s1/index.html"), "TheContent") th.assertFileContent(filepath.FromSlash(workDir+"/public/symbolic1/s1/index.html"), "TheContent")
@ -416,28 +449,27 @@ HEADLESS {{< myShort >}}
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
assert.Equal(1, len(s.RegularPages)) assert.Equal(1, len(s.RegularPages()))
assert.Equal(1, len(s.headlessPages)) assert.Equal(1, len(s.headlessPages))
regular := s.getPage(KindPage, "a/index") regular := s.getPage(page.KindPage, "a/index")
assert.Equal("/a/s1/", regular.RelPermalink()) assert.Equal("/a/s1/", regular.RelPermalink())
headless := s.getPage(KindPage, "b/index") headless := s.getPage(page.KindPage, "b/index")
assert.NotNil(headless) assert.NotNil(headless)
assert.True(headless.headless)
assert.Equal("Headless Bundle in Topless Bar", headless.Title()) assert.Equal("Headless Bundle in Topless Bar", headless.Title())
assert.Equal("", headless.RelPermalink()) assert.Equal("", headless.RelPermalink())
assert.Equal("", headless.Permalink()) assert.Equal("", headless.Permalink())
assert.Contains(headless.content(), "HEADLESS SHORTCODE") assert.Contains(content(headless), "HEADLESS SHORTCODE")
headlessResources := headless.Resources headlessResources := headless.Resources()
assert.Equal(3, len(headlessResources)) assert.Equal(3, len(headlessResources))
assert.Equal(2, len(headlessResources.Match("l*"))) assert.Equal(2, len(headlessResources.Match("l*")))
pageResource := headlessResources.GetMatch("p*") pageResource := headlessResources.GetMatch("p*")
assert.NotNil(pageResource) assert.NotNil(pageResource)
assert.IsType(&Page{}, pageResource) assert.IsType(&pageState{}, pageResource)
p := pageResource.(*Page) p := pageResource.(page.Page)
assert.Contains(p.content(), "SHORTCODE") assert.Contains(content(p), "SHORTCODE")
assert.Equal("p1.md", p.Name()) assert.Equal("p1.md", p.Name())
th := testHelper{s.Cfg, s.Fs, t} th := testHelper{s.Cfg, s.Fs, t}
@ -451,6 +483,91 @@ HEADLESS {{< myShort >}}
} }
func TestMultiSiteBundles(t *testing.T) {
assert := require.New(t)
b := newTestSitesBuilder(t)
b.WithConfigFile("toml", `
baseURL = "http://example.com/"
defaultContentLanguage = "en"
[languages]
[languages.en]
weight = 10
contentDir = "content/en"
[languages.nn]
weight = 20
contentDir = "content/nn"
`)
b.WithContent("en/mybundle/index.md", `
---
headless: true
---
`)
b.WithContent("nn/mybundle/index.md", `
---
headless: true
---
`)
b.WithContent("en/mybundle/data.yaml", `data en`)
b.WithContent("en/mybundle/forms.yaml", `forms en`)
b.WithContent("nn/mybundle/data.yaml", `data nn`)
b.WithContent("en/_index.md", `
---
Title: Home
---
Home content.
`)
b.WithContent("en/section-not-bundle/_index.md", `
---
Title: Section Page
---
Section content.
`)
b.WithContent("en/section-not-bundle/single.md", `
---
Title: Section Single
Date: 2018-02-01
---
Single content.
`)
b.Build(BuildCfg{})
b.AssertFileContent("public/nn/mybundle/data.yaml", "data nn")
b.AssertFileContent("public/nn/mybundle/forms.yaml", "forms en")
b.AssertFileContent("public/mybundle/data.yaml", "data en")
b.AssertFileContent("public/mybundle/forms.yaml", "forms en")
assert.False(b.CheckExists("public/nn/nn/mybundle/data.yaml"))
assert.False(b.CheckExists("public/en/mybundle/data.yaml"))
homeEn := b.H.Sites[0].home
assert.NotNil(homeEn)
assert.Equal(2018, homeEn.Date().Year())
b.AssertFileContent("public/section-not-bundle/index.html", "Section Page", "Content: <p>Section content.</p>")
b.AssertFileContent("public/section-not-bundle/single/index.html", "Section Single", "|<p>Single content.</p>")
}
func newTestBundleSources(t *testing.T) (*hugofs.Fs, *viper.Viper) { func newTestBundleSources(t *testing.T) (*hugofs.Fs, *viper.Viper) {
cfg, fs := newTestCfg() cfg, fs := newTestCfg()
assert := require.New(t) assert := require.New(t)
@ -512,6 +629,8 @@ TheContent.
singleLayout := ` singleLayout := `
Single Title: {{ .Title }} Single Title: {{ .Title }}
Single RelPermalink: {{ .RelPermalink }}
Single Permalink: {{ .Permalink }}
Content: {{ .Content }} Content: {{ .Content }}
{{ $sunset := .Resources.GetMatch "my-sunset-1*" }} {{ $sunset := .Resources.GetMatch "my-sunset-1*" }}
{{ with $sunset }} {{ with $sunset }}
@ -532,7 +651,7 @@ Thumb RelPermalink: {{ $thumb.RelPermalink }}
` `
myShort := ` myShort := `
MyShort in {{ .Page.Path }}: MyShort in {{ .Page.File.Path }}:
{{ $sunset := .Page.Resources.GetMatch "my-sunset-2*" }} {{ $sunset := .Page.Resources.GetMatch "my-sunset-2*" }}
{{ with $sunset }} {{ with $sunset }}
Short Sunset RelPermalink: {{ .RelPermalink }} Short Sunset RelPermalink: {{ .RelPermalink }}
@ -599,6 +718,7 @@ Content for 은행.
assert.NoError(err) assert.NoError(err)
_, err = io.Copy(out, src) _, err = io.Copy(out, src)
assert.NoError(err)
out.Close() out.Close()
src.Seek(0, 0) src.Seek(0, 0)
_, err = io.Copy(out2, src) _, err = io.Copy(out2, src)

View File

@ -1,4 +1,4 @@
// Copyright 2016 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -18,43 +18,65 @@ import (
"path" "path"
"path/filepath" "path/filepath"
"strings" "strings"
"sync"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/cache" "github.com/gohugoio/hugo/cache"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/resources/page"
) )
// Used in the page cache to mark more than one hit for a given key.
var ambiguityFlag = &pageState{}
// PageCollections contains the page collections for a site. // PageCollections contains the page collections for a site.
type PageCollections struct { type PageCollections struct {
// Includes only pages of all types, and only pages in the current language.
Pages Pages
// Includes all pages in all languages, including the current one.
// Includes pages of all types.
AllPages Pages
// A convenience cache for the traditional index types, taxonomies, home page etc.
// This is for the current language only.
indexPages Pages
// A convenience cache for the regular pages.
// This is for the current language only.
RegularPages Pages
// A convenience cache for the all the regular pages.
AllRegularPages Pages
// Includes absolute all pages (of all types), including drafts etc. // Includes absolute all pages (of all types), including drafts etc.
rawAllPages Pages rawAllPages pageStatePages
// rawAllPages plus additional pages created during the build process.
workAllPages pageStatePages
// Includes headless bundles, i.e. bundles that produce no output for its content page. // Includes headless bundles, i.e. bundles that produce no output for its content page.
headlessPages Pages headlessPages pageStatePages
// Lazy initialized page collections
pages *lazyPagesFactory
regularPages *lazyPagesFactory
allPages *lazyPagesFactory
allRegularPages *lazyPagesFactory
// The index for .Site.GetPage etc.
pageIndex *cache.Lazy pageIndex *cache.Lazy
} }
// Pages returns all pages.
// This is for the current language only.
func (c *PageCollections) Pages() page.Pages {
return c.pages.get()
}
// RegularPages returns all the regular pages.
// This is for the current language only.
func (c *PageCollections) RegularPages() page.Pages {
return c.regularPages.get()
}
// AllPages returns all pages for all languages.
func (c *PageCollections) AllPages() page.Pages {
return c.allPages.get()
}
// AllPages returns all regular pages for all languages.
func (c *PageCollections) AllRegularPages() page.Pages {
return c.allRegularPages.get()
}
// Get initializes the index if not already done so, then // Get initializes the index if not already done so, then
// looks up the given page ref, returns nil if no value found. // looks up the given page ref, returns nil if no value found.
func (c *PageCollections) getFromCache(ref string) (*Page, error) { func (c *PageCollections) getFromCache(ref string) (page.Page, error) {
v, found, err := c.pageIndex.Get(ref) v, found, err := c.pageIndex.Get(ref)
if err != nil { if err != nil {
return nil, err return nil, err
@ -63,7 +85,7 @@ func (c *PageCollections) getFromCache(ref string) (*Page, error) {
return nil, nil return nil, nil
} }
p := v.(*Page) p := v.(page.Page)
if p != ambiguityFlag { if p != ambiguityFlag {
return p, nil return p, nil
@ -71,17 +93,49 @@ func (c *PageCollections) getFromCache(ref string) (*Page, error) {
return nil, fmt.Errorf("page reference %q is ambiguous", ref) return nil, fmt.Errorf("page reference %q is ambiguous", ref)
} }
var ambiguityFlag = &Page{Kind: kindUnknown, title: "ambiguity flag"} type lazyPagesFactory struct {
pages page.Pages
func (c *PageCollections) refreshPageCaches() { init sync.Once
c.indexPages = c.findPagesByKindNotIn(KindPage, c.Pages) factory page.PagesFactory
c.RegularPages = c.findPagesByKindIn(KindPage, c.Pages) }
c.AllRegularPages = c.findPagesByKindIn(KindPage, c.AllPages)
indexLoader := func() (map[string]interface{}, error) { func (l *lazyPagesFactory) get() page.Pages {
l.init.Do(func() {
l.pages = l.factory()
})
return l.pages
}
func newLazyPagesFactory(factory page.PagesFactory) *lazyPagesFactory {
return &lazyPagesFactory{factory: factory}
}
func newPageCollections() *PageCollections {
return newPageCollectionsFromPages(nil)
}
func newPageCollectionsFromPages(pages pageStatePages) *PageCollections {
c := &PageCollections{rawAllPages: pages}
c.pages = newLazyPagesFactory(func() page.Pages {
pages := make(page.Pages, len(c.workAllPages))
for i, p := range c.workAllPages {
pages[i] = p
}
return pages
})
c.regularPages = newLazyPagesFactory(func() page.Pages {
return c.findPagesByKindInWorkPages(page.KindPage, c.workAllPages)
})
c.pageIndex = cache.NewLazy(func() (map[string]interface{}, error) {
index := make(map[string]interface{}) index := make(map[string]interface{})
add := func(ref string, p *Page) { add := func(ref string, p page.Page) {
ref = strings.ToLower(ref)
existing := index[ref] existing := index[ref]
if existing == nil { if existing == nil {
index[ref] = p index[ref] = p
@ -90,71 +144,63 @@ func (c *PageCollections) refreshPageCaches() {
} }
} }
for _, pageCollection := range []Pages{c.RegularPages, c.headlessPages} { for _, pageCollection := range []pageStatePages{c.workAllPages, c.headlessPages} {
for _, p := range pageCollection { for _, p := range pageCollection {
sourceRef := p.absoluteSourceRef() if p.IsPage() {
sourceRef := p.sourceRef()
if sourceRef != "" { if sourceRef != "" {
// index the canonical ref // index the canonical ref
// e.g. /section/article.md // e.g. /section/article.md
add(sourceRef, p) add(sourceRef, p)
} }
// Ref/Relref supports this potentially ambiguous lookup. // Ref/Relref supports this potentially ambiguous lookup.
add(p.LogicalName(), p) add(p.File().LogicalName(), p)
translationBaseName := p.TranslationBaseName() translationBaseName := p.File().TranslationBaseName()
dir, _ := path.Split(sourceRef) dir, _ := path.Split(sourceRef)
dir = strings.TrimSuffix(dir, "/") dir = strings.TrimSuffix(dir, "/")
if translationBaseName == "index" { if translationBaseName == "index" {
add(dir, p) add(dir, p)
add(path.Base(dir), p) add(path.Base(dir), p)
} else {
add(translationBaseName, p)
}
// We need a way to get to the current language version.
pathWithNoExtensions := path.Join(dir, translationBaseName)
add(pathWithNoExtensions, p)
} else { } else {
add(translationBaseName, p) // index the canonical, unambiguous ref for any backing file
// e.g. /section/_index.md
sourceRef := p.sourceRef()
if sourceRef != "" {
add(sourceRef, p)
}
ref := p.SectionsPath()
// index the canonical, unambiguous virtual ref
// e.g. /section
// (this may already have been indexed above)
add("/"+ref, p)
} }
// We need a way to get to the current language version.
pathWithNoExtensions := path.Join(dir, translationBaseName)
add(pathWithNoExtensions, p)
} }
} }
for _, p := range c.indexPages {
// index the canonical, unambiguous ref for any backing file
// e.g. /section/_index.md
sourceRef := p.absoluteSourceRef()
if sourceRef != "" {
add(sourceRef, p)
}
ref := path.Join(p.sections...)
// index the canonical, unambiguous virtual ref
// e.g. /section
// (this may already have been indexed above)
add("/"+ref, p)
}
return index, nil return index, nil
} })
c.pageIndex = cache.NewLazy(indexLoader) return c
}
func newPageCollections() *PageCollections {
return &PageCollections{}
}
func newPageCollectionsFromPages(pages Pages) *PageCollections {
return &PageCollections{rawAllPages: pages}
} }
// This is an adapter func for the old API with Kind as first argument. // This is an adapter func for the old API with Kind as first argument.
// This is invoked when you do .Site.GetPage. We drop the Kind and fails // This is invoked when you do .Site.GetPage. We drop the Kind and fails
// if there are more than 2 arguments, which would be ambigous. // if there are more than 2 arguments, which would be ambigous.
func (c *PageCollections) getPageOldVersion(ref ...string) (*Page, error) { func (c *PageCollections) getPageOldVersion(ref ...string) (page.Page, error) {
var refs []string var refs []string
for _, r := range ref { for _, r := range ref {
// A common construct in the wild is // A common construct in the wild is
@ -173,10 +219,10 @@ func (c *PageCollections) getPageOldVersion(ref ...string) (*Page, error) {
return nil, fmt.Errorf(`too many arguments to .Site.GetPage: %v. Use lookups on the form {{ .Site.GetPage "/posts/mypage-md" }}`, ref) return nil, fmt.Errorf(`too many arguments to .Site.GetPage: %v. Use lookups on the form {{ .Site.GetPage "/posts/mypage-md" }}`, ref)
} }
if len(refs) == 0 || refs[0] == KindHome { if len(refs) == 0 || refs[0] == page.KindHome {
key = "/" key = "/"
} else if len(refs) == 1 { } else if len(refs) == 1 {
if len(ref) == 2 && refs[0] == KindSection { if len(ref) == 2 && refs[0] == page.KindSection {
// This is an old style reference to the "Home Page section". // This is an old style reference to the "Home Page section".
// Typically fetched via {{ .Site.GetPage "section" .Section }} // Typically fetched via {{ .Site.GetPage "section" .Section }}
// See https://github.com/gohugoio/hugo/issues/4989 // See https://github.com/gohugoio/hugo/issues/4989
@ -197,17 +243,18 @@ func (c *PageCollections) getPageOldVersion(ref ...string) (*Page, error) {
} }
// Only used in tests. // Only used in tests.
func (c *PageCollections) getPage(typ string, sections ...string) *Page { func (c *PageCollections) getPage(typ string, sections ...string) page.Page {
refs := append([]string{typ}, path.Join(sections...)) refs := append([]string{typ}, path.Join(sections...))
p, _ := c.getPageOldVersion(refs...) p, _ := c.getPageOldVersion(refs...)
return p return p
} }
// Ref is either unix-style paths (i.e. callers responsible for // Case insensitive page lookup.
// calling filepath.ToSlash as necessary) or shorthand refs. func (c *PageCollections) getPageNew(context page.Page, ref string) (page.Page, error) {
func (c *PageCollections) getPageNew(context *Page, ref string) (*Page, error) {
var anError error var anError error
ref = strings.ToLower(ref)
// Absolute (content root relative) reference. // Absolute (content root relative) reference.
if strings.HasPrefix(ref, "/") { if strings.HasPrefix(ref, "/") {
p, err := c.getFromCache(ref) p, err := c.getFromCache(ref)
@ -220,7 +267,7 @@ func (c *PageCollections) getPageNew(context *Page, ref string) (*Page, error) {
} else if context != nil { } else if context != nil {
// Try the page-relative path. // Try the page-relative path.
ppath := path.Join("/", strings.Join(context.sections, "/"), ref) ppath := path.Join("/", strings.ToLower(context.SectionsPath()), ref)
p, err := c.getFromCache(ppath) p, err := c.getFromCache(ppath)
if err == nil && p != nil { if err == nil && p != nil {
return p, nil return p, nil
@ -236,7 +283,8 @@ func (c *PageCollections) getPageNew(context *Page, ref string) (*Page, error) {
if err == nil && p != nil { if err == nil && p != nil {
if context != nil { if context != nil {
// TODO(bep) remove this case and the message below when the storm has passed // TODO(bep) remove this case and the message below when the storm has passed
helpers.DistinctFeedbackLog.Printf(`WARNING: make non-relative ref/relref page reference(s) in page %q absolute, e.g. {{< ref "/blog/my-post.md" >}}`, context.absoluteSourceRef()) err := wrapErr(errors.New(`make non-relative ref/relref page reference(s) in page %q absolute, e.g. {{< ref "/blog/my-post.md" >}}`), context)
helpers.DistinctWarnLog.Println(err)
} }
return p, nil return p, nil
} }
@ -253,49 +301,56 @@ func (c *PageCollections) getPageNew(context *Page, ref string) (*Page, error) {
} }
if p == nil && anError != nil { if p == nil && anError != nil {
if context != nil { return nil, wrapErr(errors.Wrap(anError, "failed to resolve ref"), context)
return nil, fmt.Errorf("failed to resolve path from page %q: %s", context.absoluteSourceRef(), anError)
}
return nil, fmt.Errorf("failed to resolve page: %s", anError)
} }
return p, nil return p, nil
} }
func (*PageCollections) findPagesByKindIn(kind string, inPages Pages) Pages { func (*PageCollections) findPagesByKindIn(kind string, inPages page.Pages) page.Pages {
var pages Pages var pages page.Pages
for _, p := range inPages { for _, p := range inPages {
if p.Kind == kind { if p.Kind() == kind {
pages = append(pages, p) pages = append(pages, p)
} }
} }
return pages return pages
} }
func (*PageCollections) findFirstPageByKindIn(kind string, inPages Pages) *Page { func (c *PageCollections) findPagesByKind(kind string) page.Pages {
return c.findPagesByKindIn(kind, c.Pages())
}
func (c *PageCollections) findWorkPagesByKind(kind string) pageStatePages {
var pages pageStatePages
for _, p := range c.workAllPages {
if p.Kind() == kind {
pages = append(pages, p)
}
}
return pages
}
func (*PageCollections) findPagesByKindInWorkPages(kind string, inPages pageStatePages) page.Pages {
var pages page.Pages
for _, p := range inPages { for _, p := range inPages {
if p.Kind == kind { if p.Kind() == kind {
pages = append(pages, p)
}
}
return pages
}
func (c *PageCollections) findFirstWorkPageByKindIn(kind string) *pageState {
for _, p := range c.workAllPages {
if p.Kind() == kind {
return p return p
} }
} }
return nil return nil
} }
func (*PageCollections) findPagesByKindNotIn(kind string, inPages Pages) Pages { func (c *PageCollections) addPage(page *pageState) {
var pages Pages
for _, p := range inPages {
if p.Kind != kind {
pages = append(pages, p)
}
}
return pages
}
func (c *PageCollections) findPagesByKind(kind string) Pages {
return c.findPagesByKindIn(kind, c.Pages)
}
func (c *PageCollections) addPage(page *Page) {
c.rawAllPages = append(c.rawAllPages, page) c.rawAllPages = append(c.rawAllPages, page)
} }
@ -307,35 +362,31 @@ func (c *PageCollections) removePageFilename(filename string) {
} }
func (c *PageCollections) removePage(page *Page) { func (c *PageCollections) removePage(page *pageState) {
if i := c.rawAllPages.findPagePos(page); i >= 0 { if i := c.rawAllPages.findPagePos(page); i >= 0 {
c.clearResourceCacheForPage(c.rawAllPages[i]) c.clearResourceCacheForPage(c.rawAllPages[i])
c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...) c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...)
} }
} }
func (c *PageCollections) findPagesByShortcode(shortcode string) Pages { func (c *PageCollections) findPagesByShortcode(shortcode string) page.Pages {
var pages Pages var pages page.Pages
for _, p := range c.rawAllPages { for _, p := range c.rawAllPages {
if p.shortcodeState != nil { if p.HasShortcode(shortcode) {
if _, ok := p.shortcodeState.nameSet[shortcode]; ok { pages = append(pages, p)
pages = append(pages, p)
}
} }
} }
return pages return pages
} }
func (c *PageCollections) replacePage(page *Page) { func (c *PageCollections) replacePage(page *pageState) {
// will find existing page that matches filepath and remove it // will find existing page that matches filepath and remove it
c.removePage(page) c.removePage(page)
c.addPage(page) c.addPage(page)
} }
func (c *PageCollections) clearResourceCacheForPage(page *Page) { func (c *PageCollections) clearResourceCacheForPage(page *pageState) {
if len(page.Resources) > 0 { if len(page.resources) > 0 {
page.s.ResourceSpec.DeleteCacheByPrefix(page.relTargetPathBase) page.s.ResourceSpec.DeleteCacheByPrefix(page.targetPaths().SubResourceBaseTarget)
} }
} }

View File

@ -1,4 +1,4 @@
// Copyright 2017 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -21,6 +21,8 @@ import (
"testing" "testing"
"time" "time"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/deps"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -98,12 +100,12 @@ func BenchmarkGetPageRegular(b *testing.B) {
type testCase struct { type testCase struct {
kind string kind string
context *Page context page.Page
path []string path []string
expectedTitle string expectedTitle string
} }
func (t *testCase) check(p *Page, err error, errorMsg string, assert *require.Assertions) { func (t *testCase) check(p page.Page, err error, errorMsg string, assert *require.Assertions) {
switch t.kind { switch t.kind {
case "Ambiguous": case "Ambiguous":
assert.Error(err) assert.Error(err)
@ -114,8 +116,8 @@ func (t *testCase) check(p *Page, err error, errorMsg string, assert *require.As
default: default:
assert.NoError(err, errorMsg) assert.NoError(err, errorMsg)
assert.NotNil(p, errorMsg) assert.NotNil(p, errorMsg)
assert.Equal(t.kind, p.Kind, errorMsg) assert.Equal(t.kind, p.Kind(), errorMsg)
assert.Equal(t.expectedTitle, p.title, errorMsg) assert.Equal(t.expectedTitle, p.Title(), errorMsg)
} }
} }
@ -159,62 +161,62 @@ func TestGetPage(t *testing.T) {
tests := []testCase{ tests := []testCase{
// legacy content root relative paths // legacy content root relative paths
{KindHome, nil, []string{}, "home page"}, {page.KindHome, nil, []string{}, "home page"},
{KindPage, nil, []string{"about.md"}, "about page"}, {page.KindPage, nil, []string{"about.md"}, "about page"},
{KindSection, nil, []string{"sect3"}, "section 3"}, {page.KindSection, nil, []string{"sect3"}, "section 3"},
{KindPage, nil, []string{"sect3/page1.md"}, "Title3_1"}, {page.KindPage, nil, []string{"sect3/page1.md"}, "Title3_1"},
{KindPage, nil, []string{"sect4/page2.md"}, "Title4_2"}, {page.KindPage, nil, []string{"sect4/page2.md"}, "Title4_2"},
{KindSection, nil, []string{"sect3/sect7"}, "another sect7"}, {page.KindSection, nil, []string{"sect3/sect7"}, "another sect7"},
{KindPage, nil, []string{"sect3/subsect/deep.md"}, "deep page"}, {page.KindPage, nil, []string{"sect3/subsect/deep.md"}, "deep page"},
{KindPage, nil, []string{filepath.FromSlash("sect5/page3.md")}, "Title5_3"}, //test OS-specific path {page.KindPage, nil, []string{filepath.FromSlash("sect5/page3.md")}, "Title5_3"}, //test OS-specific path
// shorthand refs (potentially ambiguous) // shorthand refs (potentially ambiguous)
{KindPage, nil, []string{"unique.md"}, "UniqueBase"}, {page.KindPage, nil, []string{"unique.md"}, "UniqueBase"},
{"Ambiguous", nil, []string{"page1.md"}, ""}, {"Ambiguous", nil, []string{"page1.md"}, ""},
// ISSUE: This is an ambiguous ref, but because we have to support the legacy // ISSUE: This is an ambiguous ref, but because we have to support the legacy
// content root relative paths without a leading slash, the lookup // content root relative paths without a leading slash, the lookup
// returns /sect7. This undermines ambiguity detection, but we have no choice. // returns /sect7. This undermines ambiguity detection, but we have no choice.
//{"Ambiguous", nil, []string{"sect7"}, ""}, //{"Ambiguous", nil, []string{"sect7"}, ""},
{KindSection, nil, []string{"sect7"}, "Sect7s"}, {page.KindSection, nil, []string{"sect7"}, "Sect7s"},
// absolute paths // absolute paths
{KindHome, nil, []string{"/"}, "home page"}, {page.KindHome, nil, []string{"/"}, "home page"},
{KindPage, nil, []string{"/about.md"}, "about page"}, {page.KindPage, nil, []string{"/about.md"}, "about page"},
{KindSection, nil, []string{"/sect3"}, "section 3"}, {page.KindSection, nil, []string{"/sect3"}, "section 3"},
{KindPage, nil, []string{"/sect3/page1.md"}, "Title3_1"}, {page.KindPage, nil, []string{"/sect3/page1.md"}, "Title3_1"},
{KindPage, nil, []string{"/sect4/page2.md"}, "Title4_2"}, {page.KindPage, nil, []string{"/sect4/page2.md"}, "Title4_2"},
{KindSection, nil, []string{"/sect3/sect7"}, "another sect7"}, {page.KindSection, nil, []string{"/sect3/sect7"}, "another sect7"},
{KindPage, nil, []string{"/sect3/subsect/deep.md"}, "deep page"}, {page.KindPage, nil, []string{"/sect3/subsect/deep.md"}, "deep page"},
{KindPage, nil, []string{filepath.FromSlash("/sect5/page3.md")}, "Title5_3"}, //test OS-specific path {page.KindPage, nil, []string{filepath.FromSlash("/sect5/page3.md")}, "Title5_3"}, //test OS-specific path
{KindPage, nil, []string{"/sect3/unique.md"}, "UniqueBase"}, //next test depends on this page existing {page.KindPage, nil, []string{"/sect3/unique.md"}, "UniqueBase"}, //next test depends on this page existing
// {"NoPage", nil, []string{"/unique.md"}, ""}, // ISSUE #4969: this is resolving to /sect3/unique.md // {"NoPage", nil, []string{"/unique.md"}, ""}, // ISSUE #4969: this is resolving to /sect3/unique.md
{"NoPage", nil, []string{"/missing-page.md"}, ""}, {"NoPage", nil, []string{"/missing-page.md"}, ""},
{"NoPage", nil, []string{"/missing-section"}, ""}, {"NoPage", nil, []string{"/missing-section"}, ""},
// relative paths // relative paths
{KindHome, sec3, []string{".."}, "home page"}, {page.KindHome, sec3, []string{".."}, "home page"},
{KindHome, sec3, []string{"../"}, "home page"}, {page.KindHome, sec3, []string{"../"}, "home page"},
{KindPage, sec3, []string{"../about.md"}, "about page"}, {page.KindPage, sec3, []string{"../about.md"}, "about page"},
{KindSection, sec3, []string{"."}, "section 3"}, {page.KindSection, sec3, []string{"."}, "section 3"},
{KindSection, sec3, []string{"./"}, "section 3"}, {page.KindSection, sec3, []string{"./"}, "section 3"},
{KindPage, sec3, []string{"page1.md"}, "Title3_1"}, {page.KindPage, sec3, []string{"page1.md"}, "Title3_1"},
{KindPage, sec3, []string{"./page1.md"}, "Title3_1"}, {page.KindPage, sec3, []string{"./page1.md"}, "Title3_1"},
{KindPage, sec3, []string{"../sect4/page2.md"}, "Title4_2"}, {page.KindPage, sec3, []string{"../sect4/page2.md"}, "Title4_2"},
{KindSection, sec3, []string{"sect7"}, "another sect7"}, {page.KindSection, sec3, []string{"sect7"}, "another sect7"},
{KindSection, sec3, []string{"./sect7"}, "another sect7"}, {page.KindSection, sec3, []string{"./sect7"}, "another sect7"},
{KindPage, sec3, []string{"./subsect/deep.md"}, "deep page"}, {page.KindPage, sec3, []string{"./subsect/deep.md"}, "deep page"},
{KindPage, sec3, []string{"./subsect/../../sect7/page9.md"}, "Title7_9"}, {page.KindPage, sec3, []string{"./subsect/../../sect7/page9.md"}, "Title7_9"},
{KindPage, sec3, []string{filepath.FromSlash("../sect5/page3.md")}, "Title5_3"}, //test OS-specific path {page.KindPage, sec3, []string{filepath.FromSlash("../sect5/page3.md")}, "Title5_3"}, //test OS-specific path
{KindPage, sec3, []string{"./unique.md"}, "UniqueBase"}, {page.KindPage, sec3, []string{"./unique.md"}, "UniqueBase"},
{"NoPage", sec3, []string{"./sect2"}, ""}, {"NoPage", sec3, []string{"./sect2"}, ""},
//{"NoPage", sec3, []string{"sect2"}, ""}, // ISSUE: /sect3 page relative query is resolving to /sect2 //{"NoPage", sec3, []string{"sect2"}, ""}, // ISSUE: /sect3 page relative query is resolving to /sect2
// absolute paths ignore context // absolute paths ignore context
{KindHome, sec3, []string{"/"}, "home page"}, {page.KindHome, sec3, []string{"/"}, "home page"},
{KindPage, sec3, []string{"/about.md"}, "about page"}, {page.KindPage, sec3, []string{"/about.md"}, "about page"},
{KindPage, sec3, []string{"/sect4/page2.md"}, "Title4_2"}, {page.KindPage, sec3, []string{"/sect4/page2.md"}, "Title4_2"},
{KindPage, sec3, []string{"/sect3/subsect/deep.md"}, "deep page"}, //next test depends on this page existing {page.KindPage, sec3, []string{"/sect3/subsect/deep.md"}, "deep page"}, //next test depends on this page existing
{"NoPage", sec3, []string{"/subsect/deep.md"}, ""}, {"NoPage", sec3, []string{"/subsect/deep.md"}, ""},
} }

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -21,6 +21,8 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
// TODO(bep) move and rewrite in resource/page.
func TestMergeLanguages(t *testing.T) { func TestMergeLanguages(t *testing.T) {
t.Parallel() t.Parallel()
assert := require.New(t) assert := require.New(t)
@ -36,12 +38,12 @@ func TestMergeLanguages(t *testing.T) {
frSite := h.Sites[1] frSite := h.Sites[1]
nnSite := h.Sites[2] nnSite := h.Sites[2]
assert.Equal(31, len(enSite.RegularPages)) assert.Equal(31, len(enSite.RegularPages()))
assert.Equal(6, len(frSite.RegularPages)) assert.Equal(6, len(frSite.RegularPages()))
assert.Equal(12, len(nnSite.RegularPages)) assert.Equal(12, len(nnSite.RegularPages()))
for i := 0; i < 2; i++ { for i := 0; i < 2; i++ {
mergedNN := nnSite.RegularPages.MergeByLanguage(enSite.RegularPages) mergedNN := nnSite.RegularPages().MergeByLanguage(enSite.RegularPages())
assert.Equal(31, len(mergedNN)) assert.Equal(31, len(mergedNN))
for i := 1; i <= 31; i++ { for i := 1; i <= 31; i++ {
expectedLang := "en" expectedLang := "en"
@ -49,11 +51,11 @@ func TestMergeLanguages(t *testing.T) {
expectedLang = "nn" expectedLang = "nn"
} }
p := mergedNN[i-1] p := mergedNN[i-1]
assert.Equal(expectedLang, p.Lang(), fmt.Sprintf("Test %d", i)) assert.Equal(expectedLang, p.Language().Lang, fmt.Sprintf("Test %d", i))
} }
} }
mergedFR := frSite.RegularPages.MergeByLanguage(enSite.RegularPages) mergedFR := frSite.RegularPages().MergeByLanguage(enSite.RegularPages())
assert.Equal(31, len(mergedFR)) assert.Equal(31, len(mergedFR))
for i := 1; i <= 31; i++ { for i := 1; i <= 31; i++ {
expectedLang := "en" expectedLang := "en"
@ -61,28 +63,28 @@ func TestMergeLanguages(t *testing.T) {
expectedLang = "fr" expectedLang = "fr"
} }
p := mergedFR[i-1] p := mergedFR[i-1]
assert.Equal(expectedLang, p.Lang(), fmt.Sprintf("Test %d", i)) assert.Equal(expectedLang, p.Language().Lang, fmt.Sprintf("Test %d", i))
} }
firstNN := nnSite.RegularPages[0] firstNN := nnSite.RegularPages()[0]
assert.Equal(4, len(firstNN.Sites())) assert.Equal(4, len(firstNN.Sites()))
assert.Equal("en", firstNN.Sites().First().Language().Lang) assert.Equal("en", firstNN.Sites().First().Language().Lang)
nnBundle := nnSite.getPage("page", "bundle") nnBundle := nnSite.getPage("page", "bundle")
enBundle := enSite.getPage("page", "bundle") enBundle := enSite.getPage("page", "bundle")
assert.Equal(6, len(enBundle.Resources)) assert.Equal(6, len(enBundle.Resources()))
assert.Equal(2, len(nnBundle.Resources)) assert.Equal(2, len(nnBundle.Resources()))
var ri interface{} = nnBundle.Resources var ri interface{} = nnBundle.Resources()
// This looks less ugly in the templates ... // This looks less ugly in the templates ...
mergedNNResources := ri.(resource.ResourcesLanguageMerger).MergeByLanguage(enBundle.Resources) mergedNNResources := ri.(resource.ResourcesLanguageMerger).MergeByLanguage(enBundle.Resources())
assert.Equal(6, len(mergedNNResources)) assert.Equal(6, len(mergedNNResources))
unchanged, err := nnSite.RegularPages.MergeByLanguageInterface(nil) unchanged, err := nnSite.RegularPages().MergeByLanguageInterface(nil)
assert.NoError(err) assert.NoError(err)
assert.Equal(nnSite.RegularPages, unchanged) assert.Equal(nnSite.RegularPages(), unchanged)
} }
@ -93,7 +95,7 @@ func TestMergeLanguagesTemplate(t *testing.T) {
b.WithTemplates("home.html", ` b.WithTemplates("home.html", `
{{ $pages := .Site.RegularPages }} {{ $pages := .Site.RegularPages }}
{{ .Scratch.Set "pages" $pages }} {{ .Scratch.Set "pages" $pages }}
{{ if eq .Lang "nn" }}: {{ if eq .Language.Lang "nn" }}:
{{ $enSite := index .Sites 0 }} {{ $enSite := index .Sites 0 }}
{{ $frSite := index .Sites 1 }} {{ $frSite := index .Sites 1 }}
{{ $nnBundle := .Site.GetPage "page" "bundle" }} {{ $nnBundle := .Site.GetPage "page" "bundle" }}
@ -103,8 +105,8 @@ func TestMergeLanguagesTemplate(t *testing.T) {
{{ end }} {{ end }}
{{ $pages := .Scratch.Get "pages" }} {{ $pages := .Scratch.Get "pages" }}
{{ $pages2 := .Scratch.Get "pages2" }} {{ $pages2 := .Scratch.Get "pages2" }}
Pages1: {{ range $i, $p := $pages }}{{ add $i 1 }}: {{ .Path }} {{ .Lang }} | {{ end }} Pages1: {{ range $i, $p := $pages }}{{ add $i 1 }}: {{ .File.Path }} {{ .Language.Lang }} | {{ end }}
Pages2: {{ range $i, $p := $pages2 }}{{ add $i 1 }}: {{ .Title }} {{ .Lang }} | {{ end }} Pages2: {{ range $i, $p := $pages2 }}{{ add $i 1 }}: {{ .Title }} {{ .Language.Lang }} | {{ end }}
`, `,
"shortcodes/shortcode.html", "MyShort", "shortcodes/shortcode.html", "MyShort",
@ -178,7 +180,7 @@ func BenchmarkMergeByLanguage(b *testing.B) {
nnSite := h.Sites[2] nnSite := h.Sites[2]
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
merged := nnSite.RegularPages.MergeByLanguage(enSite.RegularPages) merged := nnSite.RegularPages().MergeByLanguage(enSite.RegularPages())
if len(merged) != count { if len(merged) != count {
b.Fatal("Count mismatch") b.Fatal("Count mismatch")
} }

View File

@ -1,75 +0,0 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"path/filepath"
"testing"
"github.com/gohugoio/hugo/common/types"
"github.com/gohugoio/hugo/deps"
"github.com/stretchr/testify/require"
)
func TestRelated(t *testing.T) {
assert := require.New(t)
t.Parallel()
var (
cfg, fs = newTestCfg()
//th = testHelper{cfg, fs, t}
)
pageTmpl := `---
title: Page %d
keywords: [%s]
date: %s
---
Content
`
writeSource(t, fs, filepath.Join("content", "page1.md"), fmt.Sprintf(pageTmpl, 1, "hugo, says", "2017-01-03"))
writeSource(t, fs, filepath.Join("content", "page2.md"), fmt.Sprintf(pageTmpl, 2, "hugo, rocks", "2017-01-02"))
writeSource(t, fs, filepath.Join("content", "page3.md"), fmt.Sprintf(pageTmpl, 3, "bep, says", "2017-01-01"))
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
assert.Len(s.RegularPages, 3)
result, err := s.RegularPages.RelatedTo(types.NewKeyValuesStrings("keywords", "hugo", "rocks"))
assert.NoError(err)
assert.Len(result, 2)
assert.Equal("Page 2", result[0].title)
assert.Equal("Page 1", result[1].title)
result, err = s.RegularPages.Related(s.RegularPages[0])
assert.Len(result, 2)
assert.Equal("Page 2", result[0].title)
assert.Equal("Page 3", result[1].title)
result, err = s.RegularPages.RelatedIndices(s.RegularPages[0], "keywords")
assert.Len(result, 2)
assert.Equal("Page 2", result[0].title)
assert.Equal("Page 3", result[1].title)
result, err = s.RegularPages.RelatedTo(types.NewKeyValuesStrings("keywords", "bep", "rocks"))
assert.NoError(err)
assert.Len(result, 2)
assert.Equal("Page 2", result[0].title)
assert.Equal("Page 3", result[1].title)
}

View File

@ -1,579 +0,0 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"html/template"
"path/filepath"
"strings"
"testing"
"github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/output"
"github.com/stretchr/testify/require"
)
func TestSplitPages(t *testing.T) {
t.Parallel()
s := newTestSite(t)
pages := createTestPages(s, 21)
chunks := splitPages(pages, 5)
require.Equal(t, 5, len(chunks))
for i := 0; i < 4; i++ {
require.Equal(t, 5, chunks[i].Len())
}
lastChunk := chunks[4]
require.Equal(t, 1, lastChunk.Len())
}
func TestSplitPageGroups(t *testing.T) {
t.Parallel()
s := newTestSite(t)
pages := createTestPages(s, 21)
groups, _ := pages.GroupBy("Weight", "desc")
chunks := splitPageGroups(groups, 5)
require.Equal(t, 5, len(chunks))
firstChunk := chunks[0]
// alternate weight 5 and 10
if groups, ok := firstChunk.(PagesGroup); ok {
require.Equal(t, 5, groups.Len())
for _, pg := range groups {
// first group 10 in weight
require.Equal(t, 10, pg.Key)
for _, p := range pg.Pages {
require.True(t, p.fuzzyWordCount%2 == 0) // magic test
}
}
} else {
t.Fatal("Excepted PageGroup")
}
lastChunk := chunks[4]
if groups, ok := lastChunk.(PagesGroup); ok {
require.Equal(t, 1, groups.Len())
for _, pg := range groups {
// last should have 5 in weight
require.Equal(t, 5, pg.Key)
for _, p := range pg.Pages {
require.True(t, p.fuzzyWordCount%2 != 0) // magic test
}
}
} else {
t.Fatal("Excepted PageGroup")
}
}
func TestPager(t *testing.T) {
t.Parallel()
s := newTestSite(t)
pages := createTestPages(s, 21)
groups, _ := pages.GroupBy("Weight", "desc")
urlFactory := func(page int) string {
return fmt.Sprintf("page/%d/", page)
}
_, err := newPaginatorFromPages(pages, -1, urlFactory)
require.NotNil(t, err)
_, err = newPaginatorFromPageGroups(groups, -1, urlFactory)
require.NotNil(t, err)
pag, err := newPaginatorFromPages(pages, 5, urlFactory)
require.Nil(t, err)
doTestPages(t, pag)
first := pag.Pagers()[0].First()
require.Equal(t, "Pager 1", first.String())
require.NotEmpty(t, first.Pages())
require.Empty(t, first.PageGroups())
pag, err = newPaginatorFromPageGroups(groups, 5, urlFactory)
require.Nil(t, err)
doTestPages(t, pag)
first = pag.Pagers()[0].First()
require.NotEmpty(t, first.PageGroups())
require.Empty(t, first.Pages())
}
func doTestPages(t *testing.T, paginator *paginator) {
paginatorPages := paginator.Pagers()
require.Equal(t, 5, len(paginatorPages))
require.Equal(t, 21, paginator.TotalNumberOfElements())
require.Equal(t, 5, paginator.PageSize())
require.Equal(t, 5, paginator.TotalPages())
first := paginatorPages[0]
require.Equal(t, template.HTML("page/1/"), first.URL())
require.Equal(t, first, first.First())
require.True(t, first.HasNext())
require.Equal(t, paginatorPages[1], first.Next())
require.False(t, first.HasPrev())
require.Nil(t, first.Prev())
require.Equal(t, 5, first.NumberOfElements())
require.Equal(t, 1, first.PageNumber())
third := paginatorPages[2]
require.True(t, third.HasNext())
require.True(t, third.HasPrev())
require.Equal(t, paginatorPages[1], third.Prev())
last := paginatorPages[4]
require.Equal(t, template.HTML("page/5/"), last.URL())
require.Equal(t, last, last.Last())
require.False(t, last.HasNext())
require.Nil(t, last.Next())
require.True(t, last.HasPrev())
require.Equal(t, 1, last.NumberOfElements())
require.Equal(t, 5, last.PageNumber())
}
func TestPagerNoPages(t *testing.T) {
t.Parallel()
s := newTestSite(t)
pages := createTestPages(s, 0)
groups, _ := pages.GroupBy("Weight", "desc")
urlFactory := func(page int) string {
return fmt.Sprintf("page/%d/", page)
}
paginator, _ := newPaginatorFromPages(pages, 5, urlFactory)
doTestPagerNoPages(t, paginator)
first := paginator.Pagers()[0].First()
require.Empty(t, first.PageGroups())
require.Empty(t, first.Pages())
paginator, _ = newPaginatorFromPageGroups(groups, 5, urlFactory)
doTestPagerNoPages(t, paginator)
first = paginator.Pagers()[0].First()
require.Empty(t, first.PageGroups())
require.Empty(t, first.Pages())
}
func doTestPagerNoPages(t *testing.T, paginator *paginator) {
paginatorPages := paginator.Pagers()
require.Equal(t, 1, len(paginatorPages))
require.Equal(t, 0, paginator.TotalNumberOfElements())
require.Equal(t, 5, paginator.PageSize())
require.Equal(t, 0, paginator.TotalPages())
// pageOne should be nothing but the first
pageOne := paginatorPages[0]
require.NotNil(t, pageOne.First())
require.False(t, pageOne.HasNext())
require.False(t, pageOne.HasPrev())
require.Nil(t, pageOne.Next())
require.Equal(t, 1, len(pageOne.Pagers()))
require.Equal(t, 0, pageOne.Pages().Len())
require.Equal(t, 0, pageOne.NumberOfElements())
require.Equal(t, 0, pageOne.TotalNumberOfElements())
require.Equal(t, 0, pageOne.TotalPages())
require.Equal(t, 1, pageOne.PageNumber())
require.Equal(t, 5, pageOne.PageSize())
}
func TestPaginationURLFactory(t *testing.T) {
t.Parallel()
cfg, fs := newTestCfg()
cfg.Set("paginatePath", "zoo")
for _, uglyURLs := range []bool{false, true} {
for _, canonifyURLs := range []bool{false, true} {
t.Run(fmt.Sprintf("uglyURLs=%t,canonifyURLs=%t", uglyURLs, canonifyURLs), func(t *testing.T) {
tests := []struct {
name string
d targetPathDescriptor
baseURL string
page int
expected string
}{
{"HTML home page 32",
targetPathDescriptor{Kind: KindHome, Type: output.HTMLFormat}, "http://example.com/", 32, "/zoo/32/"},
{"JSON home page 42",
targetPathDescriptor{Kind: KindHome, Type: output.JSONFormat}, "http://example.com/", 42, "/zoo/42/"},
// Issue #1252
{"BaseURL with sub path",
targetPathDescriptor{Kind: KindHome, Type: output.HTMLFormat}, "http://example.com/sub/", 999, "/sub/zoo/999/"},
}
for _, test := range tests {
d := test.d
cfg.Set("baseURL", test.baseURL)
cfg.Set("canonifyURLs", canonifyURLs)
cfg.Set("uglyURLs", uglyURLs)
d.UglyURLs = uglyURLs
expected := test.expected
if canonifyURLs {
expected = strings.Replace(expected, "/sub", "", 1)
}
if uglyURLs {
expected = expected[:len(expected)-1] + "." + test.d.Type.MediaType.Suffix()
}
pathSpec := newTestPathSpec(fs, cfg)
d.PathSpec = pathSpec
factory := newPaginationURLFactory(d)
got := factory(test.page)
require.Equal(t, expected, got)
}
})
}
}
}
func TestPaginator(t *testing.T) {
t.Parallel()
for _, useViper := range []bool{false, true} {
doTestPaginator(t, useViper)
}
}
func doTestPaginator(t *testing.T, useViper bool) {
cfg, fs := newTestCfg()
pagerSize := 5
if useViper {
cfg.Set("paginate", pagerSize)
} else {
cfg.Set("paginate", -1)
}
s, err := NewSiteForCfg(deps.DepsCfg{Cfg: cfg, Fs: fs})
require.NoError(t, err)
pages := createTestPages(s, 12)
n1, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
n2, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
n1.data["Pages"] = pages
var paginator1 *Pager
if useViper {
paginator1, err = n1.Paginator()
} else {
paginator1, err = n1.Paginator(pagerSize)
}
require.Nil(t, err)
require.NotNil(t, paginator1)
require.Equal(t, 3, paginator1.TotalPages())
require.Equal(t, 12, paginator1.TotalNumberOfElements())
n2.paginator = paginator1.Next()
paginator2, err := n2.Paginator()
require.Nil(t, err)
require.Equal(t, paginator2, paginator1.Next())
n1.data["Pages"] = createTestPages(s, 1)
samePaginator, _ := n1.Paginator()
require.Equal(t, paginator1, samePaginator)
pp, _ := s.NewPage("test")
p, _ := newPageOutput(pp, false, false, output.HTMLFormat)
_, err = p.Paginator()
require.NotNil(t, err)
}
func TestPaginatorWithNegativePaginate(t *testing.T) {
t.Parallel()
s := newTestSite(t, "paginate", -1)
n1, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
_, err := n1.Paginator()
require.Error(t, err)
}
func TestPaginate(t *testing.T) {
t.Parallel()
for _, useViper := range []bool{false, true} {
doTestPaginate(t, useViper)
}
}
func TestPaginatorURL(t *testing.T) {
t.Parallel()
cfg, fs := newTestCfg()
cfg.Set("paginate", 2)
cfg.Set("paginatePath", "testing")
for i := 0; i < 10; i++ {
// Issue #2177, do not double encode URLs
writeSource(t, fs, filepath.Join("content", "阅读", fmt.Sprintf("page%d.md", (i+1))),
fmt.Sprintf(`---
title: Page%d
---
Conten%d
`, (i+1), i+1))
}
writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), "<html><body>{{.Content}}</body></html>")
writeSource(t, fs, filepath.Join("layouts", "_default", "list.html"),
`
<html><body>
Count: {{ .Paginator.TotalNumberOfElements }}
Pages: {{ .Paginator.TotalPages }}
{{ range .Paginator.Pagers -}}
{{ .PageNumber }}: {{ .URL }}
{{ end }}
</body></html>`)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
th := testHelper{s.Cfg, s.Fs, t}
th.assertFileContent(filepath.Join("public", "阅读", "testing", "2", "index.html"), "2: /%E9%98%85%E8%AF%BB/testing/2/")
}
func doTestPaginate(t *testing.T, useViper bool) {
pagerSize := 5
var (
s *Site
err error
)
if useViper {
s = newTestSite(t, "paginate", pagerSize)
} else {
s = newTestSite(t, "paginate", -1)
}
pages := createTestPages(s, 6)
n1, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
n2, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
var paginator1, paginator2 *Pager
if useViper {
paginator1, err = n1.Paginate(pages)
} else {
paginator1, err = n1.Paginate(pages, pagerSize)
}
require.Nil(t, err)
require.NotNil(t, paginator1)
require.Equal(t, 2, paginator1.TotalPages())
require.Equal(t, 6, paginator1.TotalNumberOfElements())
n2.paginator = paginator1.Next()
if useViper {
paginator2, err = n2.Paginate(pages)
} else {
paginator2, err = n2.Paginate(pages, pagerSize)
}
require.Nil(t, err)
require.Equal(t, paginator2, paginator1.Next())
pp, err := s.NewPage("test")
p, _ := newPageOutput(pp, false, false, output.HTMLFormat)
_, err = p.Paginate(pages)
require.NotNil(t, err)
}
func TestInvalidOptions(t *testing.T) {
t.Parallel()
s := newTestSite(t)
n1, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
_, err := n1.Paginate(createTestPages(s, 1), 1, 2)
require.NotNil(t, err)
_, err = n1.Paginator(1, 2)
require.NotNil(t, err)
_, err = n1.Paginator(-1)
require.NotNil(t, err)
}
func TestPaginateWithNegativePaginate(t *testing.T) {
t.Parallel()
cfg, fs := newTestCfg()
cfg.Set("paginate", -1)
s, err := NewSiteForCfg(deps.DepsCfg{Cfg: cfg, Fs: fs})
require.NoError(t, err)
n, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
_, err = n.Paginate(createTestPages(s, 2))
require.NotNil(t, err)
}
func TestPaginatePages(t *testing.T) {
t.Parallel()
s := newTestSite(t)
groups, _ := createTestPages(s, 31).GroupBy("Weight", "desc")
pd := targetPathDescriptor{Kind: KindHome, Type: output.HTMLFormat, PathSpec: s.PathSpec, Addends: "t"}
for i, seq := range []interface{}{createTestPages(s, 11), groups, WeightedPages{}, PageGroup{}, &Pages{}} {
v, err := paginatePages(pd, seq, 11)
require.NotNil(t, v, "Val %d", i)
require.Nil(t, err, "Err %d", i)
}
_, err := paginatePages(pd, Site{}, 11)
require.NotNil(t, err)
}
// Issue #993
func TestPaginatorFollowedByPaginateShouldFail(t *testing.T) {
t.Parallel()
s := newTestSite(t, "paginate", 10)
n1, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
n2, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
_, err := n1.Paginator()
require.Nil(t, err)
_, err = n1.Paginate(createTestPages(s, 2))
require.NotNil(t, err)
_, err = n2.Paginate(createTestPages(s, 2))
require.Nil(t, err)
}
func TestPaginateFollowedByDifferentPaginateShouldFail(t *testing.T) {
t.Parallel()
s := newTestSite(t, "paginate", 10)
n1, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
n2, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
p1 := createTestPages(s, 2)
p2 := createTestPages(s, 10)
_, err := n1.Paginate(p1)
require.Nil(t, err)
_, err = n1.Paginate(p1)
require.Nil(t, err)
_, err = n1.Paginate(p2)
require.NotNil(t, err)
_, err = n2.Paginate(p2)
require.Nil(t, err)
}
func TestProbablyEqualPageLists(t *testing.T) {
t.Parallel()
s := newTestSite(t)
fivePages := createTestPages(s, 5)
zeroPages := createTestPages(s, 0)
zeroPagesByWeight, _ := createTestPages(s, 0).GroupBy("Weight", "asc")
fivePagesByWeight, _ := createTestPages(s, 5).GroupBy("Weight", "asc")
ninePagesByWeight, _ := createTestPages(s, 9).GroupBy("Weight", "asc")
for i, this := range []struct {
v1 interface{}
v2 interface{}
expect bool
}{
{nil, nil, true},
{"a", "b", true},
{"a", fivePages, false},
{fivePages, "a", false},
{fivePages, createTestPages(s, 2), false},
{fivePages, fivePages, true},
{zeroPages, zeroPages, true},
{fivePagesByWeight, fivePagesByWeight, true},
{zeroPagesByWeight, fivePagesByWeight, false},
{zeroPagesByWeight, zeroPagesByWeight, true},
{fivePagesByWeight, fivePages, false},
{fivePagesByWeight, ninePagesByWeight, false},
} {
result := probablyEqualPageLists(this.v1, this.v2)
if result != this.expect {
t.Errorf("[%d] got %t but expected %t", i, result, this.expect)
}
}
}
func TestPage(t *testing.T) {
t.Parallel()
urlFactory := func(page int) string {
return fmt.Sprintf("page/%d/", page)
}
s := newTestSite(t)
fivePages := createTestPages(s, 7)
fivePagesFuzzyWordCount, _ := createTestPages(s, 7).GroupBy("FuzzyWordCount", "asc")
p1, _ := newPaginatorFromPages(fivePages, 2, urlFactory)
p2, _ := newPaginatorFromPageGroups(fivePagesFuzzyWordCount, 2, urlFactory)
f1 := p1.pagers[0].First()
f2 := p2.pagers[0].First()
page11, _ := f1.page(1)
page1Nil, _ := f1.page(3)
page21, _ := f2.page(1)
page2Nil, _ := f2.page(3)
require.Equal(t, 3, page11.fuzzyWordCount)
require.Nil(t, page1Nil)
require.Equal(t, 3, page21.fuzzyWordCount)
require.Nil(t, page2Nil)
}
func createTestPages(s *Site, num int) Pages {
pages := make(Pages, num)
for i := 0; i < num; i++ {
p := s.newPage(filepath.FromSlash(fmt.Sprintf("/x/y/z/p%d.md", i)))
w := 5
if i%2 == 0 {
w = 10
}
p.fuzzyWordCount = i + 2
p.Weight = w
pages[i] = p
}
return pages
}

View File

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -75,7 +75,7 @@ func (c *themesCollector) add(name, configFilename string) (ThemeConfig, error)
var err error var err error
cfg, err = config.FromFile(c.fs, configFilename) cfg, err = config.FromFile(c.fs, configFilename)
if err != nil { if err != nil {
return tc, nil return tc, err
} }
} }

View File

@ -1,4 +1,4 @@
// Copyright 2017-present The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -14,8 +14,7 @@
package hugolib package hugolib
var ( var (
_ Permalinker = (*Page)(nil) _ Permalinker = (*pageState)(nil)
_ Permalinker = (*OutputFormat)(nil)
) )
// Permalinker provides permalinks of both the relative and absolute kind. // Permalinker provides permalinks of both the relative and absolute kind.

View File

@ -1,213 +0,0 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"errors"
"fmt"
"path"
"path/filepath"
"regexp"
"strconv"
"strings"
"github.com/gohugoio/hugo/helpers"
)
// pathPattern represents a string which builds up a URL from attributes
type pathPattern string
// pageToPermaAttribute is the type of a function which, given a page and a tag
// can return a string to go in that position in the page (or an error)
type pageToPermaAttribute func(*Page, string) (string, error)
// PermalinkOverrides maps a section name to a PathPattern
type PermalinkOverrides map[string]pathPattern
// knownPermalinkAttributes maps :tags in a permalink specification to a
// function which, given a page and the tag, returns the resulting string
// to be used to replace that tag.
var knownPermalinkAttributes map[string]pageToPermaAttribute
var attributeRegexp = regexp.MustCompile(`:\w+`)
// validate determines if a PathPattern is well-formed
func (pp pathPattern) validate() bool {
fragments := strings.Split(string(pp[1:]), "/")
var bail = false
for i := range fragments {
if bail {
return false
}
if len(fragments[i]) == 0 {
bail = true
continue
}
matches := attributeRegexp.FindAllStringSubmatch(fragments[i], -1)
if matches == nil {
continue
}
for _, match := range matches {
k := strings.ToLower(match[0][1:])
if _, ok := knownPermalinkAttributes[k]; !ok {
return false
}
}
}
return true
}
type permalinkExpandError struct {
pattern pathPattern
section string
err error
}
func (pee *permalinkExpandError) Error() string {
return fmt.Sprintf("error expanding %q section %q: %s", string(pee.pattern), pee.section, pee.err)
}
var (
errPermalinkIllFormed = errors.New("permalink ill-formed")
errPermalinkAttributeUnknown = errors.New("permalink attribute not recognised")
)
// Expand on a PathPattern takes a Page and returns the fully expanded Permalink
// or an error explaining the failure.
func (pp pathPattern) Expand(p *Page) (string, error) {
if !pp.validate() {
return "", &permalinkExpandError{pattern: pp, section: "<all>", err: errPermalinkIllFormed}
}
sections := strings.Split(string(pp), "/")
for i, field := range sections {
if len(field) == 0 {
continue
}
matches := attributeRegexp.FindAllStringSubmatch(field, -1)
if matches == nil {
continue
}
newField := field
for _, match := range matches {
attr := match[0][1:]
callback, ok := knownPermalinkAttributes[attr]
if !ok {
return "", &permalinkExpandError{pattern: pp, section: strconv.Itoa(i), err: errPermalinkAttributeUnknown}
}
newAttr, err := callback(p, attr)
if err != nil {
return "", &permalinkExpandError{pattern: pp, section: strconv.Itoa(i), err: err}
}
newField = strings.Replace(newField, match[0], newAttr, 1)
}
sections[i] = newField
}
return strings.Join(sections, "/"), nil
}
func pageToPermalinkDate(p *Page, dateField string) (string, error) {
// a Page contains a Node which provides a field Date, time.Time
switch dateField {
case "year":
return strconv.Itoa(p.Date.Year()), nil
case "month":
return fmt.Sprintf("%02d", int(p.Date.Month())), nil
case "monthname":
return p.Date.Month().String(), nil
case "day":
return fmt.Sprintf("%02d", p.Date.Day()), nil
case "weekday":
return strconv.Itoa(int(p.Date.Weekday())), nil
case "weekdayname":
return p.Date.Weekday().String(), nil
case "yearday":
return strconv.Itoa(p.Date.YearDay()), nil
}
//TODO: support classic strftime escapes too
// (and pass those through despite not being in the map)
panic("coding error: should not be here")
}
// pageToPermalinkTitle returns the URL-safe form of the title
func pageToPermalinkTitle(p *Page, _ string) (string, error) {
// Page contains Node which has Title
// (also contains URLPath which has Slug, sometimes)
return p.s.PathSpec.URLize(p.title), nil
}
// pageToPermalinkFilename returns the URL-safe form of the filename
func pageToPermalinkFilename(p *Page, _ string) (string, error) {
name := p.File.TranslationBaseName()
if name == "index" {
// Page bundles; the directory name will hopefully have a better name.
dir := strings.TrimSuffix(p.File.Dir(), helpers.FilePathSeparator)
_, name = filepath.Split(dir)
}
return p.s.PathSpec.URLize(name), nil
}
// if the page has a slug, return the slug, else return the title
func pageToPermalinkSlugElseTitle(p *Page, a string) (string, error) {
if p.Slug != "" {
// Don't start or end with a -
// TODO(bep) this doesn't look good... Set the Slug once.
if strings.HasPrefix(p.Slug, "-") {
p.Slug = p.Slug[1:len(p.Slug)]
}
if strings.HasSuffix(p.Slug, "-") {
p.Slug = p.Slug[0 : len(p.Slug)-1]
}
return p.s.PathSpec.URLize(p.Slug), nil
}
return pageToPermalinkTitle(p, a)
}
func pageToPermalinkSection(p *Page, _ string) (string, error) {
return p.Section(), nil
}
func pageToPermalinkSections(p *Page, _ string) (string, error) {
return path.Join(p.CurrentSection().sections...), nil
}
func init() {
knownPermalinkAttributes = map[string]pageToPermaAttribute{
"year": pageToPermalinkDate,
"month": pageToPermalinkDate,
"monthname": pageToPermalinkDate,
"day": pageToPermalinkDate,
"weekday": pageToPermalinkDate,
"weekdayname": pageToPermalinkDate,
"yearday": pageToPermalinkDate,
"section": pageToPermalinkSection,
"sections": pageToPermalinkSections,
"title": pageToPermalinkTitle,
"slug": pageToPermalinkSlugElseTitle,
"filename": pageToPermalinkFilename,
}
}

Some files were not shown because too many files have changed in this diff Show More