Add Hugo Piper with SCSS support and much more

Before this commit, you would have to use page bundles to do image processing etc. in Hugo.

This commit adds

* A new `/assets` top-level project or theme dir (configurable via `assetDir`)
* A new template func, `resources.Get`, which can be used to "get a resource" that can be further processed.

This means that you can now do this in your templates (or shortcodes):

```go-html-template
{{ $sunset := (resources.Get "images/sunset.jpg").Fill "300x200" }}
```
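
The value returned is a regular `Resource`, so the processed image can be referenced directly in your markup. A minimal sketch building on the example above (`.RelPermalink`, `.Width` and `.Height` are the usual image Resource accessors):

```go-html-template
{{/* Scale the image to 300x200 and link to the processed copy. */}}
{{ $sunset := (resources.Get "images/sunset.jpg").Fill "300x200" }}
<img src="{{ $sunset.RelPermalink }}" width="{{ $sunset.Width }}" height="{{ $sunset.Height }}" alt="Sunset">
```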

This also adds a new `extended` build tag that enables powerful SCSS/SASS support with source maps. To compile this from source, you will also need a C compiler installed:

```bash
HUGO_BUILD_TAGS=extended mage install
```

Note that you can use the output of the SCSS processing later in a non-SCSS-enabled Hugo.

The `SCSS` processor is a _Resource transformation step_, and it can be chained with many others in a pipeline:

```go-html-template
{{ $styles := resources.Get "styles.scss" | resources.ToCSS | resources.PostCSS | resources.Minify | resources.Fingerprint }}
<link rel="stylesheet" href="{{ $styles.RelPermalink }}" integrity="{{ $styles.Data.Digest }}" media="screen">
```

The transformation funcs above have aliases, so the pipeline can be shortened to:

```go-html-template
{{ $styles := resources.Get "styles.scss" | toCSS | postCSS | minify | fingerprint }}
<link rel="stylesheet" href="{{ $styles.RelPermalink }}" integrity="{{ $styles.Data.Digest }}" media="screen">
```

A quick tip: while developing, skip the fingerprinting step, and possibly also the not-so-fast `postCSS` step, as that lets Hugo be smarter about rebuilding only what changed.
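
One way to wire that up is to branch on an environment variable. This is only a sketch; the `HUGO_ENV` variable is an assumption about your own build setup, not something this commit adds:

```go-html-template
{{ $styles := resources.Get "styles.scss" | toCSS }}
{{ if eq (getenv "HUGO_ENV") "production" }}
  {{/* Full pipeline for production builds. */}}
  {{ $styles := $styles | postCSS | minify | fingerprint }}
  <link rel="stylesheet" href="{{ $styles.RelPermalink }}" integrity="{{ $styles.Data.Digest }}" media="screen">
{{ else }}
  {{/* Skip the slower steps during development so rebuilds stay fast. */}}
  <link rel="stylesheet" href="{{ $styles.RelPermalink }}" media="screen">
{{ end }}
```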

Documentation will follow, but have a look at the demo repo at https://github.com/bep/hugo-sass-test.

New functions to create `Resource` objects:

* `resources.Get` (see above)
* `resources.FromString`: Create a Resource from a string (see the sketch after this list).
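
A minimal, hypothetical sketch of `resources.FromString`; the target path and content below are made up for illustration, and the assumed argument order is target path first, then content:

```go-html-template
{{/* Create a Resource from an inline string, targeting the given path. */}}
{{ $greeting := resources.FromString "data/greeting.txt" "Hello from Hugo" }}
<a href="{{ $greeting.RelPermalink }}">greeting</a>
```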

New `Resource` transformation funcs:

* `resources.ToCSS`: Compile `SCSS` or `SASS` into `CSS`.
* `resources.PostCSS`: Process your CSS with PostCSS; a config file is supported (in the project or theme, or passed as an option).
* `resources.Minify`: Currently supports `css`, `js`, `json`, `html`, `svg`, `xml`.
* `resources.Fingerprint`: Creates a fingerprinted version of the given Resource with Subresource Integrity.
* `resources.Concat`: Concatenates a list of Resource objects. Think of this as a poor man's bundler (see the sketch after this list).
* `resources.ExecuteAsTemplate`: Parses and executes the given Resource and data context (e.g. .Site) as a Go template.
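
To tie a few of these together, here is a hedged sketch of `resources.Concat` and `resources.ExecuteAsTemplate`; the file names and the use of the `slice` func are illustrative assumptions, not part of this commit:

```go-html-template
{{/* Bundle two scripts into one file, then minify and fingerprint the bundle. */}}
{{ $app := resources.Get "js/app.js" }}
{{ $search := resources.Get "js/search.js" }}
{{ $bundle := slice $app $search | resources.Concat "js/bundle.js" | minify | fingerprint }}
<script src="{{ $bundle.RelPermalink }}" integrity="{{ $bundle.Data.Digest }}"></script>

{{/* Run an asset through Go's template engine (with .Site as data) before compiling it. */}}
{{ $styles := resources.Get "scss/template.scss" | resources.ExecuteAsTemplate "main.scss" .Site | toCSS }}
<link rel="stylesheet" href="{{ $styles.RelPermalink }}" media="screen">
```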

Fixes #4381
Fixes #4903
Fixes #4858
Bjørn Erik Pedersen 2018-02-20 10:02:14 +01:00
parent a5d0a57e6b
commit dea71670c0
90 changed files with 4685 additions and 1125 deletions

.gitignore

@ -15,5 +15,7 @@ vendor/*/
*.debug
coverage*.out
dock.sh
GoBuilds
dist


@ -1,6 +1,8 @@
language: go
sudo: false
dist: trusty
env:
HUGO_BUILD_TAGS="extended"
git:
depth: false
go:
@ -18,8 +20,9 @@ install:
- go get github.com/magefile/mage
- mage -v vendor
script:
- mage -v hugoRace
- mage -v test
- mage -v check
- mage -v hugo
- ./hugo -s docs/
- ./hugo --renderToMemory -s docs/
before_install:


@ -192,6 +192,12 @@ To list all available commands along with descriptions:
mage -l
```
**Note:** From Hugo 0.43 we have added a build tag, `extended` that adds **SCSS support**. This needs a C compiler installed to build. You can enable this when building by:
```bash
HUGO_BUILD_TAGS=extended mage install
````
### Updating the Hugo Sources
If you want to stay in sync with the Hugo repository, you can easily pull down

Dockerfile (Normal file → Executable file)

Gopkg.lock

@ -1,6 +1,12 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
branch = "master"
name = "github.com/BurntSushi/locker"
packages = ["."]
revision = "a6e239ea1c69bff1cfdb20c4b73dadf52f784b6a"
[[projects]]
branch = "master"
name = "github.com/BurntSushi/toml"
@ -68,6 +74,16 @@
packages = ["."]
revision = "012701e8669671499fc43e9792335a1dcbfe2afb"
[[projects]]
branch = "master"
name = "github.com/bep/go-tocss"
packages = [
"scss",
"scss/libsass",
"tocss"
]
revision = "2abb118dc8688b6c7df44e12f4152c2bded9b19c"
[[projects]]
name = "github.com/chaseadamsio/goorgeous"
packages = ["."]
@ -107,6 +123,12 @@
revision = "487489b64fb796de2e55f4e8a4ad1e145f80e957"
version = "v1.1.6"
[[projects]]
branch = "master"
name = "github.com/dsnet/golib"
packages = ["memfile"]
revision = "1ea1667757804fdcccc5a1810e09aba618885ac2"
[[projects]]
branch = "master"
name = "github.com/eknkc/amber"
@ -231,6 +253,12 @@
revision = "fd2f6c1403b37925bd7fe13af05853b8ae58ee5f"
version = "v1.3.6"
[[projects]]
branch = "master"
name = "github.com/mitchellh/hashstructure"
packages = ["."]
revision = "2bca23e0e452137f789efbc8610126fd8b94f73b"
[[projects]]
branch = "master"
name = "github.com/mitchellh/mapstructure"
@ -355,6 +383,42 @@
revision = "12b6f73e6084dad08a7c6e575284b177ecafbc71"
version = "v1.2.1"
[[projects]]
name = "github.com/tdewolff/minify"
packages = [
".",
"css",
"html",
"js",
"json",
"svg",
"xml"
]
revision = "8d72a4127ae33b755e95bffede9b92e396267ce2"
version = "v2.3.5"
[[projects]]
name = "github.com/tdewolff/parse"
packages = [
".",
"buffer",
"css",
"html",
"js",
"json",
"strconv",
"svg",
"xml"
]
revision = "d739d6fccb0971177e06352fea02d3552625efb1"
version = "v2.3.3"
[[projects]]
branch = "master"
name = "github.com/wellington/go-libsass"
packages = ["libs"]
revision = "615eaa47ef794d037c1906a0eb7bf85375a5decf"
[[projects]]
name = "github.com/yosssi/ace"
packages = ["."]
@ -431,6 +495,6 @@
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "78b19539f7321429f217fc482de9e7cb4e2edd9b054ba8ec36b1e62bc4281b4f"
inputs-digest = "aaf909f54ae33c5a70f692e19e59834106bcbbe5d16724ff3998907734e32c0b"
solver-name = "gps-cdcl"
solver-version = 1


@ -16,6 +16,14 @@
branch = "master"
name = "github.com/bep/gitmap"
[[constraint]]
branch = "master"
name = "github.com/bep/go-tocss"
[[override]]
branch = "master"
name = "github.com/wellington/go-libsass"
[[constraint]]
name = "github.com/chaseadamsio/goorgeous"
version = "^1.1.0"
@ -149,3 +157,15 @@
[[constraint]]
name = "github.com/bep/debounce"
version = "^1.1.0"
[[constraint]]
name = "github.com/tdewolff/minify"
version = "^2.3.5"
[[constraint]]
branch = "master"
name = "github.com/BurntSushi/locker"
[[constraint]]
branch = "master"
name = "github.com/mitchellh/hashstructure"


@ -1,8 +1,14 @@
image: Visual Studio 2015
init:
- set PATH=%PATH%;C:\MinGW\bin;%GOPATH%\bin
- set PATH=%PATH%;C:\mingw-w64\x86_64-7.3.0-posix-seh-rt_v5-rev0\mingw64\bin;%GOPATH%\bin
- go version
- go env
environment:
GOPATH: C:\GOPATH\
HUGO_BUILD_TAGS: extended
# clones and cd's to path
clone_folder: C:\GOPATH\src\github.com\gohugoio\hugo


@ -16,6 +16,7 @@ package commands
import (
"os"
"path/filepath"
"regexp"
"sync"
"time"
@ -46,6 +47,10 @@ type commandeerHugoState struct {
type commandeer struct {
*commandeerHugoState
// Currently only set when in "fast render mode". But it seems to
// be fast enough that we could maybe just add it for all server modes.
changeDetector *fileChangeDetector
// We need to reuse this on server rebuilds.
destinationFs afero.Fs
@ -105,6 +110,68 @@ func newCommandeer(mustHaveConfigFile, running bool, h *hugoBuilderCommon, f fla
return c, c.loadConfig(mustHaveConfigFile, running)
}
type fileChangeDetector struct {
sync.Mutex
current map[string]string
prev map[string]string
irrelevantRe *regexp.Regexp
}
func (f *fileChangeDetector) OnFileClose(name, md5sum string) {
f.Lock()
defer f.Unlock()
f.current[name] = md5sum
}
func (f *fileChangeDetector) changed() []string {
if f == nil {
return nil
}
f.Lock()
defer f.Unlock()
var c []string
for k, v := range f.current {
vv, found := f.prev[k]
if !found || v != vv {
c = append(c, k)
}
}
return f.filterIrrelevant(c)
}
func (f *fileChangeDetector) filterIrrelevant(in []string) []string {
var filtered []string
for _, v := range in {
if !f.irrelevantRe.MatchString(v) {
filtered = append(filtered, v)
}
}
return filtered
}
func (f *fileChangeDetector) PrepareNew() {
if f == nil {
return
}
f.Lock()
defer f.Unlock()
if f.current == nil {
f.current = make(map[string]string)
f.prev = make(map[string]string)
return
}
f.prev = make(map[string]string)
for k, v := range f.current {
f.prev[k] = v
}
f.current = make(map[string]string)
}
func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
if c.DepsCfg == nil {
@ -202,6 +269,23 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
fs.Destination = new(afero.MemMapFs)
}
doLiveReload := !c.h.buildWatch && !config.GetBool("disableLiveReload")
fastRenderMode := doLiveReload && !config.GetBool("disableFastRender")
if fastRenderMode {
// For now, fast render mode only. It should, however, be fast enough
// for the full variant, too.
changeDetector := &fileChangeDetector{
// We use this detector to decide to do a Hot reload of a single path or not.
// We need to filter out source maps and possibly some other to be able
// to make that decision.
irrelevantRe: regexp.MustCompile(`\.map$`),
}
changeDetector.PrepareNew()
fs.Destination = hugofs.NewHashingFs(fs.Destination, changeDetector)
c.changeDetector = changeDetector
}
err = c.initFs(fs)
if err != nil {
return


@ -474,6 +474,10 @@ func (c *commandeer) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint6
return numFiles, err
}
func (c *commandeer) firstPathSpec() *helpers.PathSpec {
return c.hugo.Sites[0].PathSpec
}
func (c *commandeer) timeTrack(start time.Time, name string) {
if c.h.quiet {
return
@ -552,8 +556,8 @@ func (c *commandeer) getDirList() ([]string, error) {
// SymbolicWalk will log anny ERRORs
// Also note that the Dirnames fetched below will contain any relevant theme
// directories.
for _, contentDir := range c.hugo.PathSpec.BaseFs.AbsContentDirs {
_ = helpers.SymbolicWalk(c.Fs.Source, contentDir.Value, symLinkWalker)
for _, contentDir := range c.hugo.PathSpec.BaseFs.Content.Dirnames {
_ = helpers.SymbolicWalk(c.Fs.Source, contentDir, symLinkWalker)
}
for _, staticDir := range c.hugo.PathSpec.BaseFs.Data.Dirnames {
@ -574,6 +578,10 @@ func (c *commandeer) getDirList() ([]string, error) {
}
}
for _, assetDir := range c.hugo.PathSpec.BaseFs.Assets.Dirnames {
_ = helpers.SymbolicWalk(c.Fs.Source, assetDir, regularWalker)
}
if len(nested) > 0 {
for {
@ -818,13 +826,11 @@ func (c *commandeer) newWatcher(dirList ...string) (*watcher.Batcher, error) {
// Will block forever trying to write to a channel that nobody is reading if livereload isn't initialized
// force refresh when more than one file
if len(staticEvents) > 0 {
for _, ev := range staticEvents {
if len(staticEvents) == 1 {
ev := staticEvents[0]
path := c.hugo.BaseFs.SourceFilesystems.MakeStaticPathRelative(ev.Name)
path = c.firstPathSpec().RelURL(helpers.ToSlashTrimLeading(path), false)
livereload.RefreshPath(path)
}
} else {
livereload.ForceRefresh()
}
@ -832,18 +838,38 @@ func (c *commandeer) newWatcher(dirList ...string) (*watcher.Batcher, error) {
}
if len(dynamicEvents) > 0 {
partitionedEvents := partitionDynamicEvents(
c.firstPathSpec().BaseFs.SourceFilesystems,
dynamicEvents)
doLiveReload := !c.h.buildWatch && !c.Cfg.GetBool("disableLiveReload")
onePageName := pickOneWriteOrCreatePath(dynamicEvents)
onePageName := pickOneWriteOrCreatePath(partitionedEvents.ContentEvents)
c.Logger.FEEDBACK.Println("\nChange detected, rebuilding site")
const layout = "2006-01-02 15:04:05.000 -0700"
c.Logger.FEEDBACK.Println(time.Now().Format(layout))
c.changeDetector.PrepareNew()
if err := c.rebuildSites(dynamicEvents); err != nil {
c.Logger.ERROR.Println("Failed to rebuild site:", err)
}
if doLiveReload {
if len(partitionedEvents.ContentEvents) == 0 && len(partitionedEvents.AssetEvents) > 0 {
changed := c.changeDetector.changed()
if c.changeDetector != nil && len(changed) == 0 {
// Nothing has changed.
continue
} else if len(changed) == 1 {
pathToRefresh := c.firstPathSpec().RelURL(helpers.ToSlashTrimLeading(changed[0]), false)
livereload.RefreshPath(pathToRefresh)
} else {
livereload.ForceRefresh()
}
}
if len(partitionedEvents.ContentEvents) > 0 {
navigate := c.Cfg.GetBool("navigateToChanged")
// We have fetched the same page above, but it may have
// changed.
@ -853,7 +879,6 @@ func (c *commandeer) newWatcher(dirList ...string) (*watcher.Batcher, error) {
if onePageName != "" {
p = c.hugo.GetContentPage(onePageName)
}
}
if p != nil {
@ -863,6 +888,7 @@ func (c *commandeer) newWatcher(dirList ...string) (*watcher.Batcher, error) {
}
}
}
}
case err := <-watcher.Errors:
if err != nil {
c.Logger.ERROR.Println(err)
@ -874,6 +900,26 @@ func (c *commandeer) newWatcher(dirList ...string) (*watcher.Batcher, error) {
return watcher, nil
}
// dynamicEvents contains events that is considered dynamic, as in "not static".
// Both of these categories will trigger a new build, but the asset events
// does not fit into the "navigate to changed" logic.
type dynamicEvents struct {
ContentEvents []fsnotify.Event
AssetEvents []fsnotify.Event
}
func partitionDynamicEvents(sourceFs *filesystems.SourceFilesystems, events []fsnotify.Event) (de dynamicEvents) {
for _, e := range events {
if sourceFs.IsAsset(e.Name) {
de.AssetEvents = append(de.AssetEvents, e)
} else {
de.ContentEvents = append(de.ContentEvents, e)
}
}
return
}
func pickOneWriteOrCreatePath(events []fsnotify.Event) string {
name := ""

common/errors/errors.go

@ -0,0 +1,23 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package errors contains common Hugo errors and error related utilities.
package errors
import (
"errors"
)
// We will, at least to begin with, make some Hugo features (SCSS with libsass) optional,
// and this error is used to signal those situations.
var FeatureNotAvailableErr = errors.New("this feature is not available in your current Hugo version")


@ -134,7 +134,7 @@ func executeArcheTypeAsTemplate(s *hugolib.Site, kind, targetPath, archetypeFile
return nil, fmt.Errorf("Failed to parse archetype file %q: %s", archetypeFilename, err)
}
templ := templateHandler.Lookup(templateName)
templ, _ := templateHandler.Lookup(templateName)
var buff bytes.Buffer
if err := templ.Execute(&buff, data); err != nil {


@ -88,6 +88,8 @@ func initViper(v *viper.Viper) {
v.Set("i18nDir", "i18n")
v.Set("theme", "sample")
v.Set("archetypeDir", "archetypes")
v.Set("resourceDir", "resources")
v.Set("publishDir", "public")
}
func initFs(fs *hugofs.Fs) error {
@ -191,6 +193,7 @@ func newTestCfg() (*viper.Viper, *hugofs.Fs) {
v.Set("i18nDir", "i18n")
v.Set("layoutDir", "layouts")
v.Set("archetypeDir", "archetypes")
v.Set("assetDir", "assets")
fs := hugofs.NewMem(v)

deps/deps.go

@ -1,17 +1,18 @@
package deps
import (
"io/ioutil"
"log"
"os"
"time"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/metrics"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/resource"
"github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/tpl"
jww "github.com/spf13/jwalterweatherman"
@ -30,6 +31,9 @@ type Deps struct {
// The templates to use. This will usually implement the full tpl.TemplateHandler.
Tmpl tpl.TemplateFinder `json:"-"`
// We use this to parse and execute ad-hoc text templates.
TextTmpl tpl.TemplateParseFinder `json:"-"`
// The file systems to use.
Fs *hugofs.Fs `json:"-"`
@ -42,6 +46,9 @@ type Deps struct {
// The SourceSpec to use
SourceSpec *source.SourceSpec `json:"-"`
// The Resource Spec to use
ResourceSpec *resource.Spec
// The configuration to use
Cfg config.Provider `json:"-"`
@ -115,7 +122,7 @@ func New(cfg DepsCfg) (*Deps, error) {
}
if logger == nil {
logger = jww.NewNotepad(jww.LevelError, jww.LevelError, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime)
logger = loggers.NewErrorLogger()
}
if fs == nil {
@ -129,6 +136,11 @@ func New(cfg DepsCfg) (*Deps, error) {
return nil, err
}
resourceSpec, err := resource.NewSpec(ps, logger, cfg.MediaTypes)
if err != nil {
return nil, err
}
contentSpec, err := helpers.NewContentSpec(cfg.Language)
if err != nil {
return nil, err
@ -153,6 +165,7 @@ func New(cfg DepsCfg) (*Deps, error) {
PathSpec: ps,
ContentSpec: contentSpec,
SourceSpec: sp,
ResourceSpec: resourceSpec,
Cfg: cfg.Language,
Language: cfg.Language,
Timeout: time.Duration(timeoutms) * time.Millisecond,
@ -167,7 +180,8 @@ func New(cfg DepsCfg) (*Deps, error) {
// ForLanguage creates a copy of the Deps with the language dependent
// parts switched out.
func (d Deps) ForLanguage(l *langs.Language) (*Deps, error) {
func (d Deps) ForLanguage(cfg DepsCfg) (*Deps, error) {
l := cfg.Language
var err error
d.PathSpec, err = helpers.NewPathSpecWithBaseBaseFsProvided(d.Fs, l, d.BaseFs)
@ -180,6 +194,11 @@ func (d Deps) ForLanguage(l *langs.Language) (*Deps, error) {
return nil, err
}
d.ResourceSpec, err = resource.NewSpec(d.PathSpec, d.Log, cfg.MediaTypes)
if err != nil {
return nil, err
}
d.Cfg = l
d.Language = l
@ -212,6 +231,9 @@ type DepsCfg struct {
// The configuration to use.
Cfg config.Provider
// The media types configured.
MediaTypes media.Types
// Template handling.
TemplateProvider ResourceProvider
WithTemplate func(templ tpl.TemplateHandler) error


@ -356,7 +356,7 @@ func MD5String(f string) string {
// MD5FromFileFast creates a MD5 hash from the given file. It only reads parts of
// the file for speed, so don't use it if the files are very subtly different.
// It will not close the file.
func MD5FromFileFast(f afero.File) (string, error) {
func MD5FromFileFast(r io.ReadSeeker) (string, error) {
const (
// Do not change once set in stone!
maxChunks = 8
@ -369,7 +369,7 @@ func MD5FromFileFast(f afero.File) (string, error) {
for i := 0; i < maxChunks; i++ {
if i > 0 {
_, err := f.Seek(seek, 0)
_, err := r.Seek(seek, 0)
if err != nil {
if err == io.EOF {
break
@ -378,7 +378,7 @@ func MD5FromFileFast(f afero.File) (string, error) {
}
}
_, err := io.ReadAtLeast(f, buff, peekSize)
_, err := io.ReadAtLeast(r, buff, peekSize)
if err != nil {
if err == io.EOF || err == io.ErrUnexpectedEOF {
h.Write(buff)


@ -90,6 +90,11 @@ func (p *PathSpec) MakePathSanitized(s string) string {
return strings.ToLower(p.MakePath(s))
}
// ToSlashTrimLeading is just a filepath.ToSlaas with an added / prefix trimmer.
func ToSlashTrimLeading(s string) string {
return strings.TrimPrefix(filepath.ToSlash(s), "/")
}
// MakeTitle converts the path given to a suitable title, trimming whitespace
// and replacing hyphens with whitespace.
func MakeTitle(inpath string) string {
@ -222,12 +227,22 @@ func GetDottedRelativePath(inPath string) string {
return dottedPath
}
// ExtNoDelimiter takes a path and returns the extension, excluding the delmiter, i.e. "md".
func ExtNoDelimiter(in string) string {
return strings.TrimPrefix(Ext(in), ".")
}
// Ext takes a path and returns the extension, including the delmiter, i.e. ".md".
func Ext(in string) string {
_, ext := fileAndExt(in, fpb)
return ext
}
// PathAndExt is the same as FileAndExt, but it uses the path package.
func PathAndExt(in string) (string, string) {
return fileAndExt(in, pb)
}
// FileAndExt takes a path and returns the file and extension separated,
// the extension including the delmiter, i.e. ".md".
func FileAndExt(in string) (string, string) {


@ -78,6 +78,9 @@ func TestMakePathSanitized(t *testing.T) {
v.Set("dataDir", "data")
v.Set("i18nDir", "i18n")
v.Set("layoutDir", "layouts")
v.Set("assetDir", "assets")
v.Set("resourceDir", "resources")
v.Set("publishDir", "public")
v.Set("archetypeDir", "archetypes")
l := langs.NewDefaultLanguage(v)
@ -475,6 +478,7 @@ func createTempDirWithNonZeroLengthFiles() (string, error) {
return "", fileErr
}
byteString := []byte("byteString")
fileErr = ioutil.WriteFile(f.Name(), byteString, 0644)
if fileErr != nil {
// delete the file
@ -585,6 +589,11 @@ func TestAbsPathify(t *testing.T) {
}
func TestExtNoDelimiter(t *testing.T) {
assert := require.New(t)
assert.Equal("json", ExtNoDelimiter(filepath.FromSlash("/my/data.json")))
}
func TestFilename(t *testing.T) {
type test struct {
input, expected string


@ -38,6 +38,9 @@ func newTestCfg() *viper.Viper {
v.Set("dataDir", "data")
v.Set("i18nDir", "i18n")
v.Set("layoutDir", "layouts")
v.Set("assetDir", "assets")
v.Set("resourceDir", "resources")
v.Set("publishDir", "public")
v.Set("archetypeDir", "archetypes")
return v
}


@ -0,0 +1,84 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"os"
"github.com/spf13/afero"
)
// RealFilenameInfo is a thin wrapper around os.FileInfo adding the real filename.
type RealFilenameInfo interface {
os.FileInfo
// This is the real filename to the file in the underlying filesystem.
RealFilename() string
}
type realFilenameInfo struct {
os.FileInfo
realFilename string
}
func (f *realFilenameInfo) RealFilename() string {
return f.realFilename
}
func NewBasePathRealFilenameFs(base *afero.BasePathFs) *BasePathRealFilenameFs {
return &BasePathRealFilenameFs{BasePathFs: base}
}
// This is a thin wrapper around afero.BasePathFs that provides the real filename
// in Stat and LstatIfPossible.
type BasePathRealFilenameFs struct {
*afero.BasePathFs
}
func (b *BasePathRealFilenameFs) Stat(name string) (os.FileInfo, error) {
fi, err := b.BasePathFs.Stat(name)
if err != nil {
return nil, err
}
if _, ok := fi.(RealFilenameInfo); ok {
return fi, nil
}
filename, err := b.RealPath(name)
if err != nil {
return nil, &os.PathError{Op: "stat", Path: name, Err: err}
}
return &realFilenameInfo{FileInfo: fi, realFilename: filename}, nil
}
func (b *BasePathRealFilenameFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
fi, ok, err := b.BasePathFs.LstatIfPossible(name)
if err != nil {
return nil, false, err
}
if _, ok := fi.(RealFilenameInfo); ok {
return fi, ok, nil
}
filename, err := b.RealPath(name)
if err != nil {
return nil, false, &os.PathError{Op: "lstat", Path: name, Err: err}
}
return &realFilenameInfo{FileInfo: fi, realFilename: filename}, ok, nil
}

hugofs/hashing_fs.go

@ -0,0 +1,96 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"crypto/md5"
"encoding/hex"
"hash"
"os"
"github.com/spf13/afero"
)
var (
_ afero.Fs = (*md5HashingFs)(nil)
)
// FileHashReceiver will receive the filename an the content's MD5 sum on file close.
type FileHashReceiver interface {
OnFileClose(name, md5sum string)
}
type md5HashingFs struct {
afero.Fs
hashReceiver FileHashReceiver
}
// NewHashingFs creates a new filesystem that will receive MD5 checksums of
// any written file content on Close. Note that this is probably not a good
// idea for "full build" situations, but when doing fast render mode, the amount
// of files published is low, and it would be really nice to know exactly which
// of these files where actually changed.
// Note that this will only work for file operations that use the io.Writer
// to write content to file, but that is fine for the "publish content" use case.
func NewHashingFs(delegate afero.Fs, hashReceiver FileHashReceiver) afero.Fs {
return &md5HashingFs{Fs: delegate, hashReceiver: hashReceiver}
}
func (fs *md5HashingFs) Create(name string) (afero.File, error) {
f, err := fs.Fs.Create(name)
if err == nil {
f = fs.wrapFile(f)
}
return f, err
}
func (fs *md5HashingFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
f, err := fs.Fs.OpenFile(name, flag, perm)
if err == nil && isWrite(flag) {
f = fs.wrapFile(f)
}
return f, err
}
func (fs *md5HashingFs) wrapFile(f afero.File) afero.File {
return &hashingFile{File: f, h: md5.New(), hashReceiver: fs.hashReceiver}
}
func isWrite(flag int) bool {
return flag&os.O_RDWR != 0 || flag&os.O_WRONLY != 0
}
func (fs *md5HashingFs) Name() string {
return "md5HashingFs"
}
type hashingFile struct {
hashReceiver FileHashReceiver
h hash.Hash
afero.File
}
func (h *hashingFile) Write(p []byte) (n int, err error) {
n, err = h.File.Write(p)
if err != nil {
return
}
return h.h.Write(p)
}
func (h *hashingFile) Close() error {
sum := hex.EncodeToString(h.h.Sum(nil))
h.hashReceiver.OnFileClose(h.Name(), sum)
return h.File.Close()
}

hugofs/hashing_fs_test.go

@ -0,0 +1,53 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"testing"
"github.com/spf13/afero"
"github.com/stretchr/testify/require"
)
type testHashReceiver struct {
sum string
name string
}
func (t *testHashReceiver) OnFileClose(name, md5hash string) {
t.name = name
t.sum = md5hash
}
func TestHashingFs(t *testing.T) {
assert := require.New(t)
fs := afero.NewMemMapFs()
observer := &testHashReceiver{}
ofs := NewHashingFs(fs, observer)
f, err := ofs.Create("hashme")
assert.NoError(err)
_, err = f.Write([]byte("content"))
assert.NoError(err)
assert.NoError(f.Close())
assert.Equal("9a0364b9e99bb480dd25e1f0284c8555", observer.sum)
assert.Equal("hashme", observer.name)
f, err = ofs.Create("nowrites")
assert.NoError(err)
assert.NoError(f.Close())
assert.Equal("d41d8cd98f00b204e9800998ecf8427e", observer.sum)
}


@ -59,13 +59,14 @@ func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (i
t = "alias-xhtml"
}
var templ *tpl.TemplateAdapter
var templ tpl.Template
var found bool
if a.t != nil {
templ = a.t.Lookup("alias.html")
templ, found = a.t.Lookup("alias.html")
}
if templ == nil {
if !found {
def := defaultAliasTemplates.Lookup(t)
if def != nil {
templ = &tpl.TemplateAdapter{Template: def}


@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.


@ -411,6 +411,7 @@ func loadDefaultSettingsFor(v *viper.Viper) error {
v.SetDefault("metaDataFormat", "toml")
v.SetDefault("contentDir", "content")
v.SetDefault("layoutDir", "layouts")
v.SetDefault("assetDir", "assets")
v.SetDefault("staticDir", "static")
v.SetDefault("resourceDir", "resources")
v.SetDefault("archetypeDir", "archetypes")


@ -28,7 +28,6 @@ import (
"fmt"
"github.com/gohugoio/hugo/common/types"
"github.com/gohugoio/hugo/hugolib/paths"
"github.com/gohugoio/hugo/langs"
"github.com/spf13/afero"
@ -45,20 +44,10 @@ var filePathSeparator = string(filepath.Separator)
// to underline that even if they can be composites, they all have a base path set to a specific
// resource folder, e.g "/my-project/content". So, no absolute filenames needed.
type BaseFs struct {
// TODO(bep) make this go away
AbsContentDirs []types.KeyValueStr
// The filesystem used to capture content. This can be a composite and
// language aware file system.
ContentFs afero.Fs
// SourceFilesystems contains the different source file systems.
*SourceFilesystems
// The filesystem used to store resources (processed images etc.).
// This usually maps to /my-project/resources.
ResourcesFs afero.Fs
// The filesystem used to publish the rendered site.
// This usually maps to /my-project/public.
PublishFs afero.Fs
@ -71,35 +60,31 @@ type BaseFs struct {
// RelContentDir tries to create a path relative to the content root from
// the given filename. The return value is the path and language code.
func (b *BaseFs) RelContentDir(filename string) (string, string) {
for _, dir := range b.AbsContentDirs {
if strings.HasPrefix(filename, dir.Value) {
rel := strings.TrimPrefix(filename, dir.Value)
return strings.TrimPrefix(rel, filePathSeparator), dir.Key
func (b *BaseFs) RelContentDir(filename string) string {
for _, dirname := range b.SourceFilesystems.Content.Dirnames {
if strings.HasPrefix(filename, dirname) {
rel := strings.TrimPrefix(filename, dirname)
return strings.TrimPrefix(rel, filePathSeparator)
}
}
// Either not a content dir or already relative.
return filename, ""
}
// IsContent returns whether the given filename is in the content filesystem.
func (b *BaseFs) IsContent(filename string) bool {
for _, dir := range b.AbsContentDirs {
if strings.HasPrefix(filename, dir.Value) {
return true
}
}
return false
return filename
}
// SourceFilesystems contains the different source file systems. These can be
// composite file systems (theme and project etc.), and they have all root
// set to the source type the provides: data, i18n, static, layouts.
type SourceFilesystems struct {
Content *SourceFilesystem
Data *SourceFilesystem
I18n *SourceFilesystem
Layouts *SourceFilesystem
Archetypes *SourceFilesystem
Assets *SourceFilesystem
Resources *SourceFilesystem
// This is a unified read-only view of the project's and themes' workdir.
Work *SourceFilesystem
// When in multihost we have one static filesystem per language. The sync
// static files is currently done outside of the Hugo build (where there is
@ -112,8 +97,14 @@ type SourceFilesystems struct {
// i18n, layouts, static) and additional metadata to be able to use that filesystem
// in server mode.
type SourceFilesystem struct {
// This is a virtual composite filesystem. It expects path relative to a context.
Fs afero.Fs
// This is the base source filesystem. In real Hugo, this will be the OS filesystem.
// Use this if you need to resolve items in Dirnames below.
SourceFs afero.Fs
// Dirnames is absolute filenames to the directories in this filesystem.
Dirnames []string
// When syncing a source folder to the target (e.g. /public), this may
@ -122,6 +113,50 @@ type SourceFilesystem struct {
PublishFolder string
}
// ContentStaticAssetFs will create a new composite filesystem from the content,
// static, and asset filesystems. The site language is needed to pick the correct static filesystem.
// The order is content, static and then assets.
// TODO(bep) check usage
func (s SourceFilesystems) ContentStaticAssetFs(lang string) afero.Fs {
staticFs := s.StaticFs(lang)
base := afero.NewCopyOnWriteFs(s.Assets.Fs, staticFs)
return afero.NewCopyOnWriteFs(base, s.Content.Fs)
}
// StaticFs returns the static filesystem for the given language.
// This can be a composite filesystem.
func (s SourceFilesystems) StaticFs(lang string) afero.Fs {
var staticFs afero.Fs = hugofs.NoOpFs
if fs, ok := s.Static[lang]; ok {
staticFs = fs.Fs
} else if fs, ok := s.Static[""]; ok {
staticFs = fs.Fs
}
return staticFs
}
// StatResource looks for a resource in these filesystems in order: static, assets and finally content.
// If found in any of them, it returns FileInfo and the relevant filesystem.
// Any non os.IsNotExist error will be returned.
// An os.IsNotExist error wil be returned only if all filesystems return such an error.
// Note that if we only wanted to find the file, we could create a composite Afero fs,
// but we also need to know which filesystem root it lives in.
func (s SourceFilesystems) StatResource(lang, filename string) (fi os.FileInfo, fs afero.Fs, err error) {
for _, fsToCheck := range []afero.Fs{s.StaticFs(lang), s.Assets.Fs, s.Content.Fs} {
fs = fsToCheck
fi, err = fs.Stat(filename)
if err == nil || !os.IsNotExist(err) {
return
}
}
// Not found.
return
}
// IsStatic returns true if the given filename is a member of one of the static
// filesystems.
func (s SourceFilesystems) IsStatic(filename string) bool {
@ -133,6 +168,11 @@ func (s SourceFilesystems) IsStatic(filename string) bool {
return false
}
// IsContent returns true if the given filename is a member of the content filesystem.
func (s SourceFilesystems) IsContent(filename string) bool {
return s.Content.Contains(filename)
}
// IsLayout returns true if the given filename is a member of the layouts filesystem.
func (s SourceFilesystems) IsLayout(filename string) bool {
return s.Layouts.Contains(filename)
@ -143,6 +183,11 @@ func (s SourceFilesystems) IsData(filename string) bool {
return s.Data.Contains(filename)
}
// IsAsset returns true if the given filename is a member of the data filesystem.
func (s SourceFilesystems) IsAsset(filename string) bool {
return s.Assets.Contains(filename)
}
// IsI18n returns true if the given filename is a member of the i18n filesystem.
func (s SourceFilesystems) IsI18n(filename string) bool {
return s.I18n.Contains(filename)
@ -171,6 +216,18 @@ func (d *SourceFilesystem) MakePathRelative(filename string) string {
return ""
}
func (d *SourceFilesystem) RealFilename(rel string) string {
fi, err := d.Fs.Stat(rel)
if err != nil {
return rel
}
if realfi, ok := fi.(hugofs.RealFilenameInfo); ok {
return realfi.RealFilename()
}
return rel
}
// Contains returns whether the given filename is a member of the current filesystem.
func (d *SourceFilesystem) Contains(filename string) bool {
for _, dir := range d.Dirnames {
@ -181,6 +238,20 @@ func (d *SourceFilesystem) Contains(filename string) bool {
return false
}
// RealDirs gets a list of absolute paths to directorys starting from the given
// path.
func (d *SourceFilesystem) RealDirs(from string) []string {
var dirnames []string
for _, dir := range d.Dirnames {
dirname := filepath.Join(dir, from)
if _, err := hugofs.Os.Stat(dirname); err == nil {
dirnames = append(dirnames, dirname)
}
}
return dirnames
}
// WithBaseFs allows reuse of some potentially expensive to create parts that remain
// the same across sites/languages.
func WithBaseFs(b *BaseFs) func(*BaseFs) error {
@ -191,11 +262,15 @@ func WithBaseFs(b *BaseFs) func(*BaseFs) error {
}
}
func newRealBase(base afero.Fs) afero.Fs {
return hugofs.NewBasePathRealFilenameFs(base.(*afero.BasePathFs))
}
// NewBase builds the filesystems used by Hugo given the paths and options provided.NewBase
func NewBase(p *paths.Paths, options ...func(*BaseFs) error) (*BaseFs, error) {
fs := p.Fs
resourcesFs := afero.NewBasePathFs(fs.Source, p.AbsResourcesDir)
publishFs := afero.NewBasePathFs(fs.Destination, p.AbsPublishDir)
contentFs, absContentDirs, err := createContentFs(fs.Source, p.WorkingDir, p.DefaultContentLanguage, p.Languages)
@ -209,16 +284,13 @@ func NewBase(p *paths.Paths, options ...func(*BaseFs) error) (*BaseFs, error) {
if i == j {
continue
}
if strings.HasPrefix(d1.Value, d2.Value) || strings.HasPrefix(d2.Value, d1.Value) {
if strings.HasPrefix(d1, d2) || strings.HasPrefix(d2, d1) {
return nil, fmt.Errorf("found overlapping content dirs (%q and %q)", d1, d2)
}
}
}
b := &BaseFs{
AbsContentDirs: absContentDirs,
ContentFs: contentFs,
ResourcesFs: resourcesFs,
PublishFs: publishFs,
}
@ -234,6 +306,12 @@ func NewBase(p *paths.Paths, options ...func(*BaseFs) error) (*BaseFs, error) {
return nil, err
}
sourceFilesystems.Content = &SourceFilesystem{
SourceFs: fs.Source,
Fs: contentFs,
Dirnames: absContentDirs,
}
b.SourceFilesystems = sourceFilesystems
b.themeFs = builder.themeFs
b.AbsThemeDirs = builder.absThemeDirs
@ -281,18 +359,39 @@ func (b *sourceFilesystemsBuilder) Build() (*SourceFilesystems, error) {
}
b.result.I18n = sfs
sfs, err = b.createFs("layoutDir", "layouts")
sfs, err = b.createFs(false, true, "layoutDir", "layouts")
if err != nil {
return nil, err
}
b.result.Layouts = sfs
sfs, err = b.createFs("archetypeDir", "archetypes")
sfs, err = b.createFs(false, true, "archetypeDir", "archetypes")
if err != nil {
return nil, err
}
b.result.Archetypes = sfs
sfs, err = b.createFs(false, true, "assetDir", "assets")
if err != nil {
return nil, err
}
b.result.Assets = sfs
sfs, err = b.createFs(true, false, "resourceDir", "resources")
if err != nil {
return nil, err
}
b.result.Resources = sfs
err = b.createStaticFs()
sfs, err = b.createFs(false, true, "", "")
if err != nil {
return nil, err
}
b.result.Work = sfs
err = b.createStaticFs()
if err != nil {
return nil, err
@ -301,23 +400,38 @@ func (b *sourceFilesystemsBuilder) Build() (*SourceFilesystems, error) {
return b.result, nil
}
func (b *sourceFilesystemsBuilder) createFs(dirKey, themeFolder string) (*SourceFilesystem, error) {
s := &SourceFilesystem{}
dir := b.p.Cfg.GetString(dirKey)
func (b *sourceFilesystemsBuilder) createFs(
mkdir bool,
readOnly bool,
dirKey, themeFolder string) (*SourceFilesystem, error) {
s := &SourceFilesystem{
SourceFs: b.p.Fs.Source,
}
var dir string
if dirKey != "" {
dir = b.p.Cfg.GetString(dirKey)
if dir == "" {
return s, fmt.Errorf("config %q not set", dirKey)
}
}
var fs afero.Fs
absDir := b.p.AbsPathify(dir)
if b.existsInSource(absDir) {
fs = afero.NewBasePathFs(b.p.Fs.Source, absDir)
existsInSource := b.existsInSource(absDir)
if !existsInSource && mkdir {
// We really need this directory. Make it.
if err := b.p.Fs.Source.MkdirAll(absDir, 0777); err == nil {
existsInSource = true
}
}
if existsInSource {
fs = newRealBase(afero.NewBasePathFs(b.p.Fs.Source, absDir))
s.Dirnames = []string{absDir}
}
if b.hasTheme {
themeFolderFs := afero.NewBasePathFs(b.themeFs, themeFolder)
themeFolderFs := newRealBase(afero.NewBasePathFs(b.themeFs, themeFolder))
if fs == nil {
fs = themeFolderFs
} else {
@ -334,8 +448,10 @@ func (b *sourceFilesystemsBuilder) createFs(dirKey, themeFolder string) (*Source
if fs == nil {
s.Fs = hugofs.NoOpFs
} else {
} else if readOnly {
s.Fs = afero.NewReadOnlyFs(fs)
} else {
s.Fs = fs
}
return s, nil
@ -344,7 +460,9 @@ func (b *sourceFilesystemsBuilder) createFs(dirKey, themeFolder string) (*Source
// Used for data, i18n -- we cannot use overlay filsesystems for those, but we need
// to keep a strict order.
func (b *sourceFilesystemsBuilder) createRootMappingFs(dirKey, themeFolder string) (*SourceFilesystem, error) {
s := &SourceFilesystem{}
s := &SourceFilesystem{
SourceFs: b.p.Fs.Source,
}
projectDir := b.p.Cfg.GetString(dirKey)
if projectDir == "" {
@ -396,7 +514,9 @@ func (b *sourceFilesystemsBuilder) createStaticFs() error {
if isMultihost {
for _, l := range b.p.Languages {
s := &SourceFilesystem{PublishFolder: l.Lang}
s := &SourceFilesystem{
SourceFs: b.p.Fs.Source,
PublishFolder: l.Lang}
staticDirs := removeDuplicatesKeepRight(getStaticDirs(l))
if len(staticDirs) == 0 {
continue
@ -424,7 +544,10 @@ func (b *sourceFilesystemsBuilder) createStaticFs() error {
return nil
}
s := &SourceFilesystem{}
s := &SourceFilesystem{
SourceFs: b.p.Fs.Source,
}
var staticDirs []string
for _, l := range b.p.Languages {
@ -451,7 +574,7 @@ func (b *sourceFilesystemsBuilder) createStaticFs() error {
if b.hasTheme {
themeFolder := "static"
fs = afero.NewCopyOnWriteFs(afero.NewBasePathFs(b.themeFs, themeFolder), fs)
fs = afero.NewCopyOnWriteFs(newRealBase(afero.NewBasePathFs(b.themeFs, themeFolder)), fs)
for _, absThemeDir := range b.absThemeDirs {
s.Dirnames = append(s.Dirnames, filepath.Join(absThemeDir, themeFolder))
}
@ -484,7 +607,7 @@ func getStringOrStringSlice(cfg config.Provider, key string, id int) []string {
func createContentFs(fs afero.Fs,
workingDir,
defaultContentLanguage string,
languages langs.Languages) (afero.Fs, []types.KeyValueStr, error) {
languages langs.Languages) (afero.Fs, []string, error) {
var contentLanguages langs.Languages
var contentDirSeen = make(map[string]bool)
@ -511,7 +634,7 @@ func createContentFs(fs afero.Fs,
}
var absContentDirs []types.KeyValueStr
var absContentDirs []string
fs, err := createContentOverlayFs(fs, workingDir, contentLanguages, languageSet, &absContentDirs)
return fs, absContentDirs, err
@ -522,7 +645,7 @@ func createContentOverlayFs(source afero.Fs,
workingDir string,
languages langs.Languages,
languageSet map[string]bool,
absContentDirs *[]types.KeyValueStr) (afero.Fs, error) {
absContentDirs *[]string) (afero.Fs, error) {
if len(languages) == 0 {
return source, nil
}
@ -548,7 +671,7 @@ func createContentOverlayFs(source afero.Fs,
return nil, fmt.Errorf("invalid content dir %q: Path is too short", absContentDir)
}
*absContentDirs = append(*absContentDirs, types.KeyValueStr{Key: language.Lang, Value: absContentDir})
*absContentDirs = append(*absContentDirs, absContentDir)
overlay := hugofs.NewLanguageFs(language.Lang, languageSet, afero.NewBasePathFs(source, absContentDir))
if len(languages) == 1 {
@ -597,10 +720,10 @@ func createOverlayFs(source afero.Fs, absPaths []string) (afero.Fs, error) {
}
if len(absPaths) == 1 {
return afero.NewReadOnlyFs(afero.NewBasePathFs(source, absPaths[0])), nil
return afero.NewReadOnlyFs(newRealBase(afero.NewBasePathFs(source, absPaths[0]))), nil
}
base := afero.NewReadOnlyFs(afero.NewBasePathFs(source, absPaths[0]))
base := afero.NewReadOnlyFs(newRealBase(afero.NewBasePathFs(source, absPaths[0])))
overlay, err := createOverlayFs(source, absPaths[1:])
if err != nil {
return nil, err


@ -60,6 +60,10 @@ theme = ["atheme"]
setConfigAndWriteSomeFilesTo(fs.Source, v, "staticDir", "mystatic", 6)
setConfigAndWriteSomeFilesTo(fs.Source, v, "dataDir", "mydata", 7)
setConfigAndWriteSomeFilesTo(fs.Source, v, "archetypeDir", "myarchetypes", 8)
setConfigAndWriteSomeFilesTo(fs.Source, v, "assetDir", "myassets", 9)
setConfigAndWriteSomeFilesTo(fs.Source, v, "resourceDir", "myrsesource", 10)
v.Set("publishDir", "public")
p, err := paths.New(fs, v)
assert.NoError(err)
@ -88,12 +92,15 @@ theme = ["atheme"]
_, err = ff.Readdirnames(-1)
assert.NoError(err)
checkFileCount(bfs.ContentFs, "", assert, 3)
checkFileCount(bfs.Content.Fs, "", assert, 3)
checkFileCount(bfs.I18n.Fs, "", assert, 6) // 4 + 2 themes
checkFileCount(bfs.Layouts.Fs, "", assert, 5)
checkFileCount(bfs.Static[""].Fs, "", assert, 6)
checkFileCount(bfs.Data.Fs, "", assert, 9) // 7 + 2 themes
checkFileCount(bfs.Archetypes.Fs, "", assert, 8)
checkFileCount(bfs.Assets.Fs, "", assert, 9)
checkFileCount(bfs.Resources.Fs, "", assert, 10)
checkFileCount(bfs.Work.Fs, "", assert, 57)
assert.Equal([]string{filepath.FromSlash("/my/work/mydata"), filepath.FromSlash("/my/work/themes/btheme/data"), filepath.FromSlash("/my/work/themes/atheme/data")}, bfs.Data.Dirnames)
@ -101,15 +108,16 @@ theme = ["atheme"]
assert.True(bfs.IsI18n(filepath.Join(workingDir, "myi18n", "file1.txt")))
assert.True(bfs.IsLayout(filepath.Join(workingDir, "mylayouts", "file1.txt")))
assert.True(bfs.IsStatic(filepath.Join(workingDir, "mystatic", "file1.txt")))
assert.True(bfs.IsAsset(filepath.Join(workingDir, "myassets", "file1.txt")))
contentFilename := filepath.Join(workingDir, "mycontent", "file1.txt")
assert.True(bfs.IsContent(contentFilename))
rel, _ := bfs.RelContentDir(contentFilename)
rel := bfs.RelContentDir(contentFilename)
assert.Equal("file1.txt", rel)
}
func TestNewBaseFsEmpty(t *testing.T) {
assert := require.New(t)
func createConfig() *viper.Viper {
v := viper.New()
v.Set("contentDir", "mycontent")
v.Set("i18nDir", "myi18n")
@ -117,18 +125,90 @@ func TestNewBaseFsEmpty(t *testing.T) {
v.Set("dataDir", "mydata")
v.Set("layoutDir", "mylayouts")
v.Set("archetypeDir", "myarchetypes")
v.Set("assetDir", "myassets")
v.Set("resourceDir", "resources")
v.Set("publishDir", "public")
return v
}
func TestNewBaseFsEmpty(t *testing.T) {
assert := require.New(t)
v := createConfig()
fs := hugofs.NewMem(v)
p, err := paths.New(fs, v)
assert.NoError(err)
bfs, err := NewBase(p)
assert.NoError(err)
assert.NotNil(bfs)
assert.Equal(hugofs.NoOpFs, bfs.Archetypes.Fs)
assert.Equal(hugofs.NoOpFs, bfs.Layouts.Fs)
assert.Equal(hugofs.NoOpFs, bfs.Data.Fs)
assert.Equal(hugofs.NoOpFs, bfs.Assets.Fs)
assert.Equal(hugofs.NoOpFs, bfs.I18n.Fs)
assert.NotNil(hugofs.NoOpFs, bfs.ContentFs)
assert.NotNil(hugofs.NoOpFs, bfs.Static)
assert.NotNil(bfs.Work.Fs)
assert.NotNil(bfs.Content.Fs)
assert.NotNil(bfs.Static)
}
func TestRealDirs(t *testing.T) {
assert := require.New(t)
v := createConfig()
fs := hugofs.NewDefault(v)
sfs := fs.Source
root, err := afero.TempDir(sfs, "", "realdir")
assert.NoError(err)
themesDir, err := afero.TempDir(sfs, "", "themesDir")
assert.NoError(err)
defer func() {
os.RemoveAll(root)
os.RemoveAll(themesDir)
}()
v.Set("workingDir", root)
v.Set("contentDir", "content")
v.Set("resourceDir", "resources")
v.Set("publishDir", "public")
v.Set("themesDir", themesDir)
v.Set("theme", "mytheme")
assert.NoError(sfs.MkdirAll(filepath.Join(root, "myassets", "scss", "sf1"), 0755))
assert.NoError(sfs.MkdirAll(filepath.Join(root, "myassets", "scss", "sf2"), 0755))
assert.NoError(sfs.MkdirAll(filepath.Join(themesDir, "mytheme", "assets", "scss", "sf2"), 0755))
assert.NoError(sfs.MkdirAll(filepath.Join(themesDir, "mytheme", "assets", "scss", "sf3"), 0755))
assert.NoError(sfs.MkdirAll(filepath.Join(root, "resources"), 0755))
assert.NoError(sfs.MkdirAll(filepath.Join(themesDir, "mytheme", "resources"), 0755))
assert.NoError(sfs.MkdirAll(filepath.Join(root, "myassets", "js", "f2"), 0755))
afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "myassets", "scss", "sf1", "a1.scss")), []byte("content"), 0755)
afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "myassets", "scss", "sf2", "a3.scss")), []byte("content"), 0755)
afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "myassets", "scss", "a2.scss")), []byte("content"), 0755)
afero.WriteFile(sfs, filepath.Join(filepath.Join(themesDir, "mytheme", "assets", "scss", "sf2", "a3.scss")), []byte("content"), 0755)
afero.WriteFile(sfs, filepath.Join(filepath.Join(themesDir, "mytheme", "assets", "scss", "sf3", "a4.scss")), []byte("content"), 0755)
afero.WriteFile(sfs, filepath.Join(filepath.Join(themesDir, "mytheme", "resources", "t1.txt")), []byte("content"), 0755)
afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "resources", "p1.txt")), []byte("content"), 0755)
afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "resources", "p2.txt")), []byte("content"), 0755)
afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "myassets", "js", "f2", "a1.js")), []byte("content"), 0755)
afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "myassets", "js", "a2.js")), []byte("content"), 0755)
p, err := paths.New(fs, v)
assert.NoError(err)
bfs, err := NewBase(p)
assert.NoError(err)
assert.NotNil(bfs)
checkFileCount(bfs.Assets.Fs, "", assert, 6)
realDirs := bfs.Assets.RealDirs("scss")
assert.Equal(2, len(realDirs))
assert.Equal(filepath.Join(root, "myassets/scss"), realDirs[0])
assert.Equal(filepath.Join(themesDir, "mytheme/assets/scss"), realDirs[len(realDirs)-1])
checkFileCount(bfs.Resources.Fs, "", assert, 3)
}
func checkFileCount(fs afero.Fs, dirname string, assert *require.Assertions, expected int) {


@ -21,8 +21,6 @@ import (
"strings"
"sync"
"github.com/gohugoio/hugo/resource"
"github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/langs"
@ -182,8 +180,10 @@ func applyDepsIfNeeded(cfg deps.DepsCfg, sites ...*Site) error {
continue
}
if d == nil {
cfg.Language = s.Language
cfg.MediaTypes = s.mediaTypesConfig
if d == nil {
cfg.WithTemplate = s.withSiteTemplates(cfg.WithTemplate)
var err error
@ -200,7 +200,7 @@ func applyDepsIfNeeded(cfg deps.DepsCfg, sites ...*Site) error {
}
} else {
d, err = d.ForLanguage(s.Language)
d, err = d.ForLanguage(cfg)
if err != nil {
return err
}
@ -208,11 +208,6 @@ func applyDepsIfNeeded(cfg deps.DepsCfg, sites ...*Site) error {
s.Deps = d
}
s.resourceSpec, err = resource.NewSpec(s.Deps.PathSpec, s.mediaTypesConfig)
if err != nil {
return err
}
}
return nil
@ -701,7 +696,7 @@ func (m *contentChangeMap) resolveAndRemove(filename string) (string, string, bu
defer m.mu.RUnlock()
// Bundles share resources, so we need to start from the virtual root.
relPath, _ := m.pathSpec.RelContentDir(filename)
relPath := m.pathSpec.RelContentDir(filename)
dir, name := filepath.Split(relPath)
if !strings.HasSuffix(dir, helpers.FilePathSeparator) {
dir += helpers.FilePathSeparator


@ -461,7 +461,7 @@ func TestMultiSitesRebuild(t *testing.T) {
b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Shortcode: Bonjour")
b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Shortcode: Hello")
contentFs := b.H.BaseFs.ContentFs
contentFs := b.H.BaseFs.Content.Fs
for i, this := range []struct {
preFunc func(t *testing.T)
@ -698,7 +698,7 @@ title = "Svenska"
// Regular pages have no children
require.Len(t, svPage.Pages, 0)
require.Len(t, svPage.Data["Pages"], 0)
require.Len(t, svPage.data["Pages"], 0)
}


@ -21,6 +21,8 @@ import (
"reflect"
"unicode"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/langs"
@ -228,7 +230,7 @@ type Page struct {
title string
Description string
Keywords []string
Data map[string]interface{}
data map[string]interface{}
pagemeta.PageDates
@ -239,7 +241,8 @@ type Page struct {
permalink string
relPermalink string
// relative target path without extension and any base path element from the baseURL.
// relative target path without extension and any base path element
// from the baseURL or the language code.
// This is used to construct paths in the page resources.
relTargetPathBase string
// Is set to a forward slashed path if this is a Page resources living in a folder below its owner.
@ -272,12 +275,16 @@ type Page struct {
targetPathDescriptorPrototype *targetPathDescriptor
}
func stackTrace() string {
trace := make([]byte, 2000)
func stackTrace(lenght int) string {
trace := make([]byte, lenght)
runtime.Stack(trace, true)
return string(trace)
}
func (p *Page) Data() interface{} {
return p.data
}
func (p *Page) initContent() {
p.contentInit.Do(func() {
@ -492,6 +499,10 @@ func (p *Page) BundleType() string {
return ""
}
func (p *Page) MediaType() media.Type {
return media.OctetType
}
type Source struct {
Frontmatter []byte
Content []byte
@ -1900,7 +1911,7 @@ func (p *Page) prepareLayouts() error {
func (p *Page) prepareData(s *Site) error {
if p.Kind != KindSection {
var pages Pages
p.Data = make(map[string]interface{})
p.data = make(map[string]interface{})
switch p.Kind {
case KindPage:
@ -1919,21 +1930,21 @@ func (p *Page) prepareData(s *Site) error {
singular := s.taxonomiesPluralSingular[plural]
taxonomy := s.Taxonomies[plural].Get(term)
p.Data[singular] = taxonomy
p.Data["Singular"] = singular
p.Data["Plural"] = plural
p.Data["Term"] = term
p.data[singular] = taxonomy
p.data["Singular"] = singular
p.data["Plural"] = plural
p.data["Term"] = term
pages = taxonomy.Pages()
case KindTaxonomyTerm:
plural := p.sections[0]
singular := s.taxonomiesPluralSingular[plural]
p.Data["Singular"] = singular
p.Data["Plural"] = plural
p.Data["Terms"] = s.Taxonomies[plural]
p.data["Singular"] = singular
p.data["Plural"] = plural
p.data["Terms"] = s.Taxonomies[plural]
// keep the following just for legacy reasons
p.Data["OrderedIndex"] = p.Data["Terms"]
p.Data["Index"] = p.Data["Terms"]
p.data["OrderedIndex"] = p.data["Terms"]
p.data["Index"] = p.data["Terms"]
// A list of all KindTaxonomy pages with matching plural
for _, p := range s.findPagesByKind(KindTaxonomy) {
@ -1943,7 +1954,7 @@ func (p *Page) prepareData(s *Site) error {
}
}
p.Data["Pages"] = pages
p.data["Pages"] = pages
p.Pages = pages
}


@ -144,7 +144,7 @@ func (s *siteContentProcessor) process(ctx context.Context) error {
return nil
}
for _, file := range files {
f, err := s.site.BaseFs.ContentFs.Open(file.Filename())
f, err := s.site.BaseFs.Content.Fs.Open(file.Filename())
if err != nil {
return fmt.Errorf("failed to open assets file: %s", err)
}


@ -91,7 +91,7 @@ func TestPageBundlerCaptureSymlinks(t *testing.T) {
assert := require.New(t)
ps, workDir := newTestBundleSymbolicSources(t)
sourceSpec := source.NewSourceSpec(ps, ps.BaseFs.ContentFs)
sourceSpec := source.NewSourceSpec(ps, ps.BaseFs.Content.Fs)
fileStore := &storeFilenames{}
logger := loggers.NewErrorLogger()
@ -137,7 +137,7 @@ func TestPageBundlerCaptureBasic(t *testing.T) {
ps, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
sourceSpec := source.NewSourceSpec(ps, ps.BaseFs.ContentFs)
sourceSpec := source.NewSourceSpec(ps, ps.BaseFs.Content.Fs)
fileStore := &storeFilenames{}
@ -183,7 +183,7 @@ func TestPageBundlerCaptureMultilingual(t *testing.T) {
ps, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
sourceSpec := source.NewSourceSpec(ps, ps.BaseFs.ContentFs)
sourceSpec := source.NewSourceSpec(ps, ps.BaseFs.Content.Fs)
fileStore := &storeFilenames{}
c := newCapturer(loggers.NewErrorLogger(), sourceSpec, fileStore, nil)


@ -326,9 +326,14 @@ func (c *contentHandlers) createResource() contentHandler {
return notHandled
}
resource, err := c.s.resourceSpec.NewResourceFromFilename(
ctx.parentPage.subResourceTargetPathFactory,
ctx.source.Filename(), ctx.target)
resource, err := c.s.ResourceSpec.New(
resource.ResourceSourceDescriptor{
TargetPathBuilder: ctx.parentPage.subResourceTargetPathFactory,
SourceFile: ctx.source,
RelTargetFilename: ctx.target,
URLBase: c.s.GetURLLanguageBasePath(),
TargetPathBase: c.s.GetTargetLanguageBasePath(),
})
return handlerResult{err: err, handled: true, resource: resource}
}
@ -336,7 +341,7 @@ func (c *contentHandlers) createResource() contentHandler {
func (c *contentHandlers) copyFile() contentHandler {
return func(ctx *handlerContext) handlerResult {
f, err := c.s.BaseFs.ContentFs.Open(ctx.source.Filename())
f, err := c.s.BaseFs.Content.Fs.Open(ctx.source.Filename())
if err != nil {
err := fmt.Errorf("failed to open file in copyFile: %s", err)
return handlerResult{err: err}


@ -37,7 +37,6 @@ import (
"github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/resource"
"github.com/spf13/viper"
"github.com/stretchr/testify/require"
@ -158,7 +157,6 @@ func TestPageBundlerSiteRegular(t *testing.T) {
altFormat := leafBundle1.OutputFormats().Get("CUSTOMO")
assert.NotNil(altFormat)
assert.Equal(filepath.FromSlash("/work/base/b/my-bundle/c/logo.png"), image.(resource.Source).AbsSourceFilename())
assert.Equal("https://example.com/2017/pageslug/c/logo.png", image.Permalink())
th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/c/logo.png"), "content")


@ -220,6 +220,6 @@ func (c *PageCollections) clearResourceCacheForPage(page *Page) {
dir := path.Dir(first.RelPermalink())
dir = strings.TrimPrefix(dir, page.LanguagePrefix())
// This is done to keep the memory usage in check when doing live reloads.
page.s.resourceSpec.DeleteCacheByPrefix(dir)
page.s.ResourceSpec.DeleteCacheByPrefix(dir)
}
}


@ -20,6 +20,10 @@ import (
"strings"
"sync"
bp "github.com/gohugoio/hugo/bufferpool"
"github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/resource"
"github.com/gohugoio/hugo/media"
@ -119,15 +123,15 @@ func (p *PageOutput) Render(layout ...string) template.HTML {
}
for _, layout := range l {
templ := p.s.Tmpl.Lookup(layout)
if templ == nil {
templ, found := p.s.Tmpl.Lookup(layout)
if !found {
// This is legacy from when we had only one output format and
// HTML templates only. Some have references to layouts without suffix.
// We default to good old HTML.
templ = p.s.Tmpl.Lookup(layout + ".html")
templ, found = p.s.Tmpl.Lookup(layout + ".html")
}
if templ != nil {
res, err := templ.ExecuteToString(p)
res, err := executeToString(templ, p)
if err != nil {
p.s.DistinctErrorLog.Printf("in .Render: Failed to execute template %q: %s", layout, err)
return template.HTML("")
@ -140,6 +144,16 @@ func (p *PageOutput) Render(layout ...string) template.HTML {
}
func executeToString(templ tpl.Template, data interface{}) (string, error) {
b := bp.GetBuffer()
defer bp.PutBuffer(b)
if err := templ.Execute(b, data); err != nil {
return "", err
}
return b.String(), nil
}
func (p *Page) Render(layout ...string) template.HTML {
if p.mainPageOutput == nil {
panic(fmt.Sprintf("programming error: no mainPageOutput for %q", p.Path()))
@ -265,7 +279,7 @@ func (p *PageOutput) renderResources() error {
// mode when the same resource is member of different page bundles.
p.deleteResource(i)
} else {
p.s.Log.ERROR.Printf("Failed to publish %q for page %q: %s", src.AbsSourceFilename(), p.pathOrTitle(), err)
p.s.Log.ERROR.Printf("Failed to publish Resource for page %q: %s", p.pathOrTitle(), err)
}
} else {
p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Files)


@ -139,7 +139,11 @@ func (p *Page) initURLs() error {
return err
}
p.relTargetPathBase = strings.TrimSuffix(target, f.MediaType.FullSuffix())
p.relTargetPathBase = strings.TrimPrefix(strings.TrimSuffix(target, f.MediaType.FullSuffix()), "/")
if prefix := p.s.GetLanguagePrefix(); prefix != "" {
// Any language code in the path will be added later.
p.relTargetPathBase = strings.TrimPrefix(p.relTargetPathBase, prefix+"/")
}
p.relPermalink = p.s.PathSpec.PrependBasePath(rel)
p.layoutDescriptor = p.createLayoutDescriptor()
return nil


@ -27,7 +27,7 @@ import (
func TestPageTargetPath(t *testing.T) {
pathSpec := newTestDefaultPathSpec()
pathSpec := newTestDefaultPathSpec(t)
noExtNoDelimMediaType := media.TextType
noExtNoDelimMediaType.Suffix = ""


@ -289,7 +289,7 @@ func (p *PageOutput) Paginator(options ...interface{}) (*Pager, error) {
if p.s.owner.IsMultihost() {
pathDescriptor.LangPrefix = ""
}
pagers, err := paginatePages(pathDescriptor, p.Data["Pages"], pagerSize)
pagers, err := paginatePages(pathDescriptor, p.data["Pages"], pagerSize)
if err != nil {
initError = err


@ -281,7 +281,7 @@ func doTestPaginator(t *testing.T, useViper bool) {
pages := createTestPages(s, 12)
n1, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
n2, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
n1.Data["Pages"] = pages
n1.data["Pages"] = pages
var paginator1 *Pager
@ -301,7 +301,7 @@ func doTestPaginator(t *testing.T, useViper bool) {
require.Nil(t, err)
require.Equal(t, paginator2, paginator1.Next())
n1.Data["Pages"] = createTestPages(s, 1)
n1.data["Pages"] = createTestPages(s, 1)
samePaginator, _ := n1.Paginator()
require.Equal(t, paginator1, samePaginator)


@ -27,13 +27,21 @@ type BaseURL struct {
}
func (b BaseURL) String() string {
if b.urlStr != "" {
return b.urlStr
}
return b.url.String()
}
func (b BaseURL) Path() string {
return b.url.Path
}
// HostURL returns the URL to the host root without any path elements.
func (b BaseURL) HostURL() string {
return strings.TrimSuffix(b.String(), b.Path())
}
// WithProtocol returns the BaseURL prefixed with the given protocol.
// The Protocol is normally of the form "scheme://", i.e. "webcal://".
func (b BaseURL) WithProtocol(protocol string) (string, error) {


@ -58,4 +58,9 @@ func TestBaseURL(t *testing.T) {
require.NoError(t, err)
require.Equal(t, "", b.String())
// BaseURL with sub path
b, err = newBaseURLFromString("http://example.com/sub")
require.NoError(t, err)
require.Equal(t, "http://example.com/sub", b.String())
require.Equal(t, "http://example.com", b.HostURL())
}


@ -42,7 +42,10 @@ type Paths struct {
ContentDir string
ThemesDir string
WorkingDir string
// Directories to store Resource related artifacts.
AbsResourcesDir string
AbsPublishDir string
// pagination path handling
@ -79,12 +82,21 @@ func New(fs *hugofs.Fs, cfg config.Provider) (*Paths, error) {
return nil, fmt.Errorf("Failed to create baseURL from %q: %s", baseURLstr, err)
}
// TODO(bep)
contentDir := cfg.GetString("contentDir")
workingDir := cfg.GetString("workingDir")
resourceDir := cfg.GetString("resourceDir")
publishDir := cfg.GetString("publishDir")
if contentDir == "" {
return nil, fmt.Errorf("contentDir not set")
}
if resourceDir == "" {
return nil, fmt.Errorf("resourceDir not set")
}
if publishDir == "" {
return nil, fmt.Errorf("publishDir not set")
}
defaultContentLanguage := cfg.GetString("defaultContentLanguage")
var (
@ -183,6 +195,21 @@ func (p *Paths) Themes() []string {
return p.themes
}
func (p *Paths) GetTargetLanguageBasePath() string {
if p.Languages.IsMultihost() {
// In a multihost configuration all assets will be published below the language code.
return p.Lang()
}
return p.GetLanguagePrefix()
}
func (p *Paths) GetURLLanguageBasePath() string {
if p.Languages.IsMultihost() {
return ""
}
return p.GetLanguagePrefix()
}
func (p *Paths) GetLanguagePrefix() string {
if !p.multilingual {
return ""


@ -30,6 +30,10 @@ func TestNewPaths(t *testing.T) {
v.Set("defaultContentLanguageInSubdir", true)
v.Set("defaultContentLanguage", "no")
v.Set("multilingual", true)
v.Set("contentDir", "content")
v.Set("workingDir", "work")
v.Set("resourceDir", "resources")
v.Set("publishDir", "public")
p, err := New(fs, v)
assert.NoError(err)


@ -19,23 +19,29 @@ import (
"os"
"strings"
"github.com/gohugoio/hugo/helpers"
"github.com/spf13/afero"
)
// GC requires a build first.
func (h *HugoSites) GC() (int, error) {
s := h.Sites[0]
fs := h.PathSpec.BaseFs.ResourcesFs
fs := h.PathSpec.BaseFs.Resources.Fs
imageCacheDir := s.resourceSpec.GenImagePath
imageCacheDir := s.ResourceSpec.GenImagePath
if len(imageCacheDir) < 10 {
panic("invalid image cache")
}
assetsCacheDir := s.ResourceSpec.GenAssetsPath
if len(assetsCacheDir) < 10 {
panic("invalid assets cache")
}
isInUse := func(filename string) bool {
isImageInUse := func(filename string) bool {
key := strings.TrimPrefix(filename, imageCacheDir)
for _, site := range h.Sites {
if site.resourceSpec.IsInCache(key) {
if site.ResourceSpec.IsInImageCache(key) {
return true
}
}
@ -43,14 +49,27 @@ func (h *HugoSites) GC() (int, error) {
return false
}
counter := 0
isAssetInUse := func(filename string) bool {
key := strings.TrimPrefix(filename, assetsCacheDir)
// These assets are stored in tuplets with an added extension to the key.
key = strings.TrimSuffix(key, helpers.Ext(key))
for _, site := range h.Sites {
if site.ResourceSpec.ResourceCache.Contains(key) {
return true
}
}
err := afero.Walk(fs, imageCacheDir, func(path string, info os.FileInfo, err error) error {
return false
}
walker := func(dirname string, inUse func(filename string) bool) (int, error) {
counter := 0
err := afero.Walk(fs, dirname, func(path string, info os.FileInfo, err error) error {
if info == nil {
return nil
}
if !strings.HasPrefix(path, imageCacheDir) {
if !strings.HasPrefix(path, dirname) {
return fmt.Errorf("Invalid state, walk outside of resource dir: %q", path)
}
@ -69,7 +88,7 @@ func (h *HugoSites) GC() (int, error) {
return nil
}
inUse := isInUse(path)
inUse := inUse(path)
if !inUse {
err := fs.Remove(path)
if err != nil && !os.IsNotExist(err) {
@ -82,5 +101,16 @@ func (h *HugoSites) GC() (int, error) {
})
return counter, err
}
imageCounter, err1 := walker(imageCacheDir, isImageInUse)
assetsCounter, err2 := walker(assetsCacheDir, isAssetInUse)
totalCount := imageCounter + assetsCounter
if err1 != nil {
return totalCount, err1
}
return totalCount, err2
}


@ -0,0 +1,210 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"path/filepath"
"testing"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/resource/tocss/scss"
)
func TestResourceChain(t *testing.T) {
t.Parallel()
tests := []struct {
name string
shouldRun func() bool
prepare func(b *sitesBuilder)
verify func(b *sitesBuilder)
}{
{"tocss", func() bool { return scss.Supports() }, func(b *sitesBuilder) {
b.WithTemplates("home.html", `
{{ $scss := resources.Get "scss/styles2.scss" | toCSS }}
{{ $sass := resources.Get "sass/styles3.sass" | toCSS }}
{{ $scssCustomTarget := resources.Get "scss/styles2.scss" | toCSS (dict "targetPath" "styles/main.css") }}
{{ $scssCustomTargetString := resources.Get "scss/styles2.scss" | toCSS "styles/main.css" }}
{{ $scssMin := resources.Get "scss/styles2.scss" | toCSS | minify }}
T1: Len Content: {{ len $scss.Content }}|RelPermalink: {{ $scss.RelPermalink }}|Permalink: {{ $scss.Permalink }}|MediaType: {{ $scss.MediaType.Type }}
T2: Content: {{ $scssMin.Content }}|RelPermalink: {{ $scssMin.RelPermalink }}
T3: Content: {{ len $scssCustomTarget.Content }}|RelPermalink: {{ $scssCustomTarget.RelPermalink }}|MediaType: {{ $scssCustomTarget.MediaType.Type }}
T4: Content: {{ len $scssCustomTargetString.Content }}|RelPermalink: {{ $scssCustomTargetString.RelPermalink }}|MediaType: {{ $scssCustomTargetString.MediaType.Type }}
T5: Content: {{ $sass.Content }}|T5 RelPermalink: {{ $sass.RelPermalink }}|
`)
}, func(b *sitesBuilder) {
b.AssertFileContent("public/index.html", `T1: Len Content: 24|RelPermalink: /scss/styles2.css|Permalink: http://example.com/scss/styles2.css|MediaType: text/css`)
b.AssertFileContent("public/index.html", `T2: Content: body{color:#333}|RelPermalink: /scss/styles2.min.css`)
b.AssertFileContent("public/index.html", `T3: Content: 24|RelPermalink: /styles/main.css|MediaType: text/css`)
b.AssertFileContent("public/index.html", `T4: Content: 24|RelPermalink: /styles/main.css|MediaType: text/css`)
b.AssertFileContent("public/index.html", `T5: Content: .content-navigation {`)
b.AssertFileContent("public/index.html", `T5 RelPermalink: /sass/styles3.css|`)
}},
{"minify", func() bool { return true }, func(b *sitesBuilder) {
b.WithTemplates("home.html", `
Min CSS: {{ ( resources.Get "css/styles1.css" | minify ).Content }}
Min JS: {{ ( resources.Get "js/script1.js" | resources.Minify ).Content | safeJS }}
Min JSON: {{ ( resources.Get "mydata/json1.json" | resources.Minify ).Content | safeHTML }}
Min XML: {{ ( resources.Get "mydata/xml1.xml" | resources.Minify ).Content | safeHTML }}
Min SVG: {{ ( resources.Get "mydata/svg1.svg" | resources.Minify ).Content | safeHTML }}
Min SVG again: {{ ( resources.Get "mydata/svg1.svg" | resources.Minify ).Content | safeHTML }}
Min HTML: {{ ( resources.Get "mydata/html1.html" | resources.Minify ).Content | safeHTML }}
`)
}, func(b *sitesBuilder) {
b.AssertFileContent("public/index.html", `Min CSS: h1{font-style:bold}`)
b.AssertFileContent("public/index.html", `Min JS: var x;x=5;document.getElementById(&#34;demo&#34;).innerHTML=x*10;`)
b.AssertFileContent("public/index.html", `Min JSON: {"employees":[{"firstName":"John","lastName":"Doe"},{"firstName":"Anna","lastName":"Smith"},{"firstName":"Peter","lastName":"Jones"}]}`)
b.AssertFileContent("public/index.html", `Min XML: <hello><world>Hugo Rocks!</<world></hello>`)
b.AssertFileContent("public/index.html", `Min SVG: <svg height="100" width="100"><path d="M5 10 20 40z"/></svg>`)
b.AssertFileContent("public/index.html", `Min SVG again: <svg height="100" width="100"><path d="M5 10 20 40z"/></svg>`)
b.AssertFileContent("public/index.html", `Min HTML: <a href=#>Cool</a>`)
}},
{"concat", func() bool { return true }, func(b *sitesBuilder) {
b.WithTemplates("home.html", `
{{ $a := "A" | resources.FromString "a.txt"}}
{{ $b := "B" | resources.FromString "b.txt"}}
{{ $c := "C" | resources.FromString "c.txt"}}
{{ $combined := slice $a $b $c | resources.Concat "bundle/concat.txt" }}
T: Content: {{ $combined.Content }}|RelPermalink: {{ $combined.RelPermalink }}|Permalink: {{ $combined.Permalink }}|MediaType: {{ $combined.MediaType.Type }}
`)
}, func(b *sitesBuilder) {
b.AssertFileContent("public/index.html", `T: Content: ABC|RelPermalink: /bundle/concat.txt|Permalink: http://example.com/bundle/concat.txt|MediaType: text/plain`)
b.AssertFileContent("public/bundle/concat.txt", "ABC")
}},
{"fromstring", func() bool { return true }, func(b *sitesBuilder) {
b.WithTemplates("home.html", `
{{ $r := "Hugo Rocks!" | resources.FromString "rocks/hugo.txt" }}
{{ $r.Content }}|{{ $r.RelPermalink }}|{{ $r.Permalink }}|{{ $r.MediaType.Type }}
`)
}, func(b *sitesBuilder) {
b.AssertFileContent("public/index.html", `Hugo Rocks!|/rocks/hugo.txt|http://example.com/rocks/hugo.txt|text/plain`)
b.AssertFileContent("public/rocks/hugo.txt", "Hugo Rocks!")
}},
{"execute-as-template", func() bool { return true }, func(b *sitesBuilder) {
b.WithTemplates("home.html", `
{{ $result := "{{ .Kind | upper }}" | resources.FromString "mytpl.txt" | resources.ExecuteAsTemplate "result.txt" . }}
T1: {{ $result.Content }}|{{ $result.RelPermalink}}|{{$result.MediaType.Type }}
`)
}, func(b *sitesBuilder) {
b.AssertFileContent("public/index.html", `T1: HOME|/result.txt|text/plain`)
}},
{"fingerprint", func() bool { return true }, func(b *sitesBuilder) {
b.WithTemplates("home.html", `
{{ $r := "ab" | resources.FromString "rocks/hugo.txt" }}
{{ $result := $r | fingerprint }}
{{ $result512 := $r | fingerprint "sha512" }}
{{ $resultMD5 := $r | fingerprint "md5" }}
T1: {{ $result.Content }}|{{ $result.RelPermalink}}|{{$result.MediaType.Type }}|{{ $result.Data.Integrity }}|
T2: {{ $result512.Content }}|{{ $result512.RelPermalink}}|{{$result512.MediaType.Type }}|{{ $result512.Data.Integrity }}|
T3: {{ $resultMD5.Content }}|{{ $resultMD5.RelPermalink}}|{{$resultMD5.MediaType.Type }}|{{ $resultMD5.Data.Integrity }}|
`)
}, func(b *sitesBuilder) {
b.AssertFileContent("public/index.html", `T1: ab|/rocks/hugo.fb8e20fc2e4c3f248c60c39bd652f3c1347298bb977b8b4d5903b85055620603.txt|text/plain|sha256-&#43;44g/C5MPySMYMOb1lLzwTRymLuXe4tNWQO4UFViBgM=|`)
b.AssertFileContent("public/index.html", `T2: ab|/rocks/hugo.2d408a0717ec188158278a796c689044361dc6fdde28d6f04973b80896e1823975cdbf12eb63f9e0591328ee235d80e9b5bf1aa6a44f4617ff3caf6400eb172d.txt|text/plain|sha512-LUCKBxfsGIFYJ4p5bGiQRDYdxv3eKNbwSXO4CJbhgjl1zb8S62P54FkTKO4jXYDptb8apqRPRhf/PK9kAOsXLQ==|`)
b.AssertFileContent("public/index.html", `T3: ab|/rocks/hugo.187ef4436122d1cc2f40dc2b92f0eba0.txt|text/plain|md5-GH70Q2Ei0cwvQNwrkvDroA==|`)
}},
{"template", func() bool { return true }, func(b *sitesBuilder) {}, func(b *sitesBuilder) {
}},
}
for _, test := range tests {
if !test.shouldRun() {
t.Log("Skip", test.name)
continue
}
b := newTestSitesBuilder(t).WithLogger(loggers.NewWarningLogger())
b.WithSimpleConfigFile()
b.WithContent("page.md", `
---
title: Hello
---
`)
b.WithSourceFile(filepath.Join("assets", "css", "styles1.css"), `
h1 {
font-style: bold;
}
`)
b.WithSourceFile(filepath.Join("assets", "js", "script1.js"), `
var x;
x = 5;
document.getElementById("demo").innerHTML = x * 10;
`)
b.WithSourceFile(filepath.Join("assets", "mydata", "json1.json"), `
{
"employees":[
{"firstName":"John", "lastName":"Doe"},
{"firstName":"Anna", "lastName":"Smith"},
{"firstName":"Peter", "lastName":"Jones"}
]
}
`)
b.WithSourceFile(filepath.Join("assets", "mydata", "svg1.svg"), `
<svg height="100" width="100">
<line x1="5" y1="10" x2="20" y2="40"/>
</svg>
`)
b.WithSourceFile(filepath.Join("assets", "mydata", "xml1.xml"), `
<hello>
<world>Hugo Rocks!</<world>
</hello>
`)
b.WithSourceFile(filepath.Join("assets", "mydata", "html1.html"), `
<html>
<a href="#">
Cool
</a >
</html>
`)
b.WithSourceFile(filepath.Join("assets", "scss", "styles2.scss"), `
$color: #333;
body {
color: $color;
}
`)
b.WithSourceFile(filepath.Join("assets", "sass", "styles3.sass"), `
$color: #333;
.content-navigation
border-color: $color
`)
t.Log("Test", test.name)
test.prepare(b)
b.Build(BuildCfg{})
test.verify(b)
}
}
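The `toCSS` cases above exercise both the plain-string and the `dict` form of the `targetPath` option. In a project template that could look like the following sketch; the `scss/main.scss` asset path is hypothetical:

```bash
{{ $style := resources.Get "scss/main.scss" | toCSS (dict "targetPath" "css/site.css") | minify }}
<link rel="stylesheet" href="{{ $style.RelPermalink }}">
```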


@ -545,7 +545,7 @@ Loop:
}
var err error
isInner, err = isInnerShortcode(tmpl)
isInner, err = isInnerShortcode(tmpl.(tpl.TemplateExecutor))
if err != nil {
return sc, fmt.Errorf("Failed to handle template for shortcode %q for page %q: %s", sc.name, p.Path(), err)
}
@ -709,7 +709,7 @@ func replaceShortcodeTokens(source []byte, prefix string, replacements map[strin
return source, nil
}
func getShortcodeTemplateForTemplateKey(key scKey, shortcodeName string, t tpl.TemplateFinder) *tpl.TemplateAdapter {
func getShortcodeTemplateForTemplateKey(key scKey, shortcodeName string, t tpl.TemplateFinder) tpl.Template {
isInnerShortcodeCache.RLock()
defer isInnerShortcodeCache.RUnlock()
@ -737,13 +737,13 @@ func getShortcodeTemplateForTemplateKey(key scKey, shortcodeName string, t tpl.T
for _, name := range names {
if x := t.Lookup("shortcodes/" + name); x != nil {
if x, found := t.Lookup("shortcodes/" + name); found {
return x
}
if x := t.Lookup("theme/shortcodes/" + name); x != nil {
if x, found := t.Lookup("theme/shortcodes/" + name); found {
return x
}
if x := t.Lookup("_internal/shortcodes/" + name); x != nil {
if x, found := t.Lookup("_internal/shortcodes/" + name); found {
return x
}
}


@ -27,12 +27,12 @@ import (
"strings"
"time"
"github.com/gohugoio/hugo/resource"
"github.com/gohugoio/hugo/langs"
src "github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/resource"
"golang.org/x/sync/errgroup"
"github.com/gohugoio/hugo/config"
@ -141,7 +141,6 @@ type Site struct {
// Logger etc.
*deps.Deps `json:"-"`
resourceSpec *resource.Spec
// The func used to title case titles.
titleFunc func(s string) string
@ -188,7 +187,6 @@ func (s *Site) reset() *Site {
outputFormatsConfig: s.outputFormatsConfig,
frontmatterHandler: s.frontmatterHandler,
mediaTypesConfig: s.mediaTypesConfig,
resourceSpec: s.resourceSpec,
Language: s.Language,
owner: s.owner,
PageCollections: newPageCollections()}
@ -691,7 +689,11 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
logger = helpers.NewDistinctFeedbackLogger()
)
for _, ev := range events {
cachePartitions := make([]string, len(events))
for i, ev := range events {
cachePartitions[i] = resource.ResourceKeyPartition(ev.Name)
if s.isContentDirEvent(ev) {
logger.Println("Source changed", ev)
sourceChanged = append(sourceChanged, ev)
@ -717,6 +719,11 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
}
}
// These in memory resource caches will be rebuilt on demand.
for _, s := range s.owner.Sites {
s.ResourceSpec.ResourceCache.DeletePartitions(cachePartitions...)
}
if len(tmplChanged) > 0 || len(i18nChanged) > 0 {
sites := s.owner.Sites
first := sites[0]
@ -731,7 +738,11 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
for i := 1; i < len(sites); i++ {
site := sites[i]
var err error
site.Deps, err = first.Deps.ForLanguage(site.Language)
depsCfg := deps.DepsCfg{
Language: site.Language,
MediaTypes: site.mediaTypesConfig,
}
site.Deps, err = first.Deps.ForLanguage(depsCfg)
if err != nil {
return whatChanged{}, err
}
@ -797,6 +808,7 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
if err := s.readAndProcessContent(filenamesChanged...); err != nil {
return whatChanged{}, err
}
}
changed := whatChanged{
@ -1240,7 +1252,7 @@ func (s *Site) readAndProcessContent(filenames ...string) error {
mainHandler := &contentCaptureResultHandler{contentProcessors: contentProcessors, defaultContentProcessor: defaultContentProcessor}
sourceSpec := source.NewSourceSpec(s.PathSpec, s.BaseFs.ContentFs)
sourceSpec := source.NewSourceSpec(s.PathSpec, s.BaseFs.Content.Fs)
if s.running() {
// Need to track changes.
@ -1717,6 +1729,8 @@ func (s *Site) renderForLayouts(name string, d interface{}, w io.Writer, layouts
templName = templ.Name()
}
s.DistinctErrorLog.Printf("Failed to render %q: %s", templName, r)
s.DistinctErrorLog.Printf("Stack Trace:\n%s", stackTrace(1200))
// TODO(bep) we really need to fix this. Also see below.
if !s.running() && !testMode {
os.Exit(-1)
@ -1753,7 +1767,7 @@ func (s *Site) renderForLayouts(name string, d interface{}, w io.Writer, layouts
func (s *Site) findFirstTemplate(layouts ...string) tpl.Template {
for _, layout := range layouts {
if templ := s.Tmpl.Lookup(layout); templ != nil {
if templ, found := s.Tmpl.Lookup(layout); found {
return templ
}
}
@ -1782,7 +1796,7 @@ func (s *Site) newNodePage(typ string, sections ...string) *Page {
pageContentInit: &pageContentInit{},
Kind: typ,
Source: Source{File: &source.FileInfo{}},
Data: make(map[string]interface{}),
data: make(map[string]interface{}),
Site: &s.Info,
sections: sections,
s: s}
@ -1797,7 +1811,7 @@ func (s *Site) newHomePage() *Page {
p := s.newNodePage(KindHome)
p.title = s.Info.Title
pages := Pages{}
p.Data["Pages"] = pages
p.data["Pages"] = pages
p.Pages = pages
return p
}


@ -252,7 +252,7 @@ func (s *Site) renderRSS(p *PageOutput) error {
limit := s.Cfg.GetInt("rssLimit")
if limit >= 0 && len(p.Pages) > limit {
p.Pages = p.Pages[:limit]
p.Data["Pages"] = p.Pages
p.data["Pages"] = p.Pages
}
layouts, err := s.layoutHandler.For(
@ -279,7 +279,7 @@ func (s *Site) render404() error {
p := s.newNodePage(kind404)
p.title = "404 Page not found"
p.Data["Pages"] = s.Pages
p.data["Pages"] = s.Pages
p.Pages = s.Pages
p.URLPath.URL = "404.html"
@ -326,7 +326,7 @@ func (s *Site) renderSitemap() error {
page.Sitemap.Priority = sitemapDefault.Priority
page.Sitemap.Filename = sitemapDefault.Filename
n.Data["Pages"] = pages
n.data["Pages"] = pages
n.Pages = pages
// TODO(bep) we have several of these
@ -369,7 +369,7 @@ func (s *Site) renderRobotsTXT() error {
if err := p.initTargetPathDescriptor(); err != nil {
return err
}
p.Data["Pages"] = s.Pages
p.data["Pages"] = s.Pages
p.Pages = s.Pages
rLayouts := []string{"robots.txt", "_default/robots.txt", "_internal/_default/robots.txt"}


@ -357,6 +357,6 @@ func (s *Site) assembleSections() Pages {
func (p *Page) setPagePages(pages Pages) {
pages.Sort()
p.Pages = pages
p.Data = make(map[string]interface{})
p.Data["Pages"] = pages
p.data = make(map[string]interface{})
p.data["Pages"] = pages
}


@ -277,7 +277,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
assert.NotNil(p, fmt.Sprint(sections))
if p.Pages != nil {
assert.Equal(p.Pages, p.Data["Pages"])
assert.Equal(p.Pages, p.data["Pages"])
}
assert.NotNil(p.Parent(), fmt.Sprintf("Parent nil: %q", test.sections))
test.verify(p)


@ -441,7 +441,7 @@ func (s *sitesBuilder) AssertFileContent(filename string, matches ...string) {
content := readDestination(s.T, s.Fs, filename)
for _, match := range matches {
if !strings.Contains(content, match) {
s.Fatalf("No match for %q in content for %s\n%s", match, filename, content)
s.Fatalf("No match for %q in content for %s\n%s\n%q", match, filename, content, content)
}
}
}
@ -519,7 +519,7 @@ func newTestPathSpec(fs *hugofs.Fs, v *viper.Viper) *helpers.PathSpec {
return ps
}
func newTestDefaultPathSpec() *helpers.PathSpec {
func newTestDefaultPathSpec(t *testing.T) *helpers.PathSpec {
v := viper.New()
// Easier to reason about in tests.
v.Set("disablePathToLower", true)
@ -528,8 +528,14 @@ func newTestDefaultPathSpec() *helpers.PathSpec {
v.Set("i18nDir", "i18n")
v.Set("layoutDir", "layouts")
v.Set("archetypeDir", "archetypes")
v.Set("assetDir", "assets")
v.Set("resourceDir", "resources")
v.Set("publishDir", "public")
fs := hugofs.NewDefault(v)
ps, _ := helpers.NewPathSpec(fs, v)
ps, err := helpers.NewPathSpec(fs, v)
if err != nil {
t.Fatal(err)
}
return ps
}


@ -205,6 +205,9 @@ func TestI18nTranslate(t *testing.T) {
v.Set("i18nDir", "i18n")
v.Set("layoutDir", "layouts")
v.Set("archetypeDir", "archetypes")
v.Set("assetDir", "assets")
v.Set("resourceDir", "resources")
v.Set("publishDir", "public")
// Test without and with placeholders
for _, enablePlaceholders := range []bool{false, true} {


@ -46,17 +46,17 @@ func Vendor() error {
// Build hugo binary
func Hugo() error {
return sh.RunWith(flagEnv(), goexe, "build", "-ldflags", ldflags, packageName)
return sh.RunWith(flagEnv(), goexe, "build", "-ldflags", ldflags, "-tags", buildTags(), packageName)
}
// Build hugo binary with race detector enabled
func HugoRace() error {
return sh.RunWith(flagEnv(), goexe, "build", "-race", "-ldflags", ldflags, packageName)
return sh.RunWith(flagEnv(), goexe, "build", "-race", "-ldflags", ldflags, "-tags", buildTags(), packageName)
}
// Install hugo binary
func Install() error {
return sh.RunWith(flagEnv(), goexe, "install", "-ldflags", ldflags, packageName)
return sh.RunWith(flagEnv(), goexe, "install", "-ldflags", ldflags, "-tags", buildTags(), packageName)
}
func flagEnv() map[string]string {
@ -111,18 +111,19 @@ func Check() {
}
// Run tests in 32-bit mode
// Note that we don't run with the extended tag. Currently not supported in 32 bit.
func Test386() error {
return sh.RunWith(map[string]string{"GOARCH": "386"}, goexe, "test", "./...")
}
// Run tests
func Test() error {
return sh.Run(goexe, "test", "./...")
return sh.Run(goexe, "test", "./...", "-tags", buildTags())
}
// Run tests with race detector
func TestRace() error {
return sh.Run(goexe, "test", "-race", "./...")
return sh.Run(goexe, "test", "-race", "./...", "-tags", buildTags())
}
// Run gofmt linter
@ -266,3 +267,13 @@ func CheckVendor() error {
func isGoLatest() bool {
return strings.Contains(runtime.Version(), "1.10")
}
func buildTags() string {
// To build the extended Hugo SCSS/SASS enabled version, build with
// HUGO_BUILD_TAGS=extended mage install etc.
if envtags := os.Getenv("HUGO_BUILD_TAGS"); envtags != "" {
return envtags
}
return "none"
}


@ -50,7 +50,8 @@ func FromString(t string) (Type, error) {
mainType := parts[0]
subParts := strings.Split(parts[1], "+")
subType := subParts[0]
subType := strings.Split(subParts[0], ";")[0]
var suffix string
if len(subParts) == 1 {
@ -85,25 +86,38 @@ func (m Type) FullSuffix() string {
var (
CalendarType = Type{"text", "calendar", "ics", defaultDelimiter}
CSSType = Type{"text", "css", "css", defaultDelimiter}
SCSSType = Type{"text", "x-scss", "scss", defaultDelimiter}
SASSType = Type{"text", "x-sass", "sass", defaultDelimiter}
CSVType = Type{"text", "csv", "csv", defaultDelimiter}
HTMLType = Type{"text", "html", "html", defaultDelimiter}
JavascriptType = Type{"application", "javascript", "js", defaultDelimiter}
JSONType = Type{"application", "json", "json", defaultDelimiter}
RSSType = Type{"application", "rss", "xml", defaultDelimiter}
XMLType = Type{"application", "xml", "xml", defaultDelimiter}
// The official MIME type of SVG is image/svg+xml. We currently only support one extension
// per mime type. The workaround in projects is to create multiple media type definitions,
// but we need to improve this to take other known suffixes into account.
// But until then, svg has an svg extension, which is very common. TODO(bep)
SVGType = Type{"image", "svg", "svg", defaultDelimiter}
TextType = Type{"text", "plain", "txt", defaultDelimiter}
OctetType = Type{"application", "octet-stream", "", ""}
)
var DefaultTypes = Types{
CalendarType,
CSSType,
CSVType,
SCSSType,
SASSType,
HTMLType,
JavascriptType,
JSONType,
RSSType,
XMLType,
SVGType,
TextType,
OctetType,
}
func init() {
@ -125,6 +139,16 @@ func (t Types) GetByType(tp string) (Type, bool) {
return Type{}, false
}
// GetFirstBySuffix will return the first media type matching the given suffix.
func (t Types) GetFirstBySuffix(suffix string) (Type, bool) {
for _, tt := range t {
if strings.EqualFold(suffix, tt.Suffix) {
return tt, true
}
}
return Type{}, false
}
// GetBySuffix gets a media type given as suffix, e.g. "html".
// It will return false if no format could be found, or if the suffix given
// is ambiguous.


@ -30,12 +30,15 @@ func TestDefaultTypes(t *testing.T) {
}{
{CalendarType, "text", "calendar", "ics", "text/calendar", "text/calendar+ics"},
{CSSType, "text", "css", "css", "text/css", "text/css+css"},
{SCSSType, "text", "x-scss", "scss", "text/x-scss", "text/x-scss+scss"},
{CSVType, "text", "csv", "csv", "text/csv", "text/csv+csv"},
{HTMLType, "text", "html", "html", "text/html", "text/html+html"},
{JavascriptType, "application", "javascript", "js", "application/javascript", "application/javascript+js"},
{JSONType, "application", "json", "json", "application/json", "application/json+json"},
{RSSType, "application", "rss", "xml", "application/rss", "application/rss+xml"},
{SVGType, "image", "svg", "svg", "image/svg", "image/svg+svg"},
{TextType, "text", "plain", "txt", "text/plain", "text/plain+txt"},
{XMLType, "application", "xml", "xml", "application/xml", "application/xml+xml"},
} {
require.Equal(t, test.expectedMainType, test.tp.MainType)
require.Equal(t, test.expectedSubType, test.tp.SubType)
@ -60,6 +63,13 @@ func TestGetByType(t *testing.T) {
require.False(t, found)
}
func TestGetFirstBySuffix(t *testing.T) {
assert := require.New(t)
f, found := DefaultTypes.GetFirstBySuffix("xml")
assert.True(found)
assert.Equal(Type{MainType: "application", SubType: "rss", Suffix: "xml", Delimiter: "."}, f)
}
func TestFromTypeString(t *testing.T) {
f, err := FromString("text/html")
require.NoError(t, err)
@ -76,6 +86,10 @@ func TestFromTypeString(t *testing.T) {
_, err = FromString("noslash")
require.Error(t, err)
f, err = FromString("text/xml; charset=utf-8")
require.NoError(t, err)
require.Equal(t, Type{MainType: "text", SubType: "xml", Suffix: "xml", Delimiter: "."}, f)
}
func TestDecodeTypes(t *testing.T) {

resource/bundler/bundler.go (new file, 121 lines)

@ -0,0 +1,121 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package bundler contains functions for concatenation etc. of Resource objects.
package bundler
import (
"errors"
"fmt"
"io"
"path/filepath"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resource"
)
// Client contains methods to perform concatenation and other bundling-related
// tasks on Resource objects.
type Client struct {
rs *resource.Spec
}
// New creates a new Client with the given specification.
func New(rs *resource.Spec) *Client {
return &Client{rs: rs}
}
type multiReadSeekCloser struct {
mr io.Reader
sources []resource.ReadSeekCloser
}
func (r *multiReadSeekCloser) Read(p []byte) (n int, err error) {
return r.mr.Read(p)
}
func (r *multiReadSeekCloser) Seek(offset int64, whence int) (newOffset int64, err error) {
for _, s := range r.sources {
newOffset, err = s.Seek(offset, whence)
if err != nil {
return
}
}
return
}
func (r *multiReadSeekCloser) Close() error {
for _, s := range r.sources {
s.Close()
}
return nil
}
// Concat concatenates the list of Resource objects.
func (c *Client) Concat(targetPath string, resources []resource.Resource) (resource.Resource, error) {
// Using the CACHE_OTHER partition makes sure this is re-created and published on rebuilds.
return c.rs.ResourceCache.GetOrCreate(resource.CACHE_OTHER, targetPath, func() (resource.Resource, error) {
var resolvedm media.Type
// The given set of resources must be of the same Media Type.
// We may improve on that in the future, but then we need to know more.
for i, r := range resources {
if i > 0 && r.MediaType() != resolvedm {
return nil, errors.New("resources in Concat must be of the same Media Type")
}
resolvedm = r.MediaType()
}
concatr := func() (resource.ReadSeekCloser, error) {
var rcsources []resource.ReadSeekCloser
for _, s := range resources {
rcr, ok := s.(resource.ReadSeekCloserResource)
if !ok {
return nil, fmt.Errorf("resource %T does not implement resource.ReadSeekerCloserResource", s)
}
rc, err := rcr.ReadSeekCloser()
if err != nil {
// Close the already opened.
for _, rcs := range rcsources {
rcs.Close()
}
return nil, err
}
rcsources = append(rcsources, rc)
}
readers := make([]io.Reader, len(rcsources))
for i := 0; i < len(rcsources); i++ {
readers[i] = rcsources[i]
}
mr := io.MultiReader(readers...)
return &multiReadSeekCloser{mr: mr, sources: rcsources}, nil
}
composite, err := c.rs.NewForFs(
c.rs.BaseFs.Resources.Fs,
resource.ResourceSourceDescriptor{
LazyPublish: true,
OpenReadSeekCloser: concatr,
RelTargetFilename: filepath.Clean(targetPath)})
if err != nil {
return nil, err
}
return composite, nil
})
}
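Concat requires every input to share a single media type, so a typical use is bundling several JavaScript assets into one file. A sketch, where `js/script2.js` is a hypothetical second asset:

```bash
{{ $a := resources.Get "js/script1.js" }}
{{ $b := resources.Get "js/script2.js" }}
{{ $bundle := slice $a $b | resources.Concat "js/bundle.js" | minify }}
<script src="{{ $bundle.RelPermalink }}"></script>
```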

resource/create/create.go (new file, 77 lines)

@ -0,0 +1,77 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package create contains functions to create Resource objects. These will
// typically be non-file resources.
package create
import (
"io"
"path/filepath"
"github.com/spf13/afero"
"github.com/dsnet/golib/memfile"
"github.com/gohugoio/hugo/resource"
)
// Client contains methods to create Resource objects.
type Client struct {
rs *resource.Spec
}
// New creates a new Client with the given specification.
func New(rs *resource.Spec) *Client {
return &Client{rs: rs}
}
type memFileCloser struct {
*memfile.File
io.Closer
}
func (m *memFileCloser) Close() error {
return nil
}
// Get creates a new Resource by opening the given filename in the given filesystem.
func (c *Client) Get(fs afero.Fs, filename string) (resource.Resource, error) {
filename = filepath.Clean(filename)
return c.rs.ResourceCache.GetOrCreate(resource.ResourceKeyPartition(filename), filename, func() (resource.Resource, error) {
return c.rs.NewForFs(fs,
resource.ResourceSourceDescriptor{
LazyPublish: true,
SourceFilename: filename})
})
}
// FromString creates a new Resource from a string with the given relative target path.
func (c *Client) FromString(targetPath, content string) (resource.Resource, error) {
return c.rs.ResourceCache.GetOrCreate(resource.CACHE_OTHER, targetPath, func() (resource.Resource, error) {
return c.rs.NewForFs(
c.rs.BaseFs.Resources.Fs,
resource.ResourceSourceDescriptor{
LazyPublish: true,
OpenReadSeekCloser: func() (resource.ReadSeekCloser, error) {
return &memFileCloser{
File: memfile.New([]byte(content)),
}, nil
},
RelTargetFilename: filepath.Clean(targetPath)})
})
}
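A small sketch of `FromString` used outside the tests, with a hypothetical `manifest.json` name and payload; the `.json` suffix should map it to the `application/json` media type:

```bash
{{ $manifest := `{"name": "My Site", "start_url": "/"}` | resources.FromString "manifest.json" }}
<link rel="manifest" href="{{ $manifest.RelPermalink }}">
```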


@ -19,14 +19,12 @@ import (
"image/color"
"io"
"os"
"path/filepath"
"strconv"
"strings"
"github.com/mitchellh/mapstructure"
"github.com/gohugoio/hugo/helpers"
"github.com/spf13/afero"
// Importing image codecs for image.DecodeConfig
"image"
@ -132,8 +130,6 @@ type Image struct {
format imaging.Format
hash string
*genericResource
}
@ -151,7 +147,6 @@ func (i *Image) Height() int {
func (i *Image) WithNewBase(base string) Resource {
return &Image{
imaging: i.imaging,
hash: i.hash,
format: i.format,
genericResource: i.genericResource.WithNewBase(base).(*genericResource)}
}
@ -209,7 +204,7 @@ type imageConfig struct {
}
func (i *Image) isJPEG() bool {
name := strings.ToLower(i.relTargetPath.file)
name := strings.ToLower(i.relTargetDirFile.file)
return strings.HasSuffix(name, ".jpg") || strings.HasSuffix(name, ".jpeg")
}
@ -241,7 +236,7 @@ func (i *Image) doWithImageConfig(action, spec string, f func(src image.Image, c
ci := i.clone()
errOp := action
errPath := i.AbsSourceFilename()
errPath := i.sourceFilename
ci.setBasePath(conf)
@ -273,7 +268,7 @@ func (i *Image) doWithImageConfig(action, spec string, f func(src image.Image, c
ci.config = image.Config{Width: b.Max.X, Height: b.Max.Y}
ci.configLoaded = true
return ci, i.encodeToDestinations(converted, conf, resourceCacheFilename, ci.target())
return ci, i.encodeToDestinations(converted, conf, resourceCacheFilename, ci.targetFilename())
})
}
@ -415,11 +410,11 @@ func (i *Image) initConfig() error {
}
var (
f afero.File
f ReadSeekCloser
config image.Config
)
f, err = i.sourceFs().Open(i.AbsSourceFilename())
f, err = i.ReadSeekCloser()
if err != nil {
return
}
@ -440,19 +435,19 @@ func (i *Image) initConfig() error {
}
func (i *Image) decodeSource() (image.Image, error) {
file, err := i.sourceFs().Open(i.AbsSourceFilename())
f, err := i.ReadSeekCloser()
if err != nil {
return nil, fmt.Errorf("failed to open image for decode: %s", err)
}
defer file.Close()
img, _, err := image.Decode(file)
defer f.Close()
img, _, err := image.Decode(f)
return img, err
}
func (i *Image) copyToDestination(src string) error {
var res error
i.copyToDestinationInit.Do(func() {
target := i.target()
target := i.targetFilename()
// Fast path:
// This is a processed version of the original.
@ -469,23 +464,12 @@ func (i *Image) copyToDestination(src string) error {
}
defer in.Close()
out, err := i.spec.BaseFs.PublishFs.Create(target)
if err != nil && os.IsNotExist(err) {
// When called from shortcodes, the target directory may not exist yet.
// See https://github.com/gohugoio/hugo/issues/4202
if err = i.spec.BaseFs.PublishFs.MkdirAll(filepath.Dir(target), os.FileMode(0755)); err != nil {
res = err
return
}
out, err = i.spec.BaseFs.PublishFs.Create(target)
out, err := openFileForWriting(i.spec.BaseFs.PublishFs, target)
if err != nil {
res = err
return
}
} else if err != nil {
res = err
return
}
defer out.Close()
_, err = io.Copy(out, in)
@ -501,23 +485,12 @@ func (i *Image) copyToDestination(src string) error {
return nil
}
func (i *Image) encodeToDestinations(img image.Image, conf imageConfig, resourceCacheFilename, filename string) error {
target := filepath.Clean(filename)
func (i *Image) encodeToDestinations(img image.Image, conf imageConfig, resourceCacheFilename, targetFilename string) error {
file1, err := i.spec.BaseFs.PublishFs.Create(target)
if err != nil && os.IsNotExist(err) {
// When called from shortcodes, the target directory may not exist yet.
// See https://github.com/gohugoio/hugo/issues/4202
if err = i.spec.BaseFs.PublishFs.MkdirAll(filepath.Dir(target), os.FileMode(0755)); err != nil {
return err
}
file1, err = i.spec.BaseFs.PublishFs.Create(target)
file1, err := openFileForWriting(i.spec.BaseFs.PublishFs, targetFilename)
if err != nil {
return err
}
} else if err != nil {
return err
}
defer file1.Close()
@ -525,11 +498,7 @@ func (i *Image) encodeToDestinations(img image.Image, conf imageConfig, resource
if resourceCacheFilename != "" {
// Also save it to the image resource cache for later reuse.
if err = i.spec.BaseFs.ResourcesFs.MkdirAll(filepath.Dir(resourceCacheFilename), os.FileMode(0755)); err != nil {
return err
}
file2, err := i.spec.BaseFs.ResourcesFs.Create(resourceCacheFilename)
file2, err := openFileForWriting(i.spec.BaseFs.Resources.Fs, resourceCacheFilename)
if err != nil {
return err
}
@ -572,17 +541,16 @@ func (i *Image) clone() *Image {
return &Image{
imaging: i.imaging,
hash: i.hash,
format: i.format,
genericResource: &g}
}
func (i *Image) setBasePath(conf imageConfig) {
i.relTargetPath = i.relTargetPathFromConfig(conf)
i.relTargetDirFile = i.relTargetPathFromConfig(conf)
}
func (i *Image) relTargetPathFromConfig(conf imageConfig) dirFile {
p1, p2 := helpers.FileAndExt(i.relTargetPath.file)
p1, p2 := helpers.FileAndExt(i.relTargetDirFile.file)
idStr := fmt.Sprintf("_hu%s_%d", i.hash, i.osFileInfo.Size())
@ -611,7 +579,7 @@ func (i *Image) relTargetPathFromConfig(conf imageConfig) dirFile {
}
return dirFile{
dir: i.relTargetPath.dir,
dir: i.relTargetDirFile.dir,
file: fmt.Sprintf("%s%s_%s%s", p1, idStr, key, p2),
}


@ -1,4 +1,4 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -60,12 +60,6 @@ func (c *imageCache) getOrCreate(
relTarget := parent.relTargetPathFromConfig(conf)
key := parent.relTargetPathForRel(relTarget.path(), false)
if c.pathSpec.Language != nil {
// Avoid do and store more work than needed. The language versions will in
// most cases be duplicates of the same image files.
key = strings.TrimPrefix(key, "/"+c.pathSpec.Language.Lang)
}
// First check the in-memory store, then the disk.
c.mu.RLock()
img, found := c.store[key]
@ -88,17 +82,17 @@ func (c *imageCache) getOrCreate(
// but the count of processed image variations for this site.
c.pathSpec.ProcessingStats.Incr(&c.pathSpec.ProcessingStats.ProcessedImages)
exists, err := helpers.Exists(cacheFilename, c.pathSpec.BaseFs.ResourcesFs)
exists, err := helpers.Exists(cacheFilename, c.pathSpec.BaseFs.Resources.Fs)
if err != nil {
return nil, err
}
if exists {
img = parent.clone()
img.relTargetPath.file = relTarget.file
img.relTargetDirFile.file = relTarget.file
img.sourceFilename = cacheFilename
// We have to look resources file system for this.
img.overriddenSourceFs = img.spec.BaseFs.ResourcesFs
// We have to look in the resources file system for this.
img.overriddenSourceFs = img.spec.BaseFs.Resources.Fs
} else {
img, err = create(cacheFilename)
if err != nil {


@ -1,4 +1,4 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -78,19 +78,19 @@ func TestImageTransformBasic(t *testing.T) {
assert.NoError(err)
assert.Equal(320, resized0x.Width())
assert.Equal(200, resized0x.Height())
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, resized0x.RelPermalink(), 320, 200)
assertFileCache(assert, image.spec.BaseFs.Resources.Fs, resized0x.RelPermalink(), 320, 200)
resizedx0, err := image.Resize("200x")
assert.NoError(err)
assert.Equal(200, resizedx0.Width())
assert.Equal(125, resizedx0.Height())
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, resizedx0.RelPermalink(), 200, 125)
assertFileCache(assert, image.spec.BaseFs.Resources.Fs, resizedx0.RelPermalink(), 200, 125)
resizedAndRotated, err := image.Resize("x200 r90")
assert.NoError(err)
assert.Equal(125, resizedAndRotated.Width())
assert.Equal(200, resizedAndRotated.Height())
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, resizedAndRotated.RelPermalink(), 125, 200)
assertFileCache(assert, image.spec.BaseFs.Resources.Fs, resizedAndRotated.RelPermalink(), 125, 200)
assert.Equal("/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x200_resize_q68_linear.jpg", resized.RelPermalink())
assert.Equal(300, resized.Width())
@ -115,20 +115,20 @@ func TestImageTransformBasic(t *testing.T) {
assert.Equal("/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_fill_q68_linear_bottomleft.jpg", filled.RelPermalink())
assert.Equal(200, filled.Width())
assert.Equal(100, filled.Height())
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, filled.RelPermalink(), 200, 100)
assertFileCache(assert, image.spec.BaseFs.Resources.Fs, filled.RelPermalink(), 200, 100)
smart, err := image.Fill("200x100 smart")
assert.NoError(err)
assert.Equal(fmt.Sprintf("/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_fill_q68_linear_smart%d.jpg", smartCropVersionNumber), smart.RelPermalink())
assert.Equal(200, smart.Width())
assert.Equal(100, smart.Height())
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, smart.RelPermalink(), 200, 100)
assertFileCache(assert, image.spec.BaseFs.Resources.Fs, smart.RelPermalink(), 200, 100)
// Check cache
filledAgain, err := image.Fill("200x100 bottomLeft")
assert.NoError(err)
assert.True(filled == filledAgain)
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, filledAgain.RelPermalink(), 200, 100)
assertFileCache(assert, image.spec.BaseFs.Resources.Fs, filledAgain.RelPermalink(), 200, 100)
}
@ -298,7 +298,7 @@ func TestImageResizeInSubPath(t *testing.T) {
assert.Equal("/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_2.png", resized.RelPermalink())
assert.Equal(101, resized.Width())
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, resized.RelPermalink(), 101, 101)
assertFileCache(assert, image.spec.BaseFs.Resources.Fs, resized.RelPermalink(), 101, 101)
publishedImageFilename := filepath.Clean(resized.RelPermalink())
assertImageFile(assert, image.spec.BaseFs.PublishFs, publishedImageFilename, 101, 101)
assert.NoError(image.spec.BaseFs.PublishFs.Remove(publishedImageFilename))
@ -310,7 +310,7 @@ func TestImageResizeInSubPath(t *testing.T) {
assert.NoError(err)
assert.Equal("/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_2.png", resizedAgain.RelPermalink())
assert.Equal(101, resizedAgain.Width())
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, resizedAgain.RelPermalink(), 101, 101)
assertFileCache(assert, image.spec.BaseFs.Resources.Fs, resizedAgain.RelPermalink(), 101, 101)
assertImageFile(assert, image.spec.BaseFs.PublishFs, publishedImageFilename, 101, 101)
}


@ -0,0 +1,106 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package integrity
import (
"crypto/md5"
"crypto/sha256"
"crypto/sha512"
"encoding/base64"
"encoding/hex"
"fmt"
"hash"
"io"
"github.com/gohugoio/hugo/resource"
)
const defaultHashAlgo = "sha256"
// Client contains methods for fingerprinting (cache busting) and other
// integrity-related tasks.
type Client struct {
rs *resource.Spec
}
// New creates a new Client with the given specification.
func New(rs *resource.Spec) *Client {
return &Client{rs: rs}
}
type fingerprintTransformation struct {
algo string
}
func (t *fingerprintTransformation) Key() resource.ResourceTransformationKey {
return resource.NewResourceTransformationKey("fingerprint", t.algo)
}
// Transform hashes the Resource content with the configured algorithm and
// inserts that hash before the extension in the filename.
func (t *fingerprintTransformation) Transform(ctx *resource.ResourceTransformationCtx) error {
algo := t.algo
var h hash.Hash
switch algo {
case "md5":
h = md5.New()
case "sha256":
h = sha256.New()
case "sha512":
h = sha512.New()
default:
return fmt.Errorf("unsupported crypto algo: %q, use either md5, sha256 or sha512", algo)
}
io.Copy(io.MultiWriter(h, ctx.To), ctx.From)
d, err := digest(h)
if err != nil {
return err
}
ctx.Data["Integrity"] = integrity(algo, d)
ctx.AddOutPathIdentifier("." + hex.EncodeToString(d[:]))
return nil
}
// Fingerprint applies fingerprinting of the given resource and hash algorithm.
// It defaults to sha256 if none given, and the options are md5, sha256 or sha512.
// The same algo is used for both the fingerprinting part (aka cache busting) and
// the base64-encoded Subresource Integrity hash, so you will have to stay away from
// md5 if you plan to use both.
// See https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity
func (c *Client) Fingerprint(res resource.Resource, algo string) (resource.Resource, error) {
if algo == "" {
algo = defaultHashAlgo
}
return c.rs.Transform(
res,
&fingerprintTransformation{algo: algo},
)
}
func integrity(algo string, sum []byte) string {
encoded := base64.StdEncoding.EncodeToString(sum)
return fmt.Sprintf("%s-%s", algo, encoded)
}
func digest(h hash.Hash) ([]byte, error) {
sum := h.Sum(nil)
//enc := hex.EncodeToString(sum[:])
return sum, nil
}
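The `Integrity` value stored in `Data` above is what a template would read back for a Subresource Integrity attribute. A sketch, with a hypothetical `js/app.js` asset:

```bash
{{ $js := resources.Get "js/app.js" | minify | fingerprint "sha512" }}
<script src="{{ $js.RelPermalink }}" integrity="{{ $js.Data.Integrity }}"></script>
```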


@ -0,0 +1,54 @@
// Copyright 2018-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package integrity
import (
"github.com/gohugoio/hugo/media"
)
type testResource struct {
content string
}
func (r testResource) Permalink() string {
panic("not implemented")
}
func (r testResource) RelPermalink() string {
panic("not implemented")
}
func (r testResource) ResourceType() string {
panic("not implemented")
}
func (r testResource) Name() string {
panic("not implemented")
}
func (r testResource) MediaType() media.Type {
panic("not implemented")
}
func (r testResource) Title() string {
panic("not implemented")
}
func (r testResource) Params() map[string]interface{} {
panic("not implemented")
}
func (r testResource) Bytes() ([]byte, error) {
return []byte(r.content), nil
}


@ -0,0 +1,115 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package minifiers
import (
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resource"
"github.com/tdewolff/minify"
"github.com/tdewolff/minify/css"
"github.com/tdewolff/minify/html"
"github.com/tdewolff/minify/js"
"github.com/tdewolff/minify/json"
"github.com/tdewolff/minify/svg"
"github.com/tdewolff/minify/xml"
)
// Client for minification of Resource objects. Supported minifiers are:
// css, html, js, json, svg and xml.
type Client struct {
rs *resource.Spec
m *minify.M
}
// New creates a new Client given a specification. Note that it is the media types
// configured for the site that are used to match files to the correct minifier.
func New(rs *resource.Spec) *Client {
m := minify.New()
mt := rs.MediaTypes
// We use the media type definitions configured for the site when available.
addMinifierFunc(m, mt, "text/css", "css", css.Minify)
addMinifierFunc(m, mt, "text/html", "html", html.Minify)
addMinifierFunc(m, mt, "application/javascript", "js", js.Minify)
addMinifierFunc(m, mt, "application/json", "json", json.Minify)
addMinifierFunc(m, mt, "image/svg", "xml", svg.Minify)
addMinifierFunc(m, mt, "application/xml", "xml", xml.Minify)
addMinifierFunc(m, mt, "application/rss", "xml", xml.Minify)
return &Client{rs: rs, m: m}
}
func addMinifierFunc(m *minify.M, mt media.Types, typeString, suffix string, fn minify.MinifierFunc) {
resolvedTypeStr := resolveMediaTypeString(mt, typeString, suffix)
m.AddFunc(resolvedTypeStr, fn)
if resolvedTypeStr != typeString {
m.AddFunc(typeString, fn)
}
}
type minifyTransformation struct {
rs *resource.Spec
m *minify.M
}
func (t *minifyTransformation) Key() resource.ResourceTransformationKey {
return resource.NewResourceTransformationKey("minify")
}
func (t *minifyTransformation) Transform(ctx *resource.ResourceTransformationCtx) error {
mtype := resolveMediaTypeString(
t.rs.MediaTypes,
ctx.InMediaType.Type(),
helpers.ExtNoDelimiter(ctx.InPath),
)
if err := t.m.Minify(mtype, ctx.To, ctx.From); err != nil {
return err
}
ctx.AddOutPathIdentifier(".min")
return nil
}
func (c *Client) Minify(res resource.Resource) (resource.Resource, error) {
return c.rs.Transform(
res,
&minifyTransformation{
rs: c.rs,
m: c.m},
)
}
func resolveMediaTypeString(types media.Types, typeStr, suffix string) string {
if m, found := resolveMediaType(types, typeStr, suffix); found {
return m.Type()
}
// Fall back to the default.
return typeStr
}
// Make sure we match the pattern against what the user has actually defined
// in their media types configuration.
func resolveMediaType(types media.Types, typeStr, suffix string) (media.Type, bool) {
if m, found := types.GetByType(typeStr); found {
return m, true
}
if m, found := types.GetFirstBySuffix(suffix); found {
return m, true
}
return media.Type{}, false
}

resource/postcss/postcss.go (new file, 175 lines)

@ -0,0 +1,175 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package postcss
import (
"fmt"
"io"
"path/filepath"
"github.com/gohugoio/hugo/hugofs"
"github.com/mitchellh/mapstructure"
// "io/ioutil"
"os"
"os/exec"
"github.com/gohugoio/hugo/common/errors"
"github.com/gohugoio/hugo/resource"
)
// Some of the options from https://github.com/postcss/postcss-cli
type Options struct {
// Set a custom path to look for a config file.
Config string
NoMap bool `mapstructure:"no-map"` // Disable the default inline sourcemaps
// Options for when not using a config file
Use string // List of postcss plugins to use
Parser string // Custom postcss parser
Stringifier string // Custom postcss stringifier
Syntax string // Custom postcss syntax
}
func DecodeOptions(m map[string]interface{}) (opts Options, err error) {
if m == nil {
return
}
err = mapstructure.WeakDecode(m, &opts)
return
}
func (opts Options) toArgs() []string {
var args []string
if opts.NoMap {
args = append(args, "--no-map")
}
if opts.Use != "" {
args = append(args, "--use", opts.Use)
}
if opts.Parser != "" {
args = append(args, "--parser", opts.Parser)
}
if opts.Stringifier != "" {
args = append(args, "--stringifier", opts.Stringifier)
}
if opts.Syntax != "" {
args = append(args, "--syntax", opts.Syntax)
}
return args
}
// Client is the client used to do PostCSS transformations.
type Client struct {
rs *resource.Spec
}
// New creates a new Client with the given specification.
func New(rs *resource.Spec) *Client {
return &Client{rs: rs}
}
type postcssTransformation struct {
options Options
rs *resource.Spec
}
func (t *postcssTransformation) Key() resource.ResourceTransformationKey {
return resource.NewResourceTransformationKey("postcss", t.options)
}
// Transform shells out to postcss-cli to do the heavy lifting.
// For this to work, you need some additional tools. To install them globally:
// npm install -g postcss-cli
// npm install -g autoprefixer
func (t *postcssTransformation) Transform(ctx *resource.ResourceTransformationCtx) error {
const binary = "postcss"
if _, err := exec.LookPath(binary); err != nil {
// This may be on a CI server etc. Will fall back to pre-built assets.
return errors.FeatureNotAvailableErr
}
var configFile string
logger := t.rs.Logger
if t.options.Config != "" {
configFile = t.options.Config
} else {
configFile = "postcss.config.js"
}
configFile = filepath.Clean(configFile)
// We need an absolute filename to the config file.
if !filepath.IsAbs(configFile) {
// We resolve this against the virtual Work filesystem, to allow
// this config file to live in one of the themes if needed.
fi, err := t.rs.BaseFs.Work.Fs.Stat(configFile)
if err != nil {
if t.options.Config != "" {
// Only fail if the user-specified config file is not found.
return fmt.Errorf("postcss config %q not found: %s", configFile, err)
}
configFile = ""
} else {
configFile = fi.(hugofs.RealFilenameInfo).RealFilename()
}
}
var cmdArgs []string
if configFile != "" {
logger.INFO.Println("postcss: use config file", configFile)
cmdArgs = []string{"--config", configFile}
}
if optArgs := t.options.toArgs(); len(optArgs) > 0 {
cmdArgs = append(cmdArgs, optArgs...)
}
cmd := exec.Command(binary, cmdArgs...)
cmd.Stdout = ctx.To
cmd.Stderr = os.Stderr
stdin, err := cmd.StdinPipe()
if err != nil {
return err
}
go func() {
defer stdin.Close()
io.Copy(stdin, ctx.From)
}()
err = cmd.Run()
if err != nil {
return err
}
return nil
}
// Process transforms the given Resource with the PostCSS processor.
func (c *Client) Process(res resource.Resource, options Options) (resource.Resource, error) {
return c.rs.Transform(
res,
&postcssTransformation{rs: c.rs, options: options},
)
}
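
The Transform above boils down to a classic shell-out: stream `ctx.From` into the external tool's stdin and point its stdout at `ctx.To`. Below is a minimal, self-contained sketch of just that wiring, with `tr` standing in for the `postcss` binary (illustrative only, not Hugo code; requires a Unix-like system):

```go
package main

import (
	"io"
	"os"
	"os/exec"
	"strings"
)

func main() {
	// The "source resource" we want transformed; in Hugo this is ctx.From.
	src := strings.NewReader("body { color: red }\n")

	// tr is only a stand-in for the postcss binary used above.
	cmd := exec.Command("tr", "a-z", "A-Z")
	cmd.Stdout = os.Stdout // in Hugo this is ctx.To
	cmd.Stderr = os.Stderr

	stdin, err := cmd.StdinPipe()
	if err != nil {
		panic(err)
	}
	// Feed the source concurrently and close stdin so the tool sees EOF.
	go func() {
		defer stdin.Close()
		io.Copy(stdin, src)
	}()

	if err := cmd.Run(); err != nil {
		panic(err)
	}
}
```
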

View File

@ -1,4 +1,4 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -14,21 +14,25 @@
package resource
import (
"errors"
"fmt"
"io"
"io/ioutil"
"mime"
"os"
"path"
"path/filepath"
"strconv"
"strings"
"sync"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/common/loggers"
jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/afero"
"github.com/spf13/cast"
"github.com/gobwas/glob"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/media"
@ -36,34 +40,39 @@ import (
)
var (
_ ContentResource = (*genericResource)(nil)
_ ReadSeekCloserResource = (*genericResource)(nil)
_ Resource = (*genericResource)(nil)
_ metaAssigner = (*genericResource)(nil)
_ Source = (*genericResource)(nil)
_ Cloner = (*genericResource)(nil)
_ ResourcesLanguageMerger = (*Resources)(nil)
_ permalinker = (*genericResource)(nil)
)
const DefaultResourceType = "unknown"
var noData = make(map[string]interface{})
// Source is an internal interface and not meant for use in the templates. It
// may change without notice.
type Source interface {
AbsSourceFilename() string
Publish() error
}
type permalinker interface {
relPermalinkFor(target string) string
permalinkFor(target string) string
relTargetPathFor(target string) string
relTargetPath() string
targetPath() string
}
// Cloner is an internal interface and not meant for use in the templates. It
// may change without notice.
type Cloner interface {
WithNewBase(base string) Resource
}
type metaAssigner interface {
setTitle(title string)
setName(name string)
updateParams(params map[string]interface{})
}
// Resource represents a linkable resource, i.e. a content page, image etc.
type Resource interface {
// Permalink represents the absolute link to this resource.
@ -77,6 +86,9 @@ type Resource interface {
// For content pages, this value is "page".
ResourceType() string
// MediaType is this resource's MIME type.
MediaType() media.Type
// Name is the logical name of this resource. This can be set in the front matter
// metadata for this resource. If not set, Hugo will assign a value.
// This will in most cases be the base filename.
@ -88,16 +100,12 @@ type Resource interface {
// Title returns the title if set in front matter. For content pages, this will be the expected value.
Title() string
// Resource specific data set by Hugo.
// One example would be .Data.Digest for fingerprinted resources.
Data() interface{}
// Params set in front matter for this resource.
Params() map[string]interface{}
// Content returns this resource's content. It will be equivalent to reading the content
// that RelPermalink points to in the published folder.
// The return type will be contextual, and should be what you would expect:
// * Page: template.HTML
// * JSON: String
// * Etc.
Content() (interface{}, error)
}
type ResourcesLanguageMerger interface {
@ -110,6 +118,40 @@ type translatedResource interface {
TranslationKey() string
}
// ContentResource represents a Resource that provides a way to get to its content.
// Most Resource types in Hugo implement this interface, including Page.
// This should be used with care, as it will read the file content into memory, but it
// should be cached as effectively as possible by the implementation.
type ContentResource interface {
Resource
// Content returns this resource's content. It will be equivalent to reading the content
// that RelPermalink points to in the published folder.
// The return type will be contextual, and should be what you would expect:
// * Page: template.HTML
// * JSON: String
// * Etc.
Content() (interface{}, error)
}
// ReadSeekCloser is implemented by afero.File. We use this as the common type for
// content in Resource objects, even for strings.
type ReadSeekCloser interface {
io.Reader
io.Seeker
io.Closer
}
// OpenReadSeekCloser allows setting some other way (than reading from a filesystem)
// to open or create a ReadSeekCloser.
type OpenReadSeekCloser func() (ReadSeekCloser, error)
// ReadSeekCloserResource is a Resource that supports loading its content.
type ReadSeekCloserResource interface {
Resource
ReadSeekCloser() (ReadSeekCloser, error)
}
// Resources represents a slice of resources, which can be a mix of different types.
// I.e. both pages and images etc.
type Resources []Resource
@ -125,44 +167,6 @@ func (r Resources) ByType(tp string) Resources {
return filtered
}
const prefixDeprecatedMsg = `We have added the more flexible Resources.GetMatch (find one) and Resources.Match (many) to replace the "prefix" methods.
These matches by a given globbing pattern, e.g. "*.jpg".
Some examples:
* To find all resources by its prefix in the root dir of the bundle: .Match image*
* To find one resource by its prefix in the root dir of the bundle: .GetMatch image*
* To find all JPEG images anywhere in the bundle: .Match **.jpg`
// GetByPrefix gets the first resource matching the given filename prefix, e.g
// "logo" will match logo.png. It returns nil of none found.
// In potential ambiguous situations, combine it with ByType.
func (r Resources) GetByPrefix(prefix string) Resource {
helpers.Deprecated("Resources", "GetByPrefix", prefixDeprecatedMsg, true)
prefix = strings.ToLower(prefix)
for _, resource := range r {
if matchesPrefix(resource, prefix) {
return resource
}
}
return nil
}
// ByPrefix gets all resources matching the given base filename prefix, e.g
// "logo" will match logo.png.
func (r Resources) ByPrefix(prefix string) Resources {
helpers.Deprecated("Resources", "ByPrefix", prefixDeprecatedMsg, true)
var matches Resources
prefix = strings.ToLower(prefix)
for _, resource := range r {
if matchesPrefix(resource, prefix) {
matches = append(matches, resource)
}
}
return matches
}
// GetMatch finds the first Resource matching the given pattern, or nil if none found.
// See Match for a more complete explanation about the rules used.
func (r Resources) GetMatch(pattern string) Resource {
@ -204,10 +208,6 @@ func (r Resources) Match(pattern string) Resources {
return matches
}
func matchesPrefix(r Resource, prefix string) bool {
return strings.HasPrefix(strings.ToLower(r.Name()), prefix)
}
var (
globCache = make(map[string]glob.Glob)
globMu sync.RWMutex
@ -268,81 +268,180 @@ func (r1 Resources) MergeByLanguageInterface(in interface{}) (interface{}, error
type Spec struct {
*helpers.PathSpec
mimeTypes media.Types
MediaTypes media.Types
Logger *jww.Notepad
TextTemplates tpl.TemplateParseFinder
// Holds default filter settings etc.
imaging *Imaging
imageCache *imageCache
ResourceCache *ResourceCache
GenImagePath string
GenAssetsPath string
}
func NewSpec(s *helpers.PathSpec, mimeTypes media.Types) (*Spec, error) {
func NewSpec(s *helpers.PathSpec, logger *jww.Notepad, mimeTypes media.Types) (*Spec, error) {
imaging, err := decodeImaging(s.Cfg.GetStringMap("imaging"))
if err != nil {
return nil, err
}
genImagePath := filepath.FromSlash("_gen/images")
if logger == nil {
logger = loggers.NewErrorLogger()
}
return &Spec{PathSpec: s,
genImagePath := filepath.FromSlash("_gen/images")
// The transformed assets (CSS etc.)
genAssetsPath := filepath.FromSlash("_gen/assets")
rs := &Spec{PathSpec: s,
Logger: logger,
GenImagePath: genImagePath,
imaging: &imaging, mimeTypes: mimeTypes, imageCache: newImageCache(
GenAssetsPath: genAssetsPath,
imaging: &imaging,
MediaTypes: mimeTypes,
imageCache: newImageCache(
s,
// We're going to write a cache pruning routine later, so make it extremely
// unlikely that the user shoots themselves in the foot, i.e. that this path
// points at data they care about. This should be set in stone once released.
genImagePath,
)}, nil
)}
rs.ResourceCache = newResourceCache(rs)
return rs, nil
}
func (r *Spec) NewResourceFromFile(
targetPathBuilder func(base string) string,
file source.File, relTargetFilename string) (Resource, error) {
type ResourceSourceDescriptor struct {
// TargetPathBuilder is a callback to create target paths relative to its owner.
TargetPathBuilder func(base string) string
return r.newResource(targetPathBuilder, file.Filename(), file.FileInfo(), relTargetFilename)
// Need one of these to load the resource content.
SourceFile source.File
OpenReadSeekCloser OpenReadSeekCloser
// If OpenReadSeekCloser is not set, we use this to open the file.
SourceFilename string
// The relative target filename without any language code.
RelTargetFilename string
// Any base path prepended to the permalink.
// Typically the language code if this resource should be published to its sub-folder.
URLBase string
// Any base path prepended to the target path. This will also typically be the
// language code, but setting it here means that it should not have any effect on
// the permalink.
TargetPathBase string
// Delay publishing until either Permalink or RelPermalink is called. Maybe never.
LazyPublish bool
}
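
As a rough illustration of how these fields combine (a fragment, not runnable on its own; `spec` is assumed to be an initialized `*Spec`), a file-backed and an in-memory descriptor might look like this:

```go
// File-backed: the content is read from SourceFilename via the source filesystem.
fileRes, err := spec.New(ResourceSourceDescriptor{
	SourceFilename:    "a/b/logo.png",
	RelTargetFilename: "a/b/logo.png",
})

// In-memory: OpenReadSeekCloser provides the content, and LazyPublish delays
// writing to disk until Permalink or RelPermalink is called.
memRes, err := spec.New(ResourceSourceDescriptor{
	LazyPublish:       true,
	RelTargetFilename: "generated/hello.txt",
	OpenReadSeekCloser: func() (ReadSeekCloser, error) {
		// A real implementation would return something backed by an in-memory reader.
		return nil, nil
	},
})
```
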
func (r *Spec) NewResourceFromFilename(
targetPathBuilder func(base string) string,
absSourceFilename, relTargetFilename string) (Resource, error) {
fi, err := r.sourceFs().Stat(absSourceFilename)
if err != nil {
return nil, err
func (r ResourceSourceDescriptor) Filename() string {
if r.SourceFile != nil {
return r.SourceFile.Filename()
}
return r.newResource(targetPathBuilder, absSourceFilename, fi, relTargetFilename)
return r.SourceFilename
}
func (r *Spec) sourceFs() afero.Fs {
return r.PathSpec.BaseFs.ContentFs
return r.PathSpec.BaseFs.Content.Fs
}
func (r *Spec) newResource(
targetPathBuilder func(base string) string,
absSourceFilename string, fi os.FileInfo, relTargetFilename string) (Resource, error) {
func (r *Spec) New(fd ResourceSourceDescriptor) (Resource, error) {
return r.newResourceForFs(r.sourceFs(), fd)
}
var mimeType string
ext := filepath.Ext(relTargetFilename)
m, found := r.mimeTypes.GetBySuffix(strings.TrimPrefix(ext, "."))
if found {
mimeType = m.SubType
} else {
mimeType = mime.TypeByExtension(ext)
if mimeType == "" {
mimeType = DefaultResourceType
} else {
mimeType = mimeType[:strings.Index(mimeType, "/")]
func (r *Spec) NewForFs(sourceFs afero.Fs, fd ResourceSourceDescriptor) (Resource, error) {
return r.newResourceForFs(sourceFs, fd)
}
func (r *Spec) newResourceForFs(sourceFs afero.Fs, fd ResourceSourceDescriptor) (Resource, error) {
if fd.OpenReadSeekCloser == nil {
if fd.SourceFile != nil && fd.SourceFilename != "" {
return nil, errors.New("both SourceFile and AbsSourceFilename provided")
} else if fd.SourceFile == nil && fd.SourceFilename == "" {
return nil, errors.New("either SourceFile or AbsSourceFilename must be provided")
}
}
gr := r.newGenericResource(targetPathBuilder, fi, absSourceFilename, relTargetFilename, mimeType)
if fd.URLBase == "" {
fd.URLBase = r.GetURLLanguageBasePath()
}
if mimeType == "image" {
ext := strings.ToLower(helpers.Ext(absSourceFilename))
if fd.TargetPathBase == "" {
fd.TargetPathBase = r.GetTargetLanguageBasePath()
}
if fd.RelTargetFilename == "" {
fd.RelTargetFilename = fd.Filename()
}
return r.newResource(sourceFs, fd)
}
func (r *Spec) newResource(sourceFs afero.Fs, fd ResourceSourceDescriptor) (Resource, error) {
var fi os.FileInfo
var sourceFilename string
if fd.OpenReadSeekCloser != nil {
} else if fd.SourceFilename != "" {
var err error
fi, err = sourceFs.Stat(fd.SourceFilename)
if err != nil {
return nil, err
}
sourceFilename = fd.SourceFilename
} else {
fi = fd.SourceFile.FileInfo()
sourceFilename = fd.SourceFile.Filename()
}
if fd.RelTargetFilename == "" {
fd.RelTargetFilename = sourceFilename
}
ext := filepath.Ext(fd.RelTargetFilename)
mimeType, found := r.MediaTypes.GetFirstBySuffix(strings.TrimPrefix(ext, "."))
// TODO(bep) we need to handle these ambiguous types better, but in this context
// we most likely want the application/xml type.
if mimeType.Suffix == "xml" && mimeType.SubType == "rss" {
mimeType, found = r.MediaTypes.GetByType("application/xml")
}
if !found {
mimeStr := mime.TypeByExtension(ext)
if mimeStr != "" {
mimeType, _ = media.FromString(mimeStr)
}
}
gr := r.newGenericResourceWithBase(
sourceFs,
fd.LazyPublish,
fd.OpenReadSeekCloser,
fd.URLBase,
fd.TargetPathBase,
fd.TargetPathBuilder,
fi,
sourceFilename,
fd.RelTargetFilename,
mimeType)
if mimeType.MainType == "image" {
ext := strings.ToLower(helpers.Ext(sourceFilename))
imgFormat, ok := imageFormats[ext]
if !ok {
@ -351,27 +450,21 @@ func (r *Spec) newResource(
return gr, nil
}
f, err := gr.sourceFs().Open(absSourceFilename)
if err != nil {
return nil, fmt.Errorf("failed to open image source file: %s", err)
}
defer f.Close()
hash, err := helpers.MD5FromFileFast(f)
if err != nil {
if err := gr.initHash(); err != nil {
return nil, err
}
return &Image{
hash: hash,
format: imgFormat,
imaging: r.imaging,
genericResource: gr}, nil
}
return gr, nil
}
func (r *Spec) IsInCache(key string) bool {
// TODO(bep) unify
func (r *Spec) IsInImageCache(key string) bool {
// This is used for cache pruning. We currently only have images, but we could
// imagine expanding on this.
return r.imageCache.isInCache(key)
@ -381,6 +474,11 @@ func (r *Spec) DeleteCacheByPrefix(prefix string) {
r.imageCache.deleteByPrefix(prefix)
}
func (r *Spec) ClearCaches() {
r.imageCache.clear()
r.ResourceCache.clear()
}
func (r *Spec) CacheStats() string {
r.imageCache.mu.RLock()
defer r.imageCache.mu.RUnlock()
@ -410,18 +508,54 @@ func (d dirFile) path() string {
return path.Join(d.dir, d.file)
}
type resourcePathDescriptor struct {
// The relative target directory and filename.
relTargetDirFile dirFile
// Callback used to construct a target path relative to its owner.
targetPathBuilder func(rel string) string
// baseURLDir is the fixed sub-folder for a resource in permalinks. This will typically
// be the language code if we publish to the language's sub-folder.
baseURLDir string
// This will normally be the same as above, but this will only apply to publishing
// of resources.
baseTargetPathDir string
// baseOffset is set when the output format's path has an offset, e.g. for AMP.
baseOffset string
}
type resourceContent struct {
content string
contentInit sync.Once
}
type resourceHash struct {
hash string
hashInit sync.Once
}
type publishOnce struct {
publisherInit sync.Once
publisherErr error
logger *jww.Notepad
}
func (l *publishOnce) publish(s Source) error {
l.publisherInit.Do(func() {
l.publisherErr = s.Publish()
if l.publisherErr != nil {
l.logger.ERROR.Printf("failed to publish Resource: %s", l.publisherErr)
}
})
return l.publisherErr
}
// genericResource represents a generic linkable resource.
type genericResource struct {
// The relative path to this resource.
relTargetPath dirFile
// Base is set when the output format's path has a offset, e.g. for AMP.
base string
resourcePathDescriptor
title string
name string
@ -433,6 +567,12 @@ type genericResource struct {
// the path to the file on the real filesystem.
sourceFilename string
// Will be set if this resource is backed by something other than a file.
openReadSeekerCloser OpenReadSeekCloser
// A hash of the source content. Is only calculated in caching situations.
*resourceHash
// This may be set to tell us to look in another filesystem for this resource.
// We, by default, use the sourceFs filesystem in the spec below.
overriddenSourceFs afero.Fs
@ -440,20 +580,87 @@ type genericResource struct {
spec *Spec
resourceType string
osFileInfo os.FileInfo
mediaType media.Type
targetPathBuilder func(rel string) string
osFileInfo os.FileInfo
// We create copies of this struct, so this needs to be a pointer.
*resourceContent
// May be set to signal lazy/delayed publishing.
*publishOnce
}
func (l *genericResource) Data() interface{} {
return noData
}
func (l *genericResource) Content() (interface{}, error) {
if err := l.initContent(); err != nil {
return nil, err
}
return l.content, nil
}
func (l *genericResource) ReadSeekCloser() (ReadSeekCloser, error) {
if l.openReadSeekerCloser != nil {
return l.openReadSeekerCloser()
}
f, err := l.sourceFs().Open(l.sourceFilename)
if err != nil {
return nil, err
}
return f, nil
}
func (l *genericResource) MediaType() media.Type {
return l.mediaType
}
// Implement the Cloner interface.
func (l genericResource) WithNewBase(base string) Resource {
l.baseOffset = base
l.resourceContent = &resourceContent{}
return &l
}
func (l *genericResource) initHash() error {
var err error
l.hashInit.Do(func() {
var hash string
var f ReadSeekCloser
f, err = l.ReadSeekCloser()
if err != nil {
err = fmt.Errorf("failed to open source file: %s", err)
return
}
defer f.Close()
hash, err = helpers.MD5FromFileFast(f)
if err != nil {
return
}
l.hash = hash
})
return err
}
func (l *genericResource) initContent() error {
var err error
l.contentInit.Do(func() {
var b []byte
var r ReadSeekCloser
r, err = l.ReadSeekCloser()
if err != nil {
return
}
defer r.Close()
b, err := afero.ReadFile(l.sourceFs(), l.AbsSourceFilename())
var b []byte
b, err = ioutil.ReadAll(r)
if err != nil {
return
}
@ -462,7 +669,7 @@ func (l *genericResource) Content() (interface{}, error) {
})
return l.content, err
return err
}
func (l *genericResource) sourceFs() afero.Fs {
@ -472,12 +679,36 @@ func (l *genericResource) sourceFs() afero.Fs {
return l.spec.sourceFs()
}
func (l *genericResource) publishIfNeeded() {
if l.publishOnce != nil {
l.publishOnce.publish(l)
}
}
func (l *genericResource) Permalink() string {
return l.spec.PermalinkForBaseURL(l.relPermalinkForRel(l.relTargetPath.path(), false), l.spec.BaseURL.String())
l.publishIfNeeded()
return l.spec.PermalinkForBaseURL(l.relPermalinkForRel(l.relTargetDirFile.path()), l.spec.BaseURL.HostURL())
}
func (l *genericResource) RelPermalink() string {
return l.relPermalinkForRel(l.relTargetPath.path(), true)
l.publishIfNeeded()
return l.relPermalinkFor(l.relTargetDirFile.path())
}
func (l *genericResource) relPermalinkFor(target string) string {
return l.relPermalinkForRel(target)
}
func (l *genericResource) permalinkFor(target string) string {
return l.spec.PermalinkForBaseURL(l.relPermalinkForRel(target), l.spec.BaseURL.HostURL())
}
func (l *genericResource) relTargetPathFor(target string) string {
return l.relTargetPathForRel(target, false)
}
func (l *genericResource) relTargetPath() string {
return l.relTargetPathForRel(l.targetPath(), false)
}
func (l *genericResource) Name() string {
@ -514,31 +745,33 @@ func (l *genericResource) updateParams(params map[string]interface{}) {
}
}
// Implement the Cloner interface.
func (l genericResource) WithNewBase(base string) Resource {
l.base = base
l.resourceContent = &resourceContent{}
return &l
func (l *genericResource) relPermalinkForRel(rel string) string {
return l.spec.PathSpec.URLizeFilename(l.relTargetPathForRel(rel, true))
}
func (l *genericResource) relPermalinkForRel(rel string, addBasePath bool) string {
return l.spec.PathSpec.URLizeFilename(l.relTargetPathForRel(rel, addBasePath))
}
func (l *genericResource) relTargetPathForRel(rel string, isURL bool) string {
func (l *genericResource) relTargetPathForRel(rel string, addBasePath bool) string {
if l.targetPathBuilder != nil {
rel = l.targetPathBuilder(rel)
}
if l.base != "" {
rel = path.Join(l.base, rel)
if isURL && l.baseURLDir != "" {
rel = path.Join(l.baseURLDir, rel)
}
if addBasePath && l.spec.PathSpec.BasePath != "" {
if !isURL && l.baseTargetPathDir != "" {
rel = path.Join(l.baseTargetPathDir, rel)
}
if l.baseOffset != "" {
rel = path.Join(l.baseOffset, rel)
}
if isURL && l.spec.PathSpec.BasePath != "" {
rel = path.Join(l.spec.PathSpec.BasePath, rel)
}
if rel[0] != '/' {
if len(rel) == 0 || rel[0] != '/' {
rel = "/" + rel
}
@ -549,146 +782,100 @@ func (l *genericResource) ResourceType() string {
return l.resourceType
}
func (l *genericResource) AbsSourceFilename() string {
return l.sourceFilename
}
func (l *genericResource) String() string {
return fmt.Sprintf("Resource(%s: %s)", l.resourceType, l.name)
}
func (l *genericResource) Publish() error {
f, err := l.sourceFs().Open(l.AbsSourceFilename())
f, err := l.ReadSeekCloser()
if err != nil {
return err
}
defer f.Close()
return helpers.WriteToDisk(l.target(), f, l.spec.BaseFs.PublishFs)
return helpers.WriteToDisk(l.targetFilename(), f, l.spec.BaseFs.PublishFs)
}
const counterPlaceHolder = ":counter"
// AssignMetadata assigns the given metadata to those resources that supports updates
// and matching by wildcard given in `src` using `filepath.Match` with lower cased values.
// This assignment is additive, but the most specific match needs to be first.
// The `name` and `title` metadata field support shell-matched collection it got a match in.
// See https://golang.org/pkg/path/#Match
func AssignMetadata(metadata []map[string]interface{}, resources ...Resource) error {
counters := make(map[string]int)
for _, r := range resources {
if _, ok := r.(metaAssigner); !ok {
continue
}
var (
nameSet, titleSet bool
nameCounter, titleCounter = 0, 0
nameCounterFound, titleCounterFound bool
resourceSrcKey = strings.ToLower(r.Name())
)
ma := r.(metaAssigner)
for _, meta := range metadata {
src, found := meta["src"]
if !found {
return fmt.Errorf("missing 'src' in metadata for resource")
}
srcKey := strings.ToLower(cast.ToString(src))
glob, err := getGlob(srcKey)
if err != nil {
return fmt.Errorf("failed to match resource with metadata: %s", err)
}
match := glob.Match(resourceSrcKey)
if match {
if !nameSet {
name, found := meta["name"]
if found {
name := cast.ToString(name)
if !nameCounterFound {
nameCounterFound = strings.Contains(name, counterPlaceHolder)
}
if nameCounterFound && nameCounter == 0 {
counterKey := "name_" + srcKey
nameCounter = counters[counterKey] + 1
counters[counterKey] = nameCounter
}
ma.setName(replaceResourcePlaceholders(name, nameCounter))
nameSet = true
}
}
if !titleSet {
title, found := meta["title"]
if found {
title := cast.ToString(title)
if !titleCounterFound {
titleCounterFound = strings.Contains(title, counterPlaceHolder)
}
if titleCounterFound && titleCounter == 0 {
counterKey := "title_" + srcKey
titleCounter = counters[counterKey] + 1
counters[counterKey] = titleCounter
}
ma.setTitle((replaceResourcePlaceholders(title, titleCounter)))
titleSet = true
}
}
params, found := meta["params"]
if found {
m := cast.ToStringMap(params)
// Needed for case insensitive fetching of params values
maps.ToLower(m)
ma.updateParams(m)
}
}
}
}
return nil
// Path is stored with Unix style slashes.
func (l *genericResource) targetPath() string {
return l.relTargetDirFile.path()
}
func replaceResourcePlaceholders(in string, counter int) string {
return strings.Replace(in, counterPlaceHolder, strconv.Itoa(counter), -1)
func (l *genericResource) targetFilename() string {
return filepath.Clean(l.relTargetPath())
}
func (l *genericResource) target() string {
target := l.relTargetPathForRel(l.relTargetPath.path(), false)
if l.spec.PathSpec.Languages.IsMultihost() {
target = path.Join(l.spec.PathSpec.Language.Lang, target)
}
return filepath.Clean(target)
}
func (r *Spec) newGenericResource(
// TODO(bep) clean up below
func (r *Spec) newGenericResource(sourceFs afero.Fs,
targetPathBuilder func(base string) string,
osFileInfo os.FileInfo,
sourceFilename,
baseFilename string,
mediaType media.Type) *genericResource {
return r.newGenericResourceWithBase(
sourceFs,
false,
nil,
"",
"",
targetPathBuilder,
osFileInfo,
sourceFilename,
baseFilename,
resourceType string) *genericResource {
mediaType,
)
}
func (r *Spec) newGenericResourceWithBase(
sourceFs afero.Fs,
lazyPublish bool,
openReadSeekerCloser OpenReadSeekCloser,
urlBaseDir string,
targetPathBaseDir string,
targetPathBuilder func(base string) string,
osFileInfo os.FileInfo,
sourceFilename,
baseFilename string,
mediaType media.Type) *genericResource {
// This value is used both to construct URLs and file paths, but start
// with a Unix-styled path.
baseFilename = filepath.ToSlash(baseFilename)
baseFilename = helpers.ToSlashTrimLeading(baseFilename)
fpath, fname := path.Split(baseFilename)
return &genericResource{
var resourceType string
if mediaType.MainType == "image" {
resourceType = mediaType.MainType
} else {
resourceType = mediaType.SubType
}
pathDescriptor := resourcePathDescriptor{
baseURLDir: urlBaseDir,
baseTargetPathDir: targetPathBaseDir,
targetPathBuilder: targetPathBuilder,
relTargetDirFile: dirFile{dir: fpath, file: fname},
}
var po *publishOnce
if lazyPublish {
po = &publishOnce{logger: r.Logger}
}
return &genericResource{
openReadSeekerCloser: openReadSeekerCloser,
publishOnce: po,
resourcePathDescriptor: pathDescriptor,
overriddenSourceFs: sourceFs,
osFileInfo: osFileInfo,
sourceFilename: sourceFilename,
relTargetPath: dirFile{dir: fpath, file: fname},
mediaType: mediaType,
resourceType: resourceType,
spec: r,
params: make(map[string]interface{}),
name: baseFilename,
title: baseFilename,
resourceContent: &resourceContent{},
resourceHash: &resourceHash{},
}
}

241
resource/resource_cache.go Normal file
View File

@ -0,0 +1,241 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package resource
import (
"encoding/json"
"io/ioutil"
"os"
"path"
"path/filepath"
"strings"
"sync"
"github.com/spf13/afero"
"github.com/BurntSushi/locker"
)
const (
CACHE_CLEAR_ALL = "clear_all"
CACHE_OTHER = "other"
)
type ResourceCache struct {
rs *Spec
cache map[string]Resource
sync.RWMutex
// Provides named resource locks.
nlocker *locker.Locker
}
// ResourceKeyPartition returns a partition name
// to allow for more fine-grained cache flushes.
// It will return the file extension without the leading ".". If no
// extension, it will return "other".
func ResourceKeyPartition(filename string) string {
ext := strings.TrimPrefix(path.Ext(filepath.ToSlash(filename)), ".")
if ext == "" {
ext = CACHE_OTHER
}
return ext
}
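
A few illustrative inputs and the partitions they map to (these follow directly from the function above; they are not run anywhere in this commit):

```go
ResourceKeyPartition("scss/main.scss")  // -> "scss"
ResourceKeyPartition("images/logo.PNG") // -> "PNG"   (the extension is not lower-cased)
ResourceKeyPartition("LICENSE")         // -> "other" (no file extension)
```
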
func newResourceCache(rs *Spec) *ResourceCache {
return &ResourceCache{
rs: rs,
cache: make(map[string]Resource),
nlocker: locker.NewLocker(),
}
}
func (c *ResourceCache) clear() {
c.Lock()
defer c.Unlock()
c.cache = make(map[string]Resource)
c.nlocker = locker.NewLocker()
}
func (c *ResourceCache) Contains(key string) bool {
key = c.cleanKey(filepath.ToSlash(key))
_, found := c.get(key)
return found
}
func (c *ResourceCache) cleanKey(key string) string {
return strings.TrimPrefix(path.Clean(key), "/")
}
func (c *ResourceCache) get(key string) (Resource, bool) {
c.RLock()
defer c.RUnlock()
r, found := c.cache[key]
return r, found
}
func (c *ResourceCache) GetOrCreate(partition, key string, f func() (Resource, error)) (Resource, error) {
key = c.cleanKey(path.Join(partition, key))
// First check in-memory cache.
r, found := c.get(key)
if found {
return r, nil
}
// This is a potentially long running operation, so get a named lock.
c.nlocker.Lock(key)
// Double check in-memory cache.
r, found = c.get(key)
if found {
c.nlocker.Unlock(key)
return r, nil
}
defer c.nlocker.Unlock(key)
r, err := f()
if err != nil {
return nil, err
}
c.set(key, r)
return r, nil
}
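
GetOrCreate is a double-checked cache guarded by a per-key ("named") lock, so an expensive transformation runs at most once per key even under concurrent template execution. Here is a minimal, self-contained sketch of the same pattern outside Hugo, with a `sync.Map` of mutexes standing in for the locker package:

```go
package main

import (
	"fmt"
	"sync"
)

// cache mimics the shape of ResourceCache: a map guarded by a RWMutex plus a
// per-key lock, so the expensive create function runs at most once per key.
type cache struct {
	mu    sync.RWMutex
	items map[string]string
	locks sync.Map // key -> *sync.Mutex, standing in for the named locker
}

func (c *cache) getOrCreate(key string, create func() string) string {
	// First check the in-memory cache.
	c.mu.RLock()
	v, ok := c.items[key]
	c.mu.RUnlock()
	if ok {
		return v
	}

	// Potentially long running operation, so take a lock scoped to this key.
	l, _ := c.locks.LoadOrStore(key, &sync.Mutex{})
	lock := l.(*sync.Mutex)
	lock.Lock()
	defer lock.Unlock()

	// Double check: another goroutine may have created it while we waited.
	c.mu.RLock()
	v, ok = c.items[key]
	c.mu.RUnlock()
	if ok {
		return v
	}

	v = create()
	c.mu.Lock()
	c.items[key] = v
	c.mu.Unlock()
	return v
}

func main() {
	c := &cache{items: make(map[string]string)}
	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			// Every goroutine gets "compiled", but create runs only once.
			fmt.Println(c.getOrCreate("scss/styles.scss", func() string { return "compiled" }))
		}()
	}
	wg.Wait()
}
```
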
func (c *ResourceCache) getFilenames(key string) (string, string) {
filenameBase := filepath.Join(c.rs.GenAssetsPath, key)
filenameMeta := filenameBase + ".json"
filenameContent := filenameBase + ".content"
return filenameMeta, filenameContent
}
func (c *ResourceCache) getFromFile(key string) (afero.File, transformedResourceMetadata, bool) {
c.RLock()
defer c.RUnlock()
var meta transformedResourceMetadata
filenameMeta, filenameContent := c.getFilenames(key)
fMeta, err := c.rs.Resources.Fs.Open(filenameMeta)
if err != nil {
return nil, meta, false
}
defer fMeta.Close()
jsonContent, err := ioutil.ReadAll(fMeta)
if err != nil {
return nil, meta, false
}
if err := json.Unmarshal(jsonContent, &meta); err != nil {
return nil, meta, false
}
fContent, err := c.rs.Resources.Fs.Open(filenameContent)
if err != nil {
return nil, meta, false
}
return fContent, meta, true
}
// writeMeta writes the metadata to file and returns a writer for the content part.
func (c *ResourceCache) writeMeta(key string, meta transformedResourceMetadata) (afero.File, error) {
filenameMeta, filenameContent := c.getFilenames(key)
raw, err := json.Marshal(meta)
if err != nil {
return nil, err
}
fm, err := c.openResourceFileForWriting(filenameMeta)
if err != nil {
return nil, err
}
if _, err := fm.Write(raw); err != nil {
return nil, err
}
return c.openResourceFileForWriting(filenameContent)
}
func (c *ResourceCache) openResourceFileForWriting(filename string) (afero.File, error) {
return openFileForWriting(c.rs.Resources.Fs, filename)
}
// openFileForWriting opens or creates the given file. If the target directory
// does not exist, it gets created.
func openFileForWriting(fs afero.Fs, filename string) (afero.File, error) {
filename = filepath.Clean(filename)
// Create will truncate if file already exists.
f, err := fs.Create(filename)
if err != nil {
if !os.IsNotExist(err) {
return nil, err
}
if err = fs.MkdirAll(filepath.Dir(filename), 0755); err != nil {
return nil, err
}
f, err = fs.Create(filename)
}
return f, err
}
func (c *ResourceCache) set(key string, r Resource) {
c.Lock()
defer c.Unlock()
c.cache[key] = r
}
func (c *ResourceCache) DeletePartitions(partitions ...string) {
partitionsSet := map[string]bool{
// Always clear out the resources in the catch-all "other" partition.
"other": true,
}
for _, p := range partitions {
partitionsSet[p] = true
}
if partitionsSet[CACHE_CLEAR_ALL] {
c.clear()
return
}
c.Lock()
defer c.Unlock()
for k := range c.cache {
clear := false
partIdx := strings.Index(k, "/")
if partIdx == -1 {
clear = true
} else {
partition := k[:partIdx]
if partitionsSet[partition] {
clear = true
}
}
if clear {
delete(c.cache, k)
}
}
}

View File

@ -0,0 +1,129 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package resource
import (
"fmt"
"strconv"
"github.com/spf13/cast"
"strings"
"github.com/gohugoio/hugo/common/maps"
)
var (
_ metaAssigner = (*genericResource)(nil)
)
// metaAssigner allows updating metadata in resources that supports it.
type metaAssigner interface {
setTitle(title string)
setName(name string)
updateParams(params map[string]interface{})
}
const counterPlaceHolder = ":counter"
// AssignMetadata assigns the given metadata to those resources that support updates
// and matching by the wildcard given in `src`, using `filepath.Match` with lower cased values.
// This assignment is additive, but the most specific match needs to be first.
// The `name` and `title` metadata fields support a `:counter` placeholder, which is
// relative to the shell-matched collection it got a match in.
// See https://golang.org/pkg/path/#Match
func AssignMetadata(metadata []map[string]interface{}, resources ...Resource) error {
counters := make(map[string]int)
for _, r := range resources {
if _, ok := r.(metaAssigner); !ok {
continue
}
var (
nameSet, titleSet bool
nameCounter, titleCounter = 0, 0
nameCounterFound, titleCounterFound bool
resourceSrcKey = strings.ToLower(r.Name())
)
ma := r.(metaAssigner)
for _, meta := range metadata {
src, found := meta["src"]
if !found {
return fmt.Errorf("missing 'src' in metadata for resource")
}
srcKey := strings.ToLower(cast.ToString(src))
glob, err := getGlob(srcKey)
if err != nil {
return fmt.Errorf("failed to match resource with metadata: %s", err)
}
match := glob.Match(resourceSrcKey)
if match {
if !nameSet {
name, found := meta["name"]
if found {
name := cast.ToString(name)
if !nameCounterFound {
nameCounterFound = strings.Contains(name, counterPlaceHolder)
}
if nameCounterFound && nameCounter == 0 {
counterKey := "name_" + srcKey
nameCounter = counters[counterKey] + 1
counters[counterKey] = nameCounter
}
ma.setName(replaceResourcePlaceholders(name, nameCounter))
nameSet = true
}
}
if !titleSet {
title, found := meta["title"]
if found {
title := cast.ToString(title)
if !titleCounterFound {
titleCounterFound = strings.Contains(title, counterPlaceHolder)
}
if titleCounterFound && titleCounter == 0 {
counterKey := "title_" + srcKey
titleCounter = counters[counterKey] + 1
counters[counterKey] = titleCounter
}
ma.setTitle((replaceResourcePlaceholders(title, titleCounter)))
titleSet = true
}
}
params, found := meta["params"]
if found {
m := cast.ToStringMap(params)
// Needed for case insensitive fetching of params values
maps.ToLower(m)
ma.updateParams(m)
}
}
}
}
return nil
}
func replaceResourcePlaceholders(in string, counter int) string {
return strings.Replace(in, counterPlaceHolder, strconv.Itoa(counter), -1)
}

View File

@ -0,0 +1,230 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package resource
import (
"testing"
"github.com/gohugoio/hugo/media"
"github.com/stretchr/testify/require"
)
func TestAssignMetadata(t *testing.T) {
assert := require.New(t)
spec := newTestResourceSpec(assert)
var foo1, foo2, foo3, logo1, logo2, logo3 Resource
var resources Resources
for _, this := range []struct {
metaData []map[string]interface{}
assertFunc func(err error)
}{
{[]map[string]interface{}{
{
"title": "My Resource",
"name": "My Name",
"src": "*",
},
}, func(err error) {
assert.Equal("My Resource", logo1.Title())
assert.Equal("My Name", logo1.Name())
assert.Equal("My Name", foo2.Name())
}},
{[]map[string]interface{}{
{
"title": "My Logo",
"src": "*loGo*",
},
{
"title": "My Resource",
"name": "My Name",
"src": "*",
},
}, func(err error) {
assert.Equal("My Logo", logo1.Title())
assert.Equal("My Logo", logo2.Title())
assert.Equal("My Name", logo1.Name())
assert.Equal("My Name", foo2.Name())
assert.Equal("My Name", foo3.Name())
assert.Equal("My Resource", foo3.Title())
}},
{[]map[string]interface{}{
{
"title": "My Logo",
"src": "*loGo*",
"params": map[string]interface{}{
"Param1": true,
"icon": "logo",
},
},
{
"title": "My Resource",
"src": "*",
"params": map[string]interface{}{
"Param2": true,
"icon": "resource",
},
},
}, func(err error) {
assert.NoError(err)
assert.Equal("My Logo", logo1.Title())
assert.Equal("My Resource", foo3.Title())
_, p1 := logo2.Params()["param1"]
_, p2 := foo2.Params()["param2"]
_, p1_2 := foo2.Params()["param1"]
_, p2_2 := logo2.Params()["param2"]
icon1, _ := logo2.Params()["icon"]
icon2, _ := foo2.Params()["icon"]
assert.True(p1)
assert.True(p2)
// Check merge
assert.True(p2_2)
assert.False(p1_2)
assert.Equal("logo", icon1)
assert.Equal("resource", icon2)
}},
{[]map[string]interface{}{
{
"name": "Logo Name #:counter",
"src": "*logo*",
},
{
"title": "Resource #:counter",
"name": "Name #:counter",
"src": "*",
},
}, func(err error) {
assert.NoError(err)
assert.Equal("Resource #2", logo2.Title())
assert.Equal("Logo Name #1", logo2.Name())
assert.Equal("Resource #4", logo1.Title())
assert.Equal("Logo Name #2", logo1.Name())
assert.Equal("Resource #1", foo2.Title())
assert.Equal("Resource #3", foo1.Title())
assert.Equal("Name #2", foo1.Name())
assert.Equal("Resource #5", foo3.Title())
assert.Equal(logo2, resources.GetMatch("logo name #1*"))
}},
{[]map[string]interface{}{
{
"title": "Third Logo #:counter",
"src": "logo3.png",
},
{
"title": "Other Logo #:counter",
"name": "Name #:counter",
"src": "logo*",
},
}, func(err error) {
assert.NoError(err)
assert.Equal("Third Logo #1", logo3.Title())
assert.Equal("Name #3", logo3.Name())
assert.Equal("Other Logo #1", logo2.Title())
assert.Equal("Name #1", logo2.Name())
assert.Equal("Other Logo #2", logo1.Title())
assert.Equal("Name #2", logo1.Name())
}},
{[]map[string]interface{}{
{
"title": "Third Logo",
"src": "logo3.png",
},
{
"title": "Other Logo #:counter",
"name": "Name #:counter",
"src": "logo*",
},
}, func(err error) {
assert.NoError(err)
assert.Equal("Third Logo", logo3.Title())
assert.Equal("Name #3", logo3.Name())
assert.Equal("Other Logo #1", logo2.Title())
assert.Equal("Name #1", logo2.Name())
assert.Equal("Other Logo #2", logo1.Title())
assert.Equal("Name #2", logo1.Name())
}},
{[]map[string]interface{}{
{
"name": "third-logo",
"src": "logo3.png",
},
{
"title": "Logo #:counter",
"name": "Name #:counter",
"src": "logo*",
},
}, func(err error) {
assert.NoError(err)
assert.Equal("Logo #3", logo3.Title())
assert.Equal("third-logo", logo3.Name())
assert.Equal("Logo #1", logo2.Title())
assert.Equal("Name #1", logo2.Name())
assert.Equal("Logo #2", logo1.Title())
assert.Equal("Name #2", logo1.Name())
}},
{[]map[string]interface{}{
{
"title": "Third Logo #:counter",
},
}, func(err error) {
// Missing src
assert.Error(err)
}},
{[]map[string]interface{}{
{
"title": "Title",
"src": "[]",
},
}, func(err error) {
// Invalid pattern
assert.Error(err)
}},
} {
foo2 = spec.newGenericResource(nil, nil, nil, "/b/foo2.css", "foo2.css", media.CSSType)
logo2 = spec.newGenericResource(nil, nil, nil, "/b/Logo2.png", "Logo2.png", pngType)
foo1 = spec.newGenericResource(nil, nil, nil, "/a/foo1.css", "foo1.css", media.CSSType)
logo1 = spec.newGenericResource(nil, nil, nil, "/a/logo1.png", "logo1.png", pngType)
foo3 = spec.newGenericResource(nil, nil, nil, "/b/foo3.css", "foo3.css", media.CSSType)
logo3 = spec.newGenericResource(nil, nil, nil, "/b/logo3.png", "logo3.png", pngType)
resources = Resources{
foo2,
logo2,
foo1,
logo1,
foo3,
logo3,
}
this.assertFunc(AssignMetadata(this.metaData, resources...))
}
}

View File

@ -1,4 +1,4 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -22,6 +22,8 @@ import (
"testing"
"time"
"github.com/gohugoio/hugo/media"
"github.com/stretchr/testify/require"
)
@ -29,7 +31,7 @@ func TestGenericResource(t *testing.T) {
assert := require.New(t)
spec := newTestResourceSpec(assert)
r := spec.newGenericResource(nil, nil, "/a/foo.css", "foo.css", "css")
r := spec.newGenericResource(nil, nil, nil, "/a/foo.css", "foo.css", media.CSSType)
assert.Equal("https://example.com/foo.css", r.Permalink())
assert.Equal("/foo.css", r.RelPermalink())
@ -44,7 +46,7 @@ func TestGenericResourceWithLinkFacory(t *testing.T) {
factory := func(s string) string {
return path.Join("/foo", s)
}
r := spec.newGenericResource(factory, nil, "/a/foo.css", "foo.css", "css")
r := spec.newGenericResource(nil, factory, nil, "/a/foo.css", "foo.css", media.CSSType)
assert.Equal("https://example.com/foo/foo.css", r.Permalink())
assert.Equal("/foo/foo.css", r.RelPermalink())
@ -58,8 +60,7 @@ func TestNewResourceFromFilename(t *testing.T) {
writeSource(t, spec.Fs, "content/a/b/logo.png", "image")
writeSource(t, spec.Fs, "content/a/b/data.json", "json")
r, err := spec.NewResourceFromFilename(nil,
filepath.FromSlash("a/b/logo.png"), filepath.FromSlash("a/b/logo.png"))
r, err := spec.New(ResourceSourceDescriptor{SourceFilename: "a/b/logo.png"})
assert.NoError(err)
assert.NotNil(r)
@ -67,7 +68,7 @@ func TestNewResourceFromFilename(t *testing.T) {
assert.Equal("/a/b/logo.png", r.RelPermalink())
assert.Equal("https://example.com/a/b/logo.png", r.Permalink())
r, err = spec.NewResourceFromFilename(nil, "a/b/data.json", "a/b/data.json")
r, err = spec.New(ResourceSourceDescriptor{SourceFilename: "a/b/data.json"})
assert.NoError(err)
assert.NotNil(r)
@ -84,8 +85,7 @@ func TestNewResourceFromFilenameSubPathInBaseURL(t *testing.T) {
writeSource(t, spec.Fs, "content/a/b/logo.png", "image")
r, err := spec.NewResourceFromFilename(nil,
filepath.FromSlash("a/b/logo.png"), filepath.FromSlash("a/b/logo.png"))
r, err := spec.New(ResourceSourceDescriptor{SourceFilename: filepath.FromSlash("a/b/logo.png")})
assert.NoError(err)
assert.NotNil(r)
@ -93,18 +93,20 @@ func TestNewResourceFromFilenameSubPathInBaseURL(t *testing.T) {
assert.Equal("/docs/a/b/logo.png", r.RelPermalink())
assert.Equal("https://example.com/docs/a/b/logo.png", r.Permalink())
img := r.(*Image)
assert.Equal(filepath.FromSlash("/a/b/logo.png"), img.target())
assert.Equal(filepath.FromSlash("/a/b/logo.png"), img.targetFilename())
}
var pngType, _ = media.FromString("image/png")
func TestResourcesByType(t *testing.T) {
assert := require.New(t)
spec := newTestResourceSpec(assert)
resources := Resources{
spec.newGenericResource(nil, nil, "/a/foo1.css", "foo1.css", "css"),
spec.newGenericResource(nil, nil, "/a/logo.png", "logo.css", "image"),
spec.newGenericResource(nil, nil, "/a/foo2.css", "foo2.css", "css"),
spec.newGenericResource(nil, nil, "/a/foo3.css", "foo3.css", "css")}
spec.newGenericResource(nil, nil, nil, "/a/foo1.css", "foo1.css", media.CSSType),
spec.newGenericResource(nil, nil, nil, "/a/logo.png", "logo.css", pngType),
spec.newGenericResource(nil, nil, nil, "/a/foo2.css", "foo2.css", media.CSSType),
spec.newGenericResource(nil, nil, nil, "/a/foo3.css", "foo3.css", media.CSSType)}
assert.Len(resources.ByType("css"), 3)
assert.Len(resources.ByType("image"), 1)
@ -115,25 +117,25 @@ func TestResourcesGetByPrefix(t *testing.T) {
assert := require.New(t)
spec := newTestResourceSpec(assert)
resources := Resources{
spec.newGenericResource(nil, nil, "/a/foo1.css", "foo1.css", "css"),
spec.newGenericResource(nil, nil, "/a/logo1.png", "logo1.png", "image"),
spec.newGenericResource(nil, nil, "/b/Logo2.png", "Logo2.png", "image"),
spec.newGenericResource(nil, nil, "/b/foo2.css", "foo2.css", "css"),
spec.newGenericResource(nil, nil, "/b/foo3.css", "foo3.css", "css")}
spec.newGenericResource(nil, nil, nil, "/a/foo1.css", "foo1.css", media.CSSType),
spec.newGenericResource(nil, nil, nil, "/a/logo1.png", "logo1.png", pngType),
spec.newGenericResource(nil, nil, nil, "/b/Logo2.png", "Logo2.png", pngType),
spec.newGenericResource(nil, nil, nil, "/b/foo2.css", "foo2.css", media.CSSType),
spec.newGenericResource(nil, nil, nil, "/b/foo3.css", "foo3.css", media.CSSType)}
assert.Nil(resources.GetByPrefix("asdf"))
assert.Equal("/logo1.png", resources.GetByPrefix("logo").RelPermalink())
assert.Equal("/logo1.png", resources.GetByPrefix("loGo").RelPermalink())
assert.Equal("/Logo2.png", resources.GetByPrefix("logo2").RelPermalink())
assert.Equal("/foo2.css", resources.GetByPrefix("foo2").RelPermalink())
assert.Equal("/foo1.css", resources.GetByPrefix("foo1").RelPermalink())
assert.Equal("/foo1.css", resources.GetByPrefix("foo1").RelPermalink())
assert.Nil(resources.GetByPrefix("asdfasdf"))
assert.Nil(resources.GetMatch("asdf*"))
assert.Equal("/logo1.png", resources.GetMatch("logo*").RelPermalink())
assert.Equal("/logo1.png", resources.GetMatch("loGo*").RelPermalink())
assert.Equal("/Logo2.png", resources.GetMatch("logo2*").RelPermalink())
assert.Equal("/foo2.css", resources.GetMatch("foo2*").RelPermalink())
assert.Equal("/foo1.css", resources.GetMatch("foo1*").RelPermalink())
assert.Equal("/foo1.css", resources.GetMatch("foo1*").RelPermalink())
assert.Nil(resources.GetMatch("asdfasdf*"))
assert.Equal(2, len(resources.ByPrefix("logo")))
assert.Equal(1, len(resources.ByPrefix("logo2")))
assert.Equal(2, len(resources.Match("logo*")))
assert.Equal(1, len(resources.Match("logo2*")))
logo := resources.GetByPrefix("logo")
logo := resources.GetMatch("logo*")
assert.NotNil(logo.Params())
assert.Equal("logo1.png", logo.Name())
assert.Equal("logo1.png", logo.Title())
@ -144,14 +146,14 @@ func TestResourcesGetMatch(t *testing.T) {
assert := require.New(t)
spec := newTestResourceSpec(assert)
resources := Resources{
spec.newGenericResource(nil, nil, "/a/foo1.css", "foo1.css", "css"),
spec.newGenericResource(nil, nil, "/a/logo1.png", "logo1.png", "image"),
spec.newGenericResource(nil, nil, "/b/Logo2.png", "Logo2.png", "image"),
spec.newGenericResource(nil, nil, "/b/foo2.css", "foo2.css", "css"),
spec.newGenericResource(nil, nil, "/b/foo3.css", "foo3.css", "css"),
spec.newGenericResource(nil, nil, "/b/c/foo4.css", "c/foo4.css", "css"),
spec.newGenericResource(nil, nil, "/b/c/foo5.css", "c/foo5.css", "css"),
spec.newGenericResource(nil, nil, "/b/c/d/foo6.css", "c/d/foo6.css", "css"),
spec.newGenericResource(nil, nil, nil, "/a/foo1.css", "foo1.css", media.CSSType),
spec.newGenericResource(nil, nil, nil, "/a/logo1.png", "logo1.png", pngType),
spec.newGenericResource(nil, nil, nil, "/b/Logo2.png", "Logo2.png", pngType),
spec.newGenericResource(nil, nil, nil, "/b/foo2.css", "foo2.css", media.CSSType),
spec.newGenericResource(nil, nil, nil, "/b/foo3.css", "foo3.css", media.CSSType),
spec.newGenericResource(nil, nil, nil, "/b/c/foo4.css", "c/foo4.css", media.CSSType),
spec.newGenericResource(nil, nil, nil, "/b/c/foo5.css", "c/foo5.css", media.CSSType),
spec.newGenericResource(nil, nil, nil, "/b/c/d/foo6.css", "c/d/foo6.css", media.CSSType),
}
assert.Equal("/logo1.png", resources.GetMatch("logo*").RelPermalink())
@ -186,226 +188,6 @@ func TestResourcesGetMatch(t *testing.T) {
}
func TestAssignMetadata(t *testing.T) {
assert := require.New(t)
spec := newTestResourceSpec(assert)
var foo1, foo2, foo3, logo1, logo2, logo3 Resource
var resources Resources
for _, this := range []struct {
metaData []map[string]interface{}
assertFunc func(err error)
}{
{[]map[string]interface{}{
{
"title": "My Resource",
"name": "My Name",
"src": "*",
},
}, func(err error) {
assert.Equal("My Resource", logo1.Title())
assert.Equal("My Name", logo1.Name())
assert.Equal("My Name", foo2.Name())
}},
{[]map[string]interface{}{
{
"title": "My Logo",
"src": "*loGo*",
},
{
"title": "My Resource",
"name": "My Name",
"src": "*",
},
}, func(err error) {
assert.Equal("My Logo", logo1.Title())
assert.Equal("My Logo", logo2.Title())
assert.Equal("My Name", logo1.Name())
assert.Equal("My Name", foo2.Name())
assert.Equal("My Name", foo3.Name())
assert.Equal("My Resource", foo3.Title())
}},
{[]map[string]interface{}{
{
"title": "My Logo",
"src": "*loGo*",
"params": map[string]interface{}{
"Param1": true,
"icon": "logo",
},
},
{
"title": "My Resource",
"src": "*",
"params": map[string]interface{}{
"Param2": true,
"icon": "resource",
},
},
}, func(err error) {
assert.NoError(err)
assert.Equal("My Logo", logo1.Title())
assert.Equal("My Resource", foo3.Title())
_, p1 := logo2.Params()["param1"]
_, p2 := foo2.Params()["param2"]
_, p1_2 := foo2.Params()["param1"]
_, p2_2 := logo2.Params()["param2"]
icon1, _ := logo2.Params()["icon"]
icon2, _ := foo2.Params()["icon"]
assert.True(p1)
assert.True(p2)
// Check merge
assert.True(p2_2)
assert.False(p1_2)
assert.Equal("logo", icon1)
assert.Equal("resource", icon2)
}},
{[]map[string]interface{}{
{
"name": "Logo Name #:counter",
"src": "*logo*",
},
{
"title": "Resource #:counter",
"name": "Name #:counter",
"src": "*",
},
}, func(err error) {
assert.NoError(err)
assert.Equal("Resource #2", logo2.Title())
assert.Equal("Logo Name #1", logo2.Name())
assert.Equal("Resource #4", logo1.Title())
assert.Equal("Logo Name #2", logo1.Name())
assert.Equal("Resource #1", foo2.Title())
assert.Equal("Resource #3", foo1.Title())
assert.Equal("Name #2", foo1.Name())
assert.Equal("Resource #5", foo3.Title())
assert.Equal(logo2, resources.GetByPrefix("logo name #1"))
}},
{[]map[string]interface{}{
{
"title": "Third Logo #:counter",
"src": "logo3.png",
},
{
"title": "Other Logo #:counter",
"name": "Name #:counter",
"src": "logo*",
},
}, func(err error) {
assert.NoError(err)
assert.Equal("Third Logo #1", logo3.Title())
assert.Equal("Name #3", logo3.Name())
assert.Equal("Other Logo #1", logo2.Title())
assert.Equal("Name #1", logo2.Name())
assert.Equal("Other Logo #2", logo1.Title())
assert.Equal("Name #2", logo1.Name())
}},
{[]map[string]interface{}{
{
"title": "Third Logo",
"src": "logo3.png",
},
{
"title": "Other Logo #:counter",
"name": "Name #:counter",
"src": "logo*",
},
}, func(err error) {
assert.NoError(err)
assert.Equal("Third Logo", logo3.Title())
assert.Equal("Name #3", logo3.Name())
assert.Equal("Other Logo #1", logo2.Title())
assert.Equal("Name #1", logo2.Name())
assert.Equal("Other Logo #2", logo1.Title())
assert.Equal("Name #2", logo1.Name())
}},
{[]map[string]interface{}{
{
"name": "third-logo",
"src": "logo3.png",
},
{
"title": "Logo #:counter",
"name": "Name #:counter",
"src": "logo*",
},
}, func(err error) {
assert.NoError(err)
assert.Equal("Logo #3", logo3.Title())
assert.Equal("third-logo", logo3.Name())
assert.Equal("Logo #1", logo2.Title())
assert.Equal("Name #1", logo2.Name())
assert.Equal("Logo #2", logo1.Title())
assert.Equal("Name #2", logo1.Name())
}},
{[]map[string]interface{}{
{
"title": "Third Logo #:counter",
},
}, func(err error) {
// Missing src
assert.Error(err)
}},
{[]map[string]interface{}{
{
"title": "Title",
"src": "[]",
},
}, func(err error) {
// Invalid pattern
assert.Error(err)
}},
} {
foo2 = spec.newGenericResource(nil, nil, "/b/foo2.css", "foo2.css", "css")
logo2 = spec.newGenericResource(nil, nil, "/b/Logo2.png", "Logo2.png", "image")
foo1 = spec.newGenericResource(nil, nil, "/a/foo1.css", "foo1.css", "css")
logo1 = spec.newGenericResource(nil, nil, "/a/logo1.png", "logo1.png", "image")
foo3 = spec.newGenericResource(nil, nil, "/b/foo3.css", "foo3.css", "css")
logo3 = spec.newGenericResource(nil, nil, "/b/logo3.png", "logo3.png", "image")
resources = Resources{
foo2,
logo2,
foo1,
logo1,
foo3,
logo3,
}
this.assertFunc(AssignMetadata(this.metaData, resources...))
}
}
func BenchmarkResourcesByPrefix(b *testing.B) {
resources := benchResources(b)
prefixes := []string{"abc", "jkl", "nomatch", "sub/"}
rnd := rand.New(rand.NewSource(time.Now().Unix()))
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
resources.ByPrefix(prefixes[rnd.Intn(len(prefixes))])
}
})
}
func BenchmarkResourcesMatch(b *testing.B) {
resources := benchResources(b)
prefixes := []string{"abc*", "jkl*", "nomatch*", "sub/*"}
@ -428,7 +210,7 @@ func BenchmarkResourcesMatchA100(b *testing.B) {
a100 := strings.Repeat("a", 100)
pattern := "a*a*a*a*a*a*a*a*b"
resources := Resources{spec.newGenericResource(nil, nil, "/a/"+a100, a100, "css")}
resources := Resources{spec.newGenericResource(nil, nil, nil, "/a/"+a100, a100, media.CSSType)}
b.ResetTimer()
for i := 0; i < b.N; i++ {
@ -444,17 +226,17 @@ func benchResources(b *testing.B) Resources {
for i := 0; i < 30; i++ {
name := fmt.Sprintf("abcde%d_%d.css", i%5, i)
resources = append(resources, spec.newGenericResource(nil, nil, "/a/"+name, name, "css"))
resources = append(resources, spec.newGenericResource(nil, nil, nil, "/a/"+name, name, media.CSSType))
}
for i := 0; i < 30; i++ {
name := fmt.Sprintf("efghi%d_%d.css", i%5, i)
resources = append(resources, spec.newGenericResource(nil, nil, "/a/"+name, name, "css"))
resources = append(resources, spec.newGenericResource(nil, nil, nil, "/a/"+name, name, media.CSSType))
}
for i := 0; i < 30; i++ {
name := fmt.Sprintf("jklmn%d_%d.css", i%5, i)
resources = append(resources, spec.newGenericResource(nil, nil, "/b/sub/"+name, "sub/"+name, "css"))
resources = append(resources, spec.newGenericResource(nil, nil, nil, "/b/sub/"+name, "sub/"+name, media.CSSType))
}
return resources
@ -482,7 +264,7 @@ func BenchmarkAssignMetadata(b *testing.B) {
}
for i := 0; i < 20; i++ {
name := fmt.Sprintf("foo%d_%d.css", i%5, i)
resources = append(resources, spec.newGenericResource(nil, nil, "/a/"+name, name, "css"))
resources = append(resources, spec.newGenericResource(nil, nil, nil, "/a/"+name, name, media.CSSType))
}
b.StartTimer()

View File

@ -0,0 +1,76 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package templates contains functions for template processing of Resource objects.
package templates
import (
"fmt"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/resource"
"github.com/gohugoio/hugo/tpl"
)
// Client contains methods to perform template processing of Resource objects.
type Client struct {
rs *resource.Spec
textTemplate tpl.TemplateParseFinder
}
// New creates a new Client with the given specification.
func New(rs *resource.Spec, textTemplate tpl.TemplateParseFinder) *Client {
if rs == nil {
panic("must provice a resource Spec")
}
if textTemplate == nil {
panic("must provide a textTemplate")
}
return &Client{rs: rs, textTemplate: textTemplate}
}
type executeAsTemplateTransform struct {
rs *resource.Spec
textTemplate tpl.TemplateParseFinder
targetPath string
data interface{}
}
func (t *executeAsTemplateTransform) Key() resource.ResourceTransformationKey {
return resource.NewResourceTransformationKey("execute-as-template", t.targetPath)
}
func (t *executeAsTemplateTransform) Transform(ctx *resource.ResourceTransformationCtx) error {
tplStr := helpers.ReaderToString(ctx.From)
templ, err := t.textTemplate.Parse(ctx.InPath, tplStr)
if err != nil {
return fmt.Errorf("failed to parse Resource %q as Template: %s", ctx.InPath, err)
}
ctx.OutPath = t.targetPath
return templ.Execute(ctx.To, t.data)
}
func (c *Client) ExecuteAsTemplate(res resource.Resource, targetPath string, data interface{}) (resource.Resource, error) {
return c.rs.Transform(
res,
&executeAsTemplateTransform{
rs: c.rs,
targetPath: helpers.ToSlashTrimLeading(targetPath),
textTemplate: c.textTemplate,
data: data,
},
)
}

View File

@ -33,7 +33,9 @@ func newTestResourceSpecForBaseURL(assert *require.Assertions, baseURL string) *
cfg.Set("dataDir", "data")
cfg.Set("i18nDir", "i18n")
cfg.Set("layoutDir", "layouts")
cfg.Set("assetDir", "assets")
cfg.Set("archetypeDir", "archetypes")
cfg.Set("publishDir", "public")
imagingCfg := map[string]interface{}{
"resampleFilter": "linear",
@ -49,7 +51,7 @@ func newTestResourceSpecForBaseURL(assert *require.Assertions, baseURL string) *
assert.NoError(err)
spec, err := NewSpec(s, media.DefaultTypes)
spec, err := NewSpec(s, nil, media.DefaultTypes)
assert.NoError(err)
return spec
}
@ -72,7 +74,9 @@ func newTestResourceOsFs(assert *require.Assertions) *Spec {
cfg.Set("dataDir", "data")
cfg.Set("i18nDir", "i18n")
cfg.Set("layoutDir", "layouts")
cfg.Set("assetDir", "assets")
cfg.Set("archetypeDir", "archetypes")
cfg.Set("publishDir", "public")
fs := hugofs.NewFrom(hugofs.Os, cfg)
fs.Destination = &afero.MemMapFs{}
@ -81,7 +85,7 @@ func newTestResourceOsFs(assert *require.Assertions) *Spec {
assert.NoError(err)
spec, err := NewSpec(s, media.DefaultTypes)
spec, err := NewSpec(s, nil, media.DefaultTypes)
assert.NoError(err)
return spec
@ -102,12 +106,11 @@ func fetchImageForSpec(spec *Spec, assert *require.Assertions, name string) *Ima
return r.(*Image)
}
func fetchResourceForSpec(spec *Spec, assert *require.Assertions, name string) Resource {
func fetchResourceForSpec(spec *Spec, assert *require.Assertions, name string) ContentResource {
src, err := os.Open(filepath.FromSlash("testdata/" + name))
assert.NoError(err)
assert.NoError(spec.BaseFs.ContentFs.MkdirAll(filepath.Dir(name), 0755))
out, err := spec.BaseFs.ContentFs.Create(name)
out, err := openFileForWriting(spec.BaseFs.Content.Fs, name)
assert.NoError(err)
_, err = io.Copy(out, src)
out.Close()
@ -118,10 +121,10 @@ func fetchResourceForSpec(spec *Spec, assert *require.Assertions, name string) R
return path.Join("/a", s)
}
r, err := spec.NewResourceFromFilename(factory, name, name)
r, err := spec.New(ResourceSourceDescriptor{TargetPathBuilder: factory, SourceFilename: name})
assert.NoError(err)
return r
return r.(ContentResource)
}
func assertImageFile(assert *require.Assertions, fs afero.Fs, filename string, width, height int) {

View File

@ -0,0 +1,101 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scss
import (
"github.com/bep/go-tocss/scss"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugolib/filesystems"
"github.com/gohugoio/hugo/resource"
"github.com/mitchellh/mapstructure"
)
type Client struct {
rs *resource.Spec
sfs *filesystems.SourceFilesystem
}
func New(fs *filesystems.SourceFilesystem, rs *resource.Spec) (*Client, error) {
return &Client{sfs: fs, rs: rs}, nil
}
type Options struct {
// Hugo will by default just replace the extension of the source
// with .css, e.g. "scss/main.scss" becomes "scss/main.css". You can
// control this by setting TargetPath, e.g. "styles/main.css" will create
// a Resource with that as a base for RelPermalink etc.
TargetPath string
// Default is nested.
// One of nested, expanded, compact, compressed.
OutputStyle string
// Precision of floating point math.
Precision int
// When enabled, Hugo will generate a source map.
EnableSourceMap bool
}
type options struct {
// The options we receive from the end user.
from Options
// The options we send to the SCSS library.
to scss.Options
}
func (c *Client) ToCSS(res resource.Resource, opts Options) (resource.Resource, error) {
internalOptions := options{
from: opts,
}
// Transfer values from client.
internalOptions.to.Precision = opts.Precision
internalOptions.to.OutputStyle = scss.OutputStyleFromString(opts.OutputStyle)
if internalOptions.to.Precision == 0 {
// bootstrap-sass requires 8 digits precision. The libsass default is 5.
// https://github.com/twbs/bootstrap-sass/blob/master/README.md#sass-number-precision
internalOptions.to.Precision = 8
}
return c.rs.Transform(
res,
&toCSSTransformation{c: c, options: internalOptions},
)
}
type toCSSTransformation struct {
c *Client
options options
}
func (t *toCSSTransformation) Key() resource.ResourceTransformationKey {
return resource.NewResourceTransformationKey("tocss", t.options.from)
}
func DecodeOptions(m map[string]interface{}) (opts Options, err error) {
if m == nil {
return
}
err = mapstructure.WeakDecode(m, &opts)
if opts.TargetPath != "" {
opts.TargetPath = helpers.ToSlashTrimLeading(opts.TargetPath)
}
return
}
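
DecodeOptions leans on mapstructure's weak decoding, so values coming from a template dict can be loosely typed (e.g. a precision given as a string). A self-contained sketch of that behaviour, using a local copy of the Options struct for illustration:

```go
package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

// Local copy of the Options struct above, for illustration only.
type Options struct {
	TargetPath      string
	OutputStyle     string
	Precision       int
	EnableSourceMap bool
}

func main() {
	// As it might arrive from a template dict; note the string-typed
	// precision, which WeakDecode coerces to an int.
	m := map[string]interface{}{
		"targetPath":      "styles/main.css",
		"outputStyle":     "compressed",
		"precision":       "8",
		"enableSourceMap": true,
	}

	var opts Options
	if err := mapstructure.WeakDecode(m, &opts); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", opts)
	// {TargetPath:styles/main.css OutputStyle:compressed Precision:8 EnableSourceMap:true}
}
```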

View File

@ -0,0 +1,111 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// +build extended
package scss
import (
"fmt"
"io"
"path"
"strings"
"github.com/bep/go-tocss/scss"
"github.com/bep/go-tocss/scss/libsass"
"github.com/bep/go-tocss/tocss"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resource"
)
// Used in tests. This feature requires Hugo to be built with the extended tag.
func Supports() bool {
return true
}
func (t *toCSSTransformation) Transform(ctx *resource.ResourceTransformationCtx) error {
ctx.OutMediaType = media.CSSType
var outName string
if t.options.from.TargetPath != "" {
ctx.OutPath = t.options.from.TargetPath
} else {
ctx.ReplaceOutPathExtension(".css")
}
outName = path.Base(ctx.OutPath)
options := t.options
// We may allow the end user to add IncludePaths later, if we find a use
// case for that.
options.to.IncludePaths = t.c.sfs.RealDirs(path.Dir(ctx.SourcePath))
if ctx.InMediaType.SubType == media.SASSType.SubType {
options.to.SassSyntax = true
}
if options.from.EnableSourceMap {
options.to.SourceMapFilename = outName + ".map"
options.to.SourceMapRoot = t.c.rs.WorkingDir
// Setting this to the relative input filename will get the source map
// more correct for the main entry path (main.scss typically), but
// it will mess up the import mappings. As a workaround, we do a replacement
// in the source map itself (see below).
//options.InputPath = inputPath
options.to.OutputPath = outName
options.to.SourceMapContents = true
options.to.OmitSourceMapURL = false
options.to.EnableEmbeddedSourceMap = false
}
res, err := t.c.toCSS(options.to, ctx.To, ctx.From)
if err != nil {
return err
}
if options.from.EnableSourceMap && res.SourceMapContent != "" {
sourcePath := t.c.sfs.RealFilename(ctx.SourcePath)
if strings.HasPrefix(sourcePath, t.c.rs.WorkingDir) {
sourcePath = strings.TrimPrefix(sourcePath, t.c.rs.WorkingDir+helpers.FilePathSeparator)
}
// This is a workaround for what looks like a bug in Libsass. But
// getting this resolution correct in tools like Chrome Workspaces
// is important enough to go this extra mile.
mapContent := strings.Replace(res.SourceMapContent, `stdin",`, fmt.Sprintf("%s\",", sourcePath), 1)
return ctx.PublishSourceMap(mapContent)
}
return nil
}
func (c *Client) toCSS(options scss.Options, dst io.Writer, src io.Reader) (tocss.Result, error) {
var res tocss.Result
transpiler, err := libsass.New(options)
if err != nil {
return res, err
}
res, err = transpiler.Execute(dst, src)
if err != nil {
return res, fmt.Errorf("SCSS processing failed: %s", err)
}
return res, nil
}
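
For reference, the go-tocss/libsass calls used above can be exercised on their own. A minimal sketch, assuming a build with the `extended` tag (so libsass is compiled in) and using only the calls that appear in this file:

```go
package main

import (
	"os"
	"strings"

	"github.com/bep/go-tocss/scss"
	"github.com/bep/go-tocss/scss/libsass"
)

func main() {
	opts := scss.Options{
		Precision:   8, // bootstrap-sass needs 8 digits precision
		OutputStyle: scss.OutputStyleFromString("compressed"),
	}

	transpiler, err := libsass.New(opts)
	if err != nil {
		panic(err)
	}

	src := strings.NewReader(`$c: #333; body { color: $c; }`)
	// dst is any io.Writer; Hugo writes to the transformation context.
	if _, err := transpiler.Execute(os.Stdout, src); err != nil {
		panic(err)
	}
}
```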

View File

@ -0,0 +1,30 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// +build !extended
package scss
import (
"github.com/gohugoio/hugo/common/errors"
"github.com/gohugoio/hugo/resource"
)
// Used in tests.
func Supports() bool {
return false
}
func (t *toCSSTransformation) Transform(ctx *resource.ResourceTransformationCtx) error {
return errors.FeatureNotAvailableErr
}

487
resource/transform.go Normal file
View File

@ -0,0 +1,487 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package resource
import (
"bytes"
"path"
"strconv"
"github.com/gohugoio/hugo/common/errors"
"github.com/gohugoio/hugo/helpers"
"github.com/mitchellh/hashstructure"
"github.com/spf13/afero"
"fmt"
"io"
"sync"
"github.com/gohugoio/hugo/media"
bp "github.com/gohugoio/hugo/bufferpool"
)
var (
_ ContentResource = (*transformedResource)(nil)
_ ReadSeekCloserResource = (*transformedResource)(nil)
)
func (s *Spec) Transform(r Resource, t ResourceTransformation) (Resource, error) {
return &transformedResource{
Resource: r,
transformation: t,
transformedResourceMetadata: transformedResourceMetadata{MetaData: make(map[string]interface{})},
cache: s.ResourceCache}, nil
}
type ResourceTransformationCtx struct {
// The content to transform.
From io.Reader
// The target of content transformation.
// The current implementation requires that the content read from From is
// written to To even if no transformation is performed.
To io.Writer
// This is the relative path to the original source. Unix styled slashes.
SourcePath string
// This is the relative target path to the resource. Unix styled slashes.
InPath string
// The relative target path to the transformed resource. Unix styled slashes.
OutPath string
// The input media type
InMediaType media.Type
// The media type of the transformed resource.
OutMediaType media.Type
// Data can be set on the transformed Resource. Note that the values need
// to be simple types, as they are serialized to JSON and back.
Data map[string]interface{}
// This is used to publish additional artifacts, e.g. source maps.
// We may improve this.
OpenResourcePublisher func(relTargetPath string) (io.WriteCloser, error)
}
// AddOutPathIdentifier transforms InPath to OutPath, adding an identifier,
// e.g. '.min', before any extension.
func (ctx *ResourceTransformationCtx) AddOutPathIdentifier(identifier string) {
ctx.OutPath = ctx.addPathIdentifier(ctx.InPath, identifier)
}
func (ctx *ResourceTransformationCtx) addPathIdentifier(inPath, identifier string) string {
dir, file := path.Split(inPath)
base, ext := helpers.PathAndExt(file)
return path.Join(dir, (base + identifier + ext))
}
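
A quick worked example of the identifier helper: `.min` is inserted between the base name and the extension while the directory is preserved. The sketch below reimplements the same logic with only the standard library (helpers.PathAndExt is Hugo-internal):

```go
package main

import (
	"fmt"
	"path"
	"strings"
)

// addPathIdentifier mirrors the helper above: insert an identifier
// (e.g. ".min") between the base name and the extension.
func addPathIdentifier(inPath, identifier string) string {
	dir, file := path.Split(inPath)
	ext := path.Ext(file)
	base := strings.TrimSuffix(file, ext)
	return path.Join(dir, base+identifier+ext)
}

func main() {
	fmt.Println(addPathIdentifier("css/main.css", ".min")) // css/main.min.css
}
```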
// ReplaceOutPathExtension transforms InPath to OutPath, replacing the file
// extension with newExt, e.g. ".css".
func (ctx *ResourceTransformationCtx) ReplaceOutPathExtension(newExt string) {
dir, file := path.Split(ctx.InPath)
base, _ := helpers.PathAndExt(file)
ctx.OutPath = path.Join(dir, (base + newExt))
}
// PublishSourceMap writes the content to the target folder of the main resource
// with the ".map" extension added.
func (ctx *ResourceTransformationCtx) PublishSourceMap(content string) error {
target := ctx.OutPath + ".map"
f, err := ctx.OpenResourcePublisher(target)
if err != nil {
return err
}
defer f.Close()
_, err = f.Write([]byte(content))
return err
}
// ResourceTransformationKey is provided by the different transformation implementations.
// It identifies the transformation (name) and its configuration (elements).
// This is combined with the keys of the other transformations in the chain,
// the target filename and a content hash of the origin to form the cache key.
type ResourceTransformationKey struct {
name string
elements []interface{}
}
// NewResourceTransformationKey creates a new ResourceTransformationKey from the transformation
// name and elements. We will create a 64 bit FNV hash from the elements, which when combined
// with the other key elements should be unique for all practical applications.
func NewResourceTransformationKey(name string, elements ...interface{}) ResourceTransformationKey {
return ResourceTransformationKey{name: name, elements: elements}
}
// Do not change this without good reasons.
func (k ResourceTransformationKey) key() string {
if len(k.elements) == 0 {
return k.name
}
sb := bp.GetBuffer()
defer bp.PutBuffer(sb)
sb.WriteString(k.name)
for _, element := range k.elements {
hash, err := hashstructure.Hash(element, nil)
if err != nil {
panic(err)
}
sb.WriteString("_")
sb.WriteString(strconv.FormatUint(hash, 10))
}
return sb.String()
}
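
So the key is deliberately cheap and stable: the transformation name plus a 64-bit hash (via hashstructure) of each option element, joined with underscores. A standalone sketch of that construction, detached from the rest of the cache machinery:

```go
package main

import (
	"fmt"
	"strconv"
	"strings"

	"github.com/mitchellh/hashstructure"
)

// key mirrors ResourceTransformationKey.key(): name + "_" + a 64-bit
// hash of each option element.
func key(name string, elements ...interface{}) string {
	var sb strings.Builder
	sb.WriteString(name)
	for _, element := range elements {
		hash, err := hashstructure.Hash(element, nil)
		if err != nil {
			panic(err)
		}
		sb.WriteString("_")
		sb.WriteString(strconv.FormatUint(hash, 10))
	}
	return sb.String()
}

func main() {
	opts := struct{ OutputStyle string }{"compressed"}
	// Prints something like "tocss_1234567890123456789"; the hash is
	// stable for the same values across runs and platforms.
	fmt.Println(key("tocss", opts))
}
```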
// ResourceTransformation is the interface that a resource transformation step
// needs to implement.
type ResourceTransformation interface {
Key() ResourceTransformationKey
Transform(ctx *ResourceTransformationCtx) error
}
// We will persist this information to disk.
type transformedResourceMetadata struct {
Target string `json:"Target"`
MediaTypeV string `json:"MediaType"`
MetaData map[string]interface{} `json:"Data"`
}
type transformedResource struct {
cache *ResourceCache
// This is the filename inside resources/_gen/assets
sourceFilename string
linker permalinker
// The transformation to apply.
transformation ResourceTransformation
// We apply the transformations lazily.
transformInit sync.Once
transformErr error
// The transformed values
content string
contentInit sync.Once
transformedResourceMetadata
// The source
Resource
}
func (r *transformedResource) ReadSeekCloser() (ReadSeekCloser, error) {
rc, ok := r.Resource.(ReadSeekCloserResource)
if !ok {
return nil, fmt.Errorf("resource %T is not a ReadSeekerCloserResource", rc)
}
return rc.ReadSeekCloser()
}
func (r *transformedResource) transferTransformedValues(another *transformedResource) {
if another.content != "" {
r.contentInit.Do(func() {
r.content = another.content
})
}
r.transformedResourceMetadata = another.transformedResourceMetadata
}
func (r *transformedResource) tryTransformedFileCache(key string) io.ReadCloser {
f, meta, found := r.cache.getFromFile(key)
if !found {
return nil
}
r.transformedResourceMetadata = meta
r.sourceFilename = f.Name()
return f
}
func (r *transformedResource) Content() (interface{}, error) {
if err := r.initTransform(true); err != nil {
return nil, err
}
if err := r.initContent(); err != nil {
return "", err
}
return r.content, nil
}
func (r *transformedResource) Data() interface{} {
return r.MetaData
}
func (r *transformedResource) MediaType() media.Type {
if err := r.initTransform(false); err != nil {
return media.Type{}
}
m, _ := r.cache.rs.MediaTypes.GetByType(r.MediaTypeV)
return m
}
func (r *transformedResource) Permalink() string {
if err := r.initTransform(false); err != nil {
return ""
}
return r.linker.permalinkFor(r.Target)
}
func (r *transformedResource) RelPermalink() string {
if err := r.initTransform(false); err != nil {
return ""
}
return r.linker.relPermalinkFor(r.Target)
}
func (r *transformedResource) initContent() error {
var err error
r.contentInit.Do(func() {
var b []byte
b, err = afero.ReadFile(r.cache.rs.Resources.Fs, r.sourceFilename)
if err != nil {
return
}
r.content = string(b)
})
return err
}
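
The content is only materialised when something actually asks for it, read back through afero from the cached file. A small standalone afero sketch of that read-back; the in-memory filesystem and the path below stand in for Hugo's resources cache:

```go
package main

import (
	"fmt"

	"github.com/spf13/afero"
)

func main() {
	// Stand-in for r.cache.rs.Resources.Fs.
	fs := afero.NewMemMapFs()
	if err := afero.WriteFile(fs, "_gen/assets/css/styles.css", []byte("body{}"), 0644); err != nil {
		panic(err)
	}

	// The lazy read performed in initContent.
	b, err := afero.ReadFile(fs, "_gen/assets/css/styles.css")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}
```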
func (r *transformedResource) transform(setContent bool) (err error) {
openPublishFileForWriting := func(relTargetPath string) (io.WriteCloser, error) {
return openFileForWriting(r.cache.rs.PublishFs, r.linker.relTargetPathFor(relTargetPath))
}
// This can be the last resource in a chain.
// Rewind and create a processing chain.
var chain []Resource
current := r
for {
rr := current.Resource
chain = append(chain[:0], append([]Resource{rr}, chain[0:]...)...)
if tr, ok := rr.(*transformedResource); ok {
current = tr
} else {
break
}
}
// Append the current transformer at the end
chain = append(chain, r)
first := chain[0]
contentrc, err := contentReadSeekerCloser(first)
if err != nil {
return err
}
defer contentrc.Close()
// Files with a suffix will be stored in the cache (both on disk and in memory),
// partitioned by their suffix. Files without a suffix end up below /other.
// This partition is also how we determine what to delete on server reloads.
var key, base string
for _, element := range chain {
switch v := element.(type) {
case *transformedResource:
key = key + "_" + v.transformation.Key().key()
case permalinker:
r.linker = v
p := v.relTargetPath()
if p == "" {
panic("target path needed for key creation")
}
partition := ResourceKeyPartition(p)
base = partition + "/" + p
default:
return fmt.Errorf("transformation not supported for type %T", element)
}
}
key = r.cache.cleanKey(base + "_" + helpers.MD5String(key))
cached, found := r.cache.get(key)
if found {
r.transferTransformedValues(cached.(*transformedResource))
return
}
// Acquire a write lock for the named transformation.
r.cache.nlocker.Lock(key)
// Check the cache again.
cached, found = r.cache.get(key)
if found {
r.transferTransformedValues(cached.(*transformedResource))
r.cache.nlocker.Unlock(key)
return
}
defer r.cache.nlocker.Unlock(key)
defer r.cache.set(key, r)
b1 := bp.GetBuffer()
b2 := bp.GetBuffer()
defer bp.PutBuffer(b1)
defer bp.PutBuffer(b2)
tctx := &ResourceTransformationCtx{
Data: r.transformedResourceMetadata.MetaData,
OpenResourcePublisher: openPublishFileForWriting,
}
tctx.InMediaType = first.MediaType()
tctx.OutMediaType = first.MediaType()
tctx.From = contentrc
tctx.To = b1
if r.linker != nil {
tctx.InPath = r.linker.targetPath()
tctx.SourcePath = tctx.InPath
}
counter := 0
var transformedContentr io.Reader
for _, element := range chain {
tr, ok := element.(*transformedResource)
if !ok {
continue
}
counter++
if counter != 1 {
tctx.InMediaType = tctx.OutMediaType
}
if counter%2 == 0 {
tctx.From = b1
b2.Reset()
tctx.To = b2
} else {
if counter != 1 {
// The first reader is the file.
tctx.From = b2
}
b1.Reset()
tctx.To = b1
}
if err := tr.transformation.Transform(tctx); err != nil {
if err == errors.FeatureNotAvailableErr {
// This transformation is not available in this
// Hugo installation (scss not compiled in, PostCSS not available etc.)
// If a prepared bundle for this transformation chain is available, use that.
f := r.tryTransformedFileCache(key)
if f == nil {
return fmt.Errorf("failed to transform %q (%s): %s", tctx.InPath, tctx.InMediaType.Type(), err)
}
transformedContentr = f
defer f.Close()
// The reader above is all we need.
break
}
// Abort.
return err
}
if tctx.OutPath != "" {
tctx.InPath = tctx.OutPath
tctx.OutPath = ""
}
}
if transformedContentr == nil {
r.Target = tctx.InPath
r.MediaTypeV = tctx.OutMediaType.Type()
}
publicw, err := openPublishFileForWriting(r.Target)
if err != nil {
r.transformErr = err
return
}
defer publicw.Close()
publishwriters := []io.Writer{publicw}
if transformedContentr == nil {
// Also write it to the cache
metaw, err := r.cache.writeMeta(key, r.transformedResourceMetadata)
if err != nil {
return err
}
r.sourceFilename = metaw.Name()
defer metaw.Close()
publishwriters = append(publishwriters, metaw)
if counter > 0 {
transformedContentr = tctx.To.(*bytes.Buffer)
} else {
transformedContentr = contentrc
}
}
// Also write it to memory
var contentmemw *bytes.Buffer
if setContent {
contentmemw = bp.GetBuffer()
defer bp.PutBuffer(contentmemw)
publishwriters = append(publishwriters, contentmemw)
}
publishw := io.MultiWriter(publishwriters...)
_, r.transformErr = io.Copy(publishw, transformedContentr)
if setContent {
r.contentInit.Do(func() {
r.content = contentmemw.String()
})
}
return nil
}
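
The buffer juggling above is a ping-pong scheme: two reusable buffers alternate as input and output, so each step in the chain reads the previous step's output, and the final result is copied to all publish targets at once with io.MultiWriter. A stripped-down, self-contained sketch of the same flow; the transformation funcs here are placeholders, not Hugo's:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"os"
	"strings"
)

// A transformation step reads from src and writes to dst,
// standing in for ResourceTransformation.Transform.
type step func(dst io.Writer, src io.Reader) error

func upper(dst io.Writer, src io.Reader) error {
	b, err := io.ReadAll(src)
	if err != nil {
		return err
	}
	_, err = dst.Write(bytes.ToUpper(b))
	return err
}

func wrap(dst io.Writer, src io.Reader) error {
	b, err := io.ReadAll(src)
	if err != nil {
		return err
	}
	_, err = fmt.Fprintf(dst, "/* begin */%s/* end */", b)
	return err
}

func main() {
	var from io.Reader = strings.NewReader("body{color:red}")
	b1, b2 := new(bytes.Buffer), new(bytes.Buffer)

	to := b1
	for i, s := range []step{upper, wrap} {
		// Alternate the two buffers: the previous step's output
		// buffer becomes the next step's input.
		if i > 0 {
			if i%2 == 1 {
				from, to = b1, b2
			} else {
				from, to = b2, b1
			}
		}
		to.Reset()
		if err := s(to, from); err != nil {
			panic(err)
		}
	}

	// Publish the final buffer to all targets at once
	// (published file, cache file and in-memory content in Hugo's case).
	out := io.MultiWriter(os.Stdout /* , more writers ... */)
	if _, err := io.Copy(out, to); err != nil {
		panic(err)
	}
}
```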
func (r *transformedResource) initTransform(setContent bool) error {
r.transformInit.Do(func() {
if err := r.transform(setContent); err != nil {
r.transformErr = err
r.cache.rs.Logger.ERROR.Println("error: failed to transform resource:", err)
}
})
return r.transformErr
}
// contentReadSeekerCloser returns a ReadSeekerCloser if possible for a given Resource.
func contentReadSeekerCloser(r Resource) (ReadSeekCloser, error) {
switch rr := r.(type) {
case ReadSeekCloserResource:
rc, err := rr.ReadSeekCloser()
if err != nil {
return nil, err
}
return rc, nil
default:
return nil, fmt.Errorf("cannot tranform content of Resource of type %T", r)
}
}

View File

@ -0,0 +1,36 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package resource
import (
"testing"
"github.com/stretchr/testify/require"
)
type testStruct struct {
Name string
V1 int64
V2 int32
V3 int
V4 uint64
}
func TestResourceTransformationKey(t *testing.T) {
// We really need this key to be portable across OSes.
key := NewResourceTransformationKey("testing",
testStruct{Name: "test", V1: int64(10), V2: int32(20), V3: 30, V4: uint64(40)})
assert := require.New(t)
assert.Equal(key.key(), "testing_518996646957295636")
}

View File

@ -75,12 +75,18 @@ func newTestConfig() *viper.Viper {
v.Set("i18nDir", "i18n")
v.Set("layoutDir", "layouts")
v.Set("archetypeDir", "archetypes")
v.Set("resourceDir", "resources")
v.Set("publishDir", "public")
v.Set("assetDir", "assets")
return v
}
func newTestSourceSpec() *SourceSpec {
v := newTestConfig()
fs := hugofs.NewMem(v)
ps, _ := helpers.NewPathSpec(fs, v)
ps, err := helpers.NewPathSpec(fs, v)
if err != nil {
panic(err)
}
return NewSourceSpec(ps, fs.Source)
}

View File

@ -25,8 +25,8 @@ import (
type templateFinder int
func (templateFinder) Lookup(name string) *tpl.TemplateAdapter {
return nil
func (templateFinder) Lookup(name string) (tpl.Template, bool) {
return nil, false
}
func (templateFinder) GetFuncs() map[string]interface{} {

View File

@ -37,14 +37,14 @@ func init() {
ns.AddMethodMapping(ctx.ReadDir,
[]string{"readDir"},
[][2]string{
{`{{ range (readDir ".") }}{{ .Name }}{{ end }}`, "README.txt"},
{`{{ range (readDir "files") }}{{ .Name }}{{ end }}`, "README.txt"},
},
)
ns.AddMethodMapping(ctx.ReadFile,
[]string{"readFile"},
[][2]string{
{`{{ readFile "README.txt" }}`, `Hugo Rocks!`},
{`{{ readFile "files/README.txt" }}`, `Hugo Rocks!`},
},
)

View File

@ -34,7 +34,7 @@ func New(deps *deps.Deps) *Namespace {
if deps.Fs != nil {
rfs = deps.Fs.WorkingDir
if deps.PathSpec != nil && deps.PathSpec.BaseFs != nil {
rfs = afero.NewReadOnlyFs(afero.NewCopyOnWriteFs(deps.PathSpec.BaseFs.ContentFs, deps.Fs.WorkingDir))
rfs = afero.NewReadOnlyFs(afero.NewCopyOnWriteFs(deps.PathSpec.BaseFs.Content.Fs, deps.Fs.WorkingDir))
}
}

View File

@ -63,12 +63,13 @@ func (ns *Namespace) Include(name string, contextList ...interface{}) (interface
}
for _, n := range []string{"partials/" + name, "theme/partials/" + name} {
templ := ns.deps.Tmpl.Lookup(n)
if templ == nil {
templ, found := ns.deps.Tmpl.Lookup(n)
if !found {
// For legacy reasons.
templ = ns.deps.Tmpl.Lookup(n + ".html")
templ, found = ns.deps.Tmpl.Lookup(n + ".html")
}
if templ != nil {
if found {
b := bp.GetBuffer()
defer bp.PutBuffer(b)
@ -76,7 +77,7 @@ func (ns *Namespace) Include(name string, contextList ...interface{}) (interface
return "", err
}
if _, ok := templ.Template.(*texttemplate.Template); ok {
if _, ok := templ.(*texttemplate.Template); ok {
s := b.String()
if ns.deps.Metrics != nil {
ns.deps.Metrics.TrackValue(n, s)

68
tpl/resources/init.go Normal file
View File

@ -0,0 +1,68 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package resources
import (
"github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/tpl/internal"
)
const name = "resources"
func init() {
f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
ctx, err := New(d)
if err != nil {
// TODO(bep) no panic.
panic(err)
}
ns := &internal.TemplateFuncsNamespace{
Name: name,
Context: func(args ...interface{}) interface{} { return ctx },
}
ns.AddMethodMapping(ctx.Get,
nil,
[][2]string{},
)
// Add aliases for the most common transformations.
ns.AddMethodMapping(ctx.Fingerprint,
[]string{"fingerprint"},
[][2]string{},
)
ns.AddMethodMapping(ctx.Minify,
[]string{"minify"},
[][2]string{},
)
ns.AddMethodMapping(ctx.ToCSS,
[]string{"toCSS"},
[][2]string{},
)
ns.AddMethodMapping(ctx.PostCSS,
[]string{"postCSS"},
[][2]string{},
)
return ns
}
internal.AddTemplateFuncsNamespace(f)
}

255
tpl/resources/resources.go Normal file
View File

@ -0,0 +1,255 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package resources
import (
"errors"
"fmt"
"path/filepath"
"github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/resource"
"github.com/gohugoio/hugo/resource/bundler"
"github.com/gohugoio/hugo/resource/create"
"github.com/gohugoio/hugo/resource/integrity"
"github.com/gohugoio/hugo/resource/minifiers"
"github.com/gohugoio/hugo/resource/postcss"
"github.com/gohugoio/hugo/resource/templates"
"github.com/gohugoio/hugo/resource/tocss/scss"
"github.com/spf13/cast"
)
// New returns a new instance of the resources-namespaced template functions.
func New(deps *deps.Deps) (*Namespace, error) {
scssClient, err := scss.New(deps.BaseFs.Assets, deps.ResourceSpec)
if err != nil {
return nil, err
}
return &Namespace{
deps: deps,
scssClient: scssClient,
createClient: create.New(deps.ResourceSpec),
bundlerClient: bundler.New(deps.ResourceSpec),
integrityClient: integrity.New(deps.ResourceSpec),
minifyClient: minifiers.New(deps.ResourceSpec),
postcssClient: postcss.New(deps.ResourceSpec),
templatesClient: templates.New(deps.ResourceSpec, deps.TextTmpl),
}, nil
}
// Namespace provides template functions for the "resources" namespace.
type Namespace struct {
deps *deps.Deps
createClient *create.Client
bundlerClient *bundler.Client
scssClient *scss.Client
integrityClient *integrity.Client
minifyClient *minifiers.Client
postcssClient *postcss.Client
templatesClient *templates.Client
}
// Get locates the given filename in Hugo's assets filesystem
// and creates a Resource object that can be used for further transformations.
func (ns *Namespace) Get(filename interface{}) (resource.Resource, error) {
filenamestr, err := cast.ToStringE(filename)
if err != nil {
return nil, err
}
filenamestr = filepath.Clean(filenamestr)
// Resource Get'ing is currently limited to /assets to make it simpler
// to control the behaviour of publishing and partial rebuilding.
return ns.createClient.Get(ns.deps.BaseFs.Assets.Fs, filenamestr)
}
// Concat concatenates a slice of Resource objects. These resources must
// (currently) be of the same Media Type.
func (ns *Namespace) Concat(targetPathIn interface{}, r []interface{}) (resource.Resource, error) {
targetPath, err := cast.ToStringE(targetPathIn)
if err != nil {
return nil, err
}
rr := make([]resource.Resource, len(r))
for i := 0; i < len(r); i++ {
rv, ok := r[i].(resource.Resource)
if !ok {
return nil, fmt.Errorf("cannot concat type %T", rv)
}
rr[i] = rv
}
return ns.bundlerClient.Concat(targetPath, rr)
}
// FromString creates a Resource from a string published to the relative target path.
func (ns *Namespace) FromString(targetPathIn, contentIn interface{}) (resource.Resource, error) {
targetPath, err := cast.ToStringE(targetPathIn)
if err != nil {
return nil, err
}
content, err := cast.ToStringE(contentIn)
if err != nil {
return nil, err
}
return ns.createClient.FromString(targetPath, content)
}
// ExecuteAsTemplate creates a Resource from a Go template, parsed and executed with
// the given data, and published to the relative target path.
func (ns *Namespace) ExecuteAsTemplate(args ...interface{}) (resource.Resource, error) {
if len(args) != 3 {
return nil, fmt.Errorf("must provide targetPath, the template data context and a Resource object")
}
targetPath, err := cast.ToStringE(args[0])
if err != nil {
return nil, err
}
data := args[1]
r, ok := args[2].(resource.Resource)
if !ok {
return nil, fmt.Errorf("type %T not supported in Resource transformations", args[2])
}
return ns.templatesClient.ExecuteAsTemplate(r, targetPath, data)
}
// Fingerprint transforms the given Resource, embedding a hash of its content
// (using the optionally provided crypto algo) in the RelPermalink and Permalink.
func (ns *Namespace) Fingerprint(args ...interface{}) (resource.Resource, error) {
if len(args) < 1 || len(args) > 2 {
return nil, errors.New("must provide a Resource and (optional) crypto algo")
}
var algo string
resIdx := 0
if len(args) == 2 {
resIdx = 1
var err error
algo, err = cast.ToStringE(args[0])
if err != nil {
return nil, err
}
}
r, ok := args[resIdx].(resource.Resource)
if !ok {
return nil, fmt.Errorf("%T is not a Resource", args[resIdx])
}
return ns.integrityClient.Fingerprint(r, algo)
}
// Minify minifies the given Resource using the MediaType to pick the correct
// minifier.
func (ns *Namespace) Minify(r resource.Resource) (resource.Resource, error) {
return ns.minifyClient.Minify(r)
}
// ToCSS converts the given Resource to CSS. You can optionally provide an Options
// object or a target path (string) as the first argument.
func (ns *Namespace) ToCSS(args ...interface{}) (resource.Resource, error) {
var (
r resource.Resource
m map[string]interface{}
targetPath string
err error
ok bool
)
r, targetPath, ok = ns.resolveIfFirstArgIsString(args)
if !ok {
r, m, err = ns.resolveArgs(args)
if err != nil {
return nil, err
}
}
var options scss.Options
if targetPath != "" {
options.TargetPath = targetPath
} else if m != nil {
options, err = scss.DecodeOptions(m)
if err != nil {
return nil, err
}
}
return ns.scssClient.ToCSS(r, options)
}
// PostCSS processes the given Resource with PostCSS
func (ns *Namespace) PostCSS(args ...interface{}) (resource.Resource, error) {
r, m, err := ns.resolveArgs(args)
if err != nil {
return nil, err
}
var options postcss.Options
if m != nil {
options, err = postcss.DecodeOptions(m)
if err != nil {
return nil, err
}
}
return ns.postcssClient.Process(r, options)
}
// We allow a string or a map as the first argument in some cases.
func (ns *Namespace) resolveIfFirstArgIsString(args []interface{}) (resource.Resource, string, bool) {
if len(args) != 2 {
return nil, "", false
}
v1, ok1 := args[0].(string)
if !ok1 {
return nil, "", false
}
v2, ok2 := args[1].(resource.Resource)
return v2, v1, ok2
}
// This roundabout way of doing it is needed to get both pipeline behaviour and options as arguments.
func (ns *Namespace) resolveArgs(args []interface{}) (resource.Resource, map[string]interface{}, error) {
if len(args) == 0 {
return nil, nil, errors.New("no Resource provided in transformation")
}
if len(args) == 1 {
r, ok := args[0].(resource.Resource)
if !ok {
return nil, nil, fmt.Errorf("type %T not supported in Resource transformations", args[0])
}
return r, nil, nil
}
r, ok := args[1].(resource.Resource)
if !ok {
return nil, nil, fmt.Errorf("type %T not supported in Resource transformations", args[0])
}
m, err := cast.ToStringMapE(args[0])
if err != nil {
return nil, nil, fmt.Errorf("invalid options type: %s", err)
}
return r, m, nil
}
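
The net effect of resolveIfFirstArgIsString and resolveArgs is an argument convention where the Resource always comes last, which is what makes `{{ $r | toCSS (dict ...) }}` work: the piped value is appended as the final argument. A self-contained sketch of that convention; the Resource type and options map below are placeholders:

```go
package main

import (
	"errors"
	"fmt"
)

// Resource is a placeholder for resource.Resource.
type Resource struct{ Name string }

// resolveArgs accepts either (Resource) or (optionsMap, Resource).
// Keeping the Resource last is what lets the funcs be used in
// template pipelines, where the piped value becomes the final argument.
func resolveArgs(args ...interface{}) (Resource, map[string]interface{}, error) {
	switch len(args) {
	case 1:
		r, ok := args[0].(Resource)
		if !ok {
			return Resource{}, nil, fmt.Errorf("type %T not supported", args[0])
		}
		return r, nil, nil
	case 2:
		r, ok := args[1].(Resource)
		if !ok {
			return Resource{}, nil, fmt.Errorf("type %T not supported", args[1])
		}
		m, ok := args[0].(map[string]interface{})
		if !ok {
			return Resource{}, nil, fmt.Errorf("invalid options type %T", args[0])
		}
		return r, m, nil
	default:
		return Resource{}, nil, errors.New("no Resource provided in transformation")
	}
}

func main() {
	opts := map[string]interface{}{"targetPath": "css/main.css"}
	r, m, err := resolveArgs(opts, Resource{Name: "main.scss"})
	fmt.Println(r.Name, m, err) // main.scss map[targetPath:css/main.css] <nil>
}
```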

View File

@ -38,13 +38,15 @@ type TemplateHandler interface {
LoadTemplates(prefix string)
PrintErrors()
NewTextTemplate() TemplateParseFinder
MarkReady()
RebuildClone()
}
// TemplateFinder finds templates.
type TemplateFinder interface {
Lookup(name string) *TemplateAdapter
Lookup(name string) (Template, bool)
}
// Template is the common interface between text/template and html/template.
@ -53,6 +55,17 @@ type Template interface {
Name() string
}
// TemplateParser is used to parse ad-hoc templates, e.g. in the Resource chain.
type TemplateParser interface {
Parse(name, tpl string) (Template, error)
}
// TemplateParseFinder provides both parsing and finding.
type TemplateParseFinder interface {
TemplateParser
TemplateFinder
}
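
A toy implementation of TemplateParseFinder over the standard text/template package looks roughly like the textTemplate type added in tplimpl further down; a standalone sketch outside the Hugo codebase:

```go
package main

import (
	"os"
	"text/template"
)

// parseFinder is a toy TemplateParseFinder backed by text/template.
type parseFinder struct {
	t *template.Template
}

// Parse parses an ad-hoc template and registers it under the given name.
func (p *parseFinder) Parse(name, tpl string) (*template.Template, error) {
	return p.t.New(name).Parse(tpl)
}

// Lookup finds a previously parsed template by name.
func (p *parseFinder) Lookup(name string) (*template.Template, bool) {
	templ := p.t.Lookup(name)
	return templ, templ != nil
}

func main() {
	pf := &parseFinder{t: template.New("")}
	if _, err := pf.Parse("greet", "Hello, {{ . }}!\n"); err != nil {
		panic(err)
	}
	if templ, found := pf.Lookup("greet"); found {
		if err := templ.Execute(os.Stdout, "Hugo"); err != nil {
			panic(err)
		}
	}
}
```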
// TemplateExecutor adds some extras to Template.
type TemplateExecutor interface {
Template

View File

@ -70,18 +70,26 @@ type templateLoader interface {
}
type templateFuncsterTemplater interface {
templateFuncsterSetter
tpl.TemplateFinder
setFuncs(funcMap map[string]interface{})
}
type templateFuncsterSetter interface {
setTemplateFuncster(f *templateFuncster)
}
// templateHandler holds the templates in play.
// It implements the templateLoader and tpl.TemplateHandler interfaces.
type templateHandler struct {
mu sync.Mutex
// text holds all the pure text templates.
text *textTemplates
html *htmlTemplates
extTextTemplates []*textTemplate
amberFuncMap template.FuncMap
errors []*templateErr
@ -93,6 +101,19 @@ type templateHandler struct {
*deps.Deps
}
// NewTextTemplate provides a text template parser that has all the Hugo
// template funcs etc. built-in.
func (t *templateHandler) NewTextTemplate() tpl.TemplateParseFinder {
t.mu.Lock()
defer t.mu.Unlock()
tt := &textTemplate{t: texttemplate.New("")}
t.extTextTemplates = append(t.extTextTemplates, tt)
return tt
}
func (t *templateHandler) addError(name string, err error) {
t.errors = append(t.errors, &templateErr{name, err})
}
@ -111,7 +132,7 @@ func (t *templateHandler) PrintErrors() {
// Lookup tries to find a template with the given name in both template
// collections: First HTML, then the plain text template collection.
func (t *templateHandler) Lookup(name string) *tpl.TemplateAdapter {
func (t *templateHandler) Lookup(name string) (tpl.Template, bool) {
if strings.HasPrefix(name, textTmplNamePrefix) {
// The caller has explicitly asked for a text template, so only look
@ -123,8 +144,8 @@ func (t *templateHandler) Lookup(name string) *tpl.TemplateAdapter {
}
// Look in both
if te := t.html.Lookup(name); te != nil {
return te
if te, found := t.html.Lookup(name); found {
return te, true
}
return t.text.Lookup(name)
@ -136,7 +157,7 @@ func (t *templateHandler) clone(d *deps.Deps) *templateHandler {
Deps: d,
layoutsFs: d.BaseFs.Layouts.Fs,
html: &htmlTemplates{t: template.Must(t.html.t.Clone()), overlays: make(map[string]*template.Template)},
text: &textTemplates{t: texttemplate.Must(t.text.t.Clone()), overlays: make(map[string]*texttemplate.Template)},
text: &textTemplates{textTemplate: &textTemplate{t: texttemplate.Must(t.text.t.Clone())}, overlays: make(map[string]*texttemplate.Template)},
errors: make([]*templateErr, 0),
}
@ -171,7 +192,7 @@ func newTemplateAdapter(deps *deps.Deps) *templateHandler {
overlays: make(map[string]*template.Template),
}
textT := &textTemplates{
t: texttemplate.New(""),
textTemplate: &textTemplate{t: texttemplate.New("")},
overlays: make(map[string]*texttemplate.Template),
}
return &templateHandler{
@ -205,12 +226,12 @@ func (t *htmlTemplates) setTemplateFuncster(f *templateFuncster) {
t.funcster = f
}
func (t *htmlTemplates) Lookup(name string) *tpl.TemplateAdapter {
func (t *htmlTemplates) Lookup(name string) (tpl.Template, bool) {
templ := t.lookup(name)
if templ == nil {
return nil
return nil, false
}
return &tpl.TemplateAdapter{Template: templ, Metrics: t.funcster.Deps.Metrics}
return &tpl.TemplateAdapter{Template: templ, Metrics: t.funcster.Deps.Metrics}, true
}
func (t *htmlTemplates) lookup(name string) *template.Template {
@ -233,27 +254,25 @@ func (t *htmlTemplates) lookup(name string) *template.Template {
return nil
}
func (t *textTemplates) setTemplateFuncster(f *templateFuncster) {
t.funcster = f
}
type textTemplates struct {
*textTemplate
funcster *templateFuncster
t *texttemplate.Template
clone *texttemplate.Template
cloneClone *texttemplate.Template
overlays map[string]*texttemplate.Template
}
func (t *textTemplates) setTemplateFuncster(f *templateFuncster) {
t.funcster = f
}
func (t *textTemplates) Lookup(name string) *tpl.TemplateAdapter {
func (t *textTemplates) Lookup(name string) (tpl.Template, bool) {
templ := t.lookup(name)
if templ == nil {
return nil
return nil, false
}
return &tpl.TemplateAdapter{Template: templ, Metrics: t.funcster.Deps.Metrics}
return &tpl.TemplateAdapter{Template: templ, Metrics: t.funcster.Deps.Metrics}, true
}
func (t *textTemplates) lookup(name string) *texttemplate.Template {
@ -336,9 +355,34 @@ func (t *htmlTemplates) addLateTemplate(name, tpl string) error {
return t.addTemplateIn(t.clone, name, tpl)
}
type textTemplate struct {
t *texttemplate.Template
}
func (t *textTemplate) Parse(name, tpl string) (tpl.Template, error) {
return t.parseIn(t.t, name, tpl)
}
func (t *textTemplate) Lookup(name string) (tpl.Template, bool) {
tpl := t.t.Lookup(name)
return tpl, tpl != nil
}
func (t *textTemplate) parseIn(tt *texttemplate.Template, name, tpl string) (*texttemplate.Template, error) {
templ, err := tt.New(name).Parse(tpl)
if err != nil {
return nil, err
}
if err := applyTemplateTransformersToTextTemplate(templ); err != nil {
return nil, err
}
return templ, nil
}
func (t *textTemplates) addTemplateIn(tt *texttemplate.Template, name, tpl string) error {
name = strings.TrimPrefix(name, textTmplNamePrefix)
templ, err := tt.New(name).Parse(tpl)
templ, err := t.parseIn(tt, name, tpl)
if err != nil {
return err
}
@ -467,17 +511,22 @@ func (t *templateHandler) initFuncs() {
// Both template types will get their own funcster instance, which
// in the current case contains the same set of funcs.
for _, funcsterHolder := range []templateFuncsterTemplater{t.html, t.text} {
funcMap := createFuncMap(t.Deps)
for _, funcsterHolder := range []templateFuncsterSetter{t.html, t.text} {
funcster := newTemplateFuncster(t.Deps)
// The URL funcs in the funcMap is somewhat language dependent,
// so we need to wait until the language and site config is loaded.
funcster.initFuncMap()
funcster.initFuncMap(funcMap)
funcsterHolder.setTemplateFuncster(funcster)
}
for _, extText := range t.extTextTemplates {
extText.t.Funcs(funcMap)
}
// Amber is HTML only.
t.amberFuncMap = template.FuncMap{}

View File

@ -51,12 +51,12 @@ func (t *templateFuncster) partial(name string, contextList ...interface{}) (int
}
for _, n := range []string{"partials/" + name, "theme/partials/" + name} {
templ := t.Tmpl.Lookup(n)
if templ == nil {
templ, found := t.Tmpl.Lookup(n)
if !found {
// For legacy reasons.
templ = t.Tmpl.Lookup(n + ".html")
templ, found = t.Tmpl.Lookup(n + ".html")
}
if templ != nil {
if found {
b := bp.GetBuffer()
defer bp.PutBuffer(b)
@ -64,7 +64,7 @@ func (t *templateFuncster) partial(name string, contextList ...interface{}) (int
return "", err
}
if _, ok := templ.Template.(*texttemplate.Template); ok {
if _, ok := templ.(*texttemplate.Template); ok {
return b.String(), nil
}

View File

@ -30,6 +30,8 @@ func (*TemplateProvider) Update(deps *deps.Deps) error {
newTmpl := newTemplateAdapter(deps)
deps.Tmpl = newTmpl
deps.TextTmpl = newTmpl.NewTextTemplate()
newTmpl.initFuncs()
newTmpl.loadEmbedded()

View File

@ -18,6 +18,8 @@ package tplimpl
import (
"html/template"
"github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/tpl/internal"
// Init the namespaces
@ -35,6 +37,7 @@ import (
_ "github.com/gohugoio/hugo/tpl/os"
_ "github.com/gohugoio/hugo/tpl/partials"
_ "github.com/gohugoio/hugo/tpl/path"
_ "github.com/gohugoio/hugo/tpl/resources"
_ "github.com/gohugoio/hugo/tpl/safe"
_ "github.com/gohugoio/hugo/tpl/strings"
_ "github.com/gohugoio/hugo/tpl/time"
@ -42,12 +45,12 @@ import (
_ "github.com/gohugoio/hugo/tpl/urls"
)
func (t *templateFuncster) initFuncMap() {
func createFuncMap(d *deps.Deps) map[string]interface{} {
funcMap := template.FuncMap{}
// Merge the namespace funcs
for _, nsf := range internal.TemplateFuncsNamespaceRegistry {
ns := nsf(t.Deps)
ns := nsf(d)
if _, exists := funcMap[ns.Name]; exists {
panic(ns.Name + " is a duplicate template func")
}
@ -61,8 +64,13 @@ func (t *templateFuncster) initFuncMap() {
}
}
}
return funcMap
}
func (t *templateFuncster) initFuncMap(funcMap template.FuncMap) {
t.funcMap = funcMap
t.Tmpl.(*templateHandler).setFuncs(funcMap)
}

View File

@ -51,6 +51,9 @@ func newTestConfig() config.Provider {
v.Set("i18nDir", "i18n")
v.Set("layoutDir", "layouts")
v.Set("archetypeDir", "archetypes")
v.Set("assetDir", "assets")
v.Set("resourceDir", "resources")
v.Set("publishDir", "public")
return v
}
@ -76,12 +79,13 @@ func TestTemplateFuncsExamples(t *testing.T) {
v.Set("workingDir", workingDir)
v.Set("multilingual", true)
v.Set("contentDir", "content")
v.Set("assetDir", "assets")
v.Set("baseURL", "http://mysite.com/hugo/")
v.Set("CurrentContentLanguage", langs.NewLanguage("en", v))
fs := hugofs.NewMem(v)
afero.WriteFile(fs.Source, filepath.Join(workingDir, "README.txt"), []byte("Hugo Rocks!"), 0755)
afero.WriteFile(fs.Source, filepath.Join(workingDir, "files", "README.txt"), []byte("Hugo Rocks!"), 0755)
depsCfg := newDepsConfig(v)
depsCfg.Fs = fs
@ -113,7 +117,8 @@ func TestTemplateFuncsExamples(t *testing.T) {
require.NoError(t, d.LoadResources())
var b bytes.Buffer
require.NoError(t, d.Tmpl.Lookup("test").Execute(&b, &data))
templ, _ := d.Tmpl.Lookup("test")
require.NoError(t, templ.Execute(&b, &data))
if b.String() != expected {
t.Fatalf("%s[%d]: got %q expected %q", ns.Name, i, b.String(), expected)
}

View File

@ -18,6 +18,7 @@ import (
"github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/tpl"
"github.com/stretchr/testify/require"
)
@ -43,20 +44,22 @@ func TestHTMLEscape(t *testing.T) {
d, err := deps.New(depsCfg)
assert.NoError(err)
tpl := `{{ "<h1>Hi!</h1>" | safeHTML }}`
templ := `{{ "<h1>Hi!</h1>" | safeHTML }}`
provider := DefaultTemplateProvider
provider.Update(d)
h := d.Tmpl.(handler)
assert.NoError(h.addTemplate("shortcodes/myShort.html", tpl))
assert.NoError(h.addTemplate("shortcodes/myShort.html", templ))
s, err := d.Tmpl.Lookup("shortcodes/myShort.html").ExecuteToString(data)
tt, _ := d.Tmpl.Lookup("shortcodes/myShort.html")
s, err := tt.(tpl.TemplateExecutor).ExecuteToString(data)
assert.NoError(err)
assert.Contains(s, "<h1>Hi!</h1>")
s, err = d.Tmpl.Lookup("shortcodes/myShort").ExecuteToString(data)
tt, _ = d.Tmpl.Lookup("shortcodes/myShort")
s, err = tt.(tpl.TemplateExecutor).ExecuteToString(data)
assert.NoError(err)
assert.Contains(s, "<h1>Hi!</h1>")